Compare commits


250 Commits

Author SHA1 Message Date
Micha Reiser
ae39ce56c0 Bump version to 0.6.8 (#13522) 2024-09-26 14:09:03 +02:00
Micha Reiser
ff2d214e11 Don't skip over imports and other nodes containing nested statements in import collector (#13521) 2024-09-26 11:57:05 +00:00
Micha Reiser
9442cd8fae Parenthesize match..case if guards (#13513) 2024-09-26 06:44:33 +00:00
Micha Reiser
8012707348 Align formatting of patterns in match-cases with expression formatting in clause headers (#13510) 2024-09-26 08:35:22 +02:00
Charlie Marsh
d7ffe46054 Disable the typeset plugin (#13517)
## Summary

There seems to be a bad interaction between enabling anchorlinks and the
`typeset` plugin. I think the former is more important than the
latter... so disabling the latter for now.

## Test Plan

Before:

![Screenshot 2024-09-25 at 7 53 21 PM](https://github.com/user-attachments/assets/bf7c70bb-19ab-4ece-9709-4c297f8ba67b)

After:

![Screenshot 2024-09-25 at 7 53 12 PM](https://github.com/user-attachments/assets/e767a575-1664-4288-aecb-82e8b1b1a7bd)
2024-09-25 23:58:35 +00:00
haarisr
7c83af419c red-knot: Implement the not operator for all Type variants (#13432)
Signed-off-by: haaris <haarisrahman@gmail.com>
Co-authored-by: Carl Meyer <carl@oddbird.net>
2024-09-25 13:44:19 -07:00
Zanie Blue
bbb044ebda Detect tuples bound to variadic positional arguments i.e. *args (#13512)
In https://github.com/astral-sh/ruff/pull/13503, we added support for
detecting variadic keyword arguments as dictionaries; here, we use the
same strategy to detect variadic positional arguments as tuples.
2024-09-25 10:03:25 -05:00
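As a quick illustration of the property both PRs rely on (example code, not Ruff's implementation): parameters declared with `*` and `**` are always bound to a `tuple` and a `dict`, regardless of the call site.

```
def f(*args, **kwargs):
    # *args is always a tuple and **kwargs is always a dict,
    # so tuple- and dict-specific rules can safely apply to them.
    return type(args), type(kwargs)

assert f(1, 2, key="value") == (tuple, dict)
```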
Zanie Blue
481065238b Avoid UP028 false negatives with non-reference shadowed bindings of loop variables (#13504)
Closes https://github.com/astral-sh/ruff/issues/13266

Avoids false negatives for shadowed bindings that aren't actually
references to the loop variable. There are some shadowed bindings we
need to support still, e.g., `del` requires the loop variable to exist.
2024-09-25 10:03:09 -05:00
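A sketch of the UP028 pattern and the `del` case mentioned above (hypothetical example):

```
def gen(items):
    for x in items:  # UP028: can be rewritten as `yield from items`
        yield x

def gen_with_del(items):
    for x in items:
        yield x
    del x  # `del` requires the loop variable to exist, so the rewrite isn't safe
```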
Zanie Blue
11f06e0d55 Detect SIM910 when using variadic keyword arguments, i.e., **kwargs (#13503)
Closes https://github.com/astral-sh/ruff/issues/13493
2024-09-25 10:02:59 -05:00
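Since `**kwargs` is now known to be a dict, SIM910 can fire on it (illustrative):

```
def f(**kwargs):
    v = kwargs.get("key", None)  # SIM910: `None` is already the default
    v = kwargs.get("key")        # equivalent, preferred
    return v
```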
Dylan
f27a8b8c7a [internal] ComparableExpr (f)strings and bytes made invariant under concatenation (#13301) 2024-09-25 16:58:57 +02:00
Vince van Noort
ca0ae0a484 [pylint] Implement boolean-chained-comparison (R1716) (#13435)
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-09-25 09:14:12 +00:00
TomerBin
be1d5e3368 [red-knot] Add Type::bool and boolean expression inference (#13449) 2024-09-25 00:02:26 +00:00
Simon Brugman
03503f7f56 C401 message missing closing parenthesis (#13498) 2024-09-24 14:55:32 +02:00
Charlie Marsh
ff4b6d11fa Detect basic wildcard imports in ruff analyze graph (#13486)
## Summary

I guess we can just ignore the `*` entirely for now? This will add the
`__init__.py` for anything that's importing a package.
2024-09-23 18:09:00 -04:00
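A minimal sketch of the described behavior, with a hypothetical helper and paths:

```
def resolve_wildcard_import(module: str) -> str:
    # `from package import *`: ignore the `*` and record the package's
    # __init__.py as the dependency edge (hypothetical helper).
    return module.replace(".", "/") + "/__init__.py"

assert resolve_wildcard_import("django.apps") == "django/apps/__init__.py"
```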
Charlie Marsh
96e7f3f96f Exit gracefully on broken pipe errors (#13485)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13483.

Closes https://github.com/astral-sh/ruff/issues/13442.

## Test Plan

```
❯ cargo run analyze graph ../django | head -n 10
   Compiling ruff v0.6.7 (/Users/crmarsh/workspace/ruff/crates/ruff)
    Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.63s
     Running `target/debug/ruff analyze graph ../django`
warning: `ruff analyze graph` is experimental and may change without warning
{
  "/Users/crmarsh/workspace/django/django/__init__.py": [
    "/Users/crmarsh/workspace/django/django/apps/__init__.py",
    "/Users/crmarsh/workspace/django/django/conf/__init__.py",
    "/Users/crmarsh/workspace/django/django/urls/__init__.py",
    "/Users/crmarsh/workspace/django/django/utils/log.py",
    "/Users/crmarsh/workspace/django/django/utils/version.py"
  ],
  "/Users/crmarsh/workspace/django/django/__main__.py": [
    "/Users/crmarsh/workspace/django/django/core/management/__init__.py"
```
2024-09-23 13:48:43 +00:00
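A Python analogue of the behavior (the actual fix is in Ruff's Rust code; this only sketches the idea of treating a broken pipe as a clean exit):

```
import os
import sys

def main() -> int:
    try:
        for line in sys.stdin:  # stand-in for the command's real output loop
            sys.stdout.write(line)
    except BrokenPipeError:
        # The reader went away (e.g. piped into `head -n 10`): exit quietly
        # instead of failing with "Broken pipe (os error 32)".
        os.dup2(os.open(os.devnull, os.O_WRONLY), sys.stdout.fileno())
        return 0
    return 0

if __name__ == "__main__":
    sys.exit(main())
```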
Charlie Marsh
90dc7438ee Avoid panic when analyze graph hits broken pipe (#13484)
## Summary

I think we should also make the change that @BurntSushi recommended in
the linked issue, but this gets rid of the panic.

See: https://github.com/astral-sh/ruff/issues/13483

See: https://github.com/astral-sh/ruff/issues/13442

## Test Plan

```
warning: `ruff analyze graph` is experimental and may change without warning
{
  "/Users/crmarsh/workspace/django/django/__init__.py": [
    "/Users/crmarsh/workspace/django/django/apps/__init__.py",
    "/Users/crmarsh/workspace/django/django/conf/__init__.py",
    "/Users/crmarsh/workspace/django/django/urls/__init__.py",
    "/Users/crmarsh/workspace/django/django/utils/log.py",
    "/Users/crmarsh/workspace/django/django/utils/version.py"
  ],
  "/Users/crmarsh/workspace/django/django/__main__.py": [
    "/Users/crmarsh/workspace/django/django/core/management/__init__.py"
ruff failed
  Cause: Broken pipe (os error 32)
```
2024-09-23 09:43:09 -04:00
Micha Reiser
3e99ab141c Update Salsa (#13480) 2024-09-23 14:04:04 +02:00
renovate[bot]
115745a8ac Update dependency monaco-editor to ^0.52.0 (#13475)
This PR contains the following updates:

| Package | Change |
|---|---|
| [monaco-editor](https://redirect.github.com/microsoft/monaco-editor) | `^0.51.0` -> `^0.52.0` |

---

### Release Notes

<details>
<summary>microsoft/monaco-editor (monaco-editor)</summary>

### [`v0.52.0`](https://redirect.github.com/microsoft/monaco-editor/blob/HEAD/CHANGELOG.md#0520)

[Compare Source](https://redirect.github.com/microsoft/monaco-editor/compare/v0.51.0...v0.52.0)

-   Comment added inside of `IModelContentChangedEvent`

</details>


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-23 09:55:12 +02:00
renovate[bot]
8bb59d7216 Update Rust crate unicode_names2 to v1.3.0 (#13474)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [unicode_names2](https://redirect.github.com/progval/unicode_names2) | workspace.dependencies | minor | `1.2.2` -> `1.3.0` |


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-23 09:54:44 +02:00
renovate[bot]
47aac060de Update Rust crate insta to v1.40.0 (#13472)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-23 09:40:02 +02:00
Steve C
7c55330534 Fix formatting for analyze direction values (#13476) 2024-09-23 09:18:28 +02:00
renovate[bot]
047d77c60b Update pre-commit dependencies (#13467) 2024-09-22 22:54:34 -04:00
renovate[bot]
18fddd458a Update dependency eslint to v8.57.1 (#13465) 2024-09-22 22:54:14 -04:00
Charlie Marsh
db76000521 Use anchorlinks rather than permalinks (#13471)
## Summary

See: https://github.com/astral-sh/uv/pull/7626
2024-09-23 02:44:45 +00:00
renovate[bot]
a2ed1e1cd1 Update Rust crate thiserror to v1.0.64 (#13462) 2024-09-22 22:32:45 -04:00
renovate[bot]
7457679582 Update Rust crate dashmap to v6.1.0 (#13470) 2024-09-22 22:32:26 -04:00
renovate[bot]
1d352872ba Update Rust crate codspeed-criterion-compat to v2.7.2 (#13469) 2024-09-22 22:32:20 -04:00
renovate[bot]
c8b905bc96 Update NPM Development dependencies (#13468) 2024-09-22 22:32:11 -04:00
renovate[bot]
5b593d0397 Update dependency ruff to v0.6.7 (#13466) 2024-09-22 22:32:02 -04:00
renovate[bot]
c5c5acda23 Update Rust crate unicode-normalization to v0.1.24 (#13464) 2024-09-22 22:31:53 -04:00
renovate[bot]
26747aae75 Update Rust crate unicode-ident to v1.0.13 (#13463) 2024-09-22 22:31:47 -04:00
renovate[bot]
85b825a2a1 Update Rust crate syn to v2.0.77 (#13461) 2024-09-22 22:31:40 -04:00
renovate[bot]
9e764ef6d0 Update Rust crate serde_json to v1.0.128 (#13460) 2024-09-23 02:03:47 +00:00
renovate[bot]
0e325a53ef Update Rust crate serde to v1.0.210 (#13459) 2024-09-23 02:03:15 +00:00
renovate[bot]
2a136cfb57 Update Rust crate pretty_assertions to v1.4.1 (#13458) 2024-09-23 02:02:12 +00:00
renovate[bot]
7749164d4a Update Rust crate ordermap to v0.5.3 (#13457) 2024-09-23 02:01:44 +00:00
renovate[bot]
da50e14524 Update Rust crate lsp-server to v0.7.7 (#13456) 2024-09-23 02:00:23 +00:00
renovate[bot]
1886b731a5 Update Rust crate ignore to v0.4.23 (#13455) 2024-09-22 22:00:06 -04:00
renovate[bot]
364eddc95a Update Rust crate globset to v0.4.15 (#13454) 2024-09-22 22:00:01 -04:00
renovate[bot]
48fb340e3b Update Rust crate filetime to v0.2.25 (#13453) 2024-09-22 21:59:50 -04:00
renovate[bot]
71bb4d3bdc Update Rust crate clap to v4.5.18 (#13452) 2024-09-22 21:59:44 -04:00
renovate[bot]
5c20f570d0 Update Rust crate anyhow to v1.0.89 (#13451) 2024-09-23 01:58:14 +00:00
Charlie Marsh
7441da287f Skip traversal for non-compound statements (#13441)
## Summary

None of these can contain imports.
2024-09-21 20:47:30 +00:00
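A rough sketch of the idea, using Python's `ast` as a stand-in for Ruff's Rust AST (`match` statements omitted for brevity):

```
import ast

COMPOUND = (ast.If, ast.For, ast.AsyncFor, ast.While, ast.With,
            ast.AsyncWith, ast.Try, ast.FunctionDef,
            ast.AsyncFunctionDef, ast.ClassDef)

def collect_imports(stmts, found):
    for stmt in stmts:
        if isinstance(stmt, (ast.Import, ast.ImportFrom)):
            found.append(stmt)
        elif isinstance(stmt, COMPOUND):
            # Only compound statements can contain nested statements,
            # so only they need recursive traversal.
            for field in ("body", "orelse", "finalbody"):
                collect_imports(getattr(stmt, field, []), found)
            for handler in getattr(stmt, "handlers", []):
                collect_imports(handler.body, found)
```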
Charlie Marsh
c2a5179d75 Reuse BTreeSets in module resolver (#13440)
## Summary

For dependencies, there's no reason to re-allocate here, since we know
the paths are unique.
2024-09-21 20:14:32 +00:00
Charlie Marsh
17c4690b5e Bump version to v0.6.7 (#13439) 2024-09-21 13:16:36 -04:00
Charlie Marsh
f06d44e6e5 Use forget for module resolver database (#13438)
## Summary

A tiny bit faster and the `red-knot` CLI does the same thing.
2024-09-21 17:00:02 +00:00
Micha Reiser
653c09001a Use an empty vendored file system in Ruff (#13436)
## Summary

This PR removes the typeshed stubs from the vendored file system
shipped with ruff and instead ships an empty "typeshed".

Making the typeshed files optional required extracting the typeshed files
into a new `ruff_vendored` crate. I do like this even if all our builds
always include typeshed because it means `red_knot_python_semantic`
contains less code that needs compiling.

This also allows us to use deflate because the compression algorithm
doesn't matter for an archive containing a single, empty file.

## Test Plan

`cargo test`

I verified with `cargo tree -f "{p} {f}" -p <package>` that:

* red_knot_wasm: enables `deflate` compression
* red_knot: enables `zstd` compression
* `ruff`: uses `stored` (no compression)


I'm not quite sure how to build the binary that maturin builds, but
comparing the release artifact size with `strip = true` shows a `1.5MB`
size reduction.

---------

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
2024-09-21 16:31:42 +00:00
Micha Reiser
8921fbb54c vendored_typeshed_versions should use db.vendored (#13434) 2024-09-21 16:35:06 +02:00
Charlie Marsh
3018303c87 Avoid parsing with Salsa (#13437)
## Summary

For reasons I haven't investigated, this speeds up the resolver about 2x
(from 6.404s to 3.612s on an extremely large codebase).

## Test Plan

\cc @BurntSushi 

```
[andrew@duff rippling]$ time ruff analyze graph --preview > /dev/null

real    3.274
user    16.039
sys     7.609
maxmem  11631 MB
faults  0
[andrew@duff rippling]$ time ruff-patch analyze graph --preview > /dev/null

real    1.841
user    14.625
sys     3.639
maxmem  7173 MB
faults  0
[andrew@duff rippling]$ time ruff-patch2 analyze graph --preview > /dev/null

real    2.087
user    15.333
sys     4.869
maxmem  8642 MB
faults  0
```

Where that's `main`, then (`ruff-patch`) using the version with no
`File`, no `SemanticModel`, then (`ruff-patch2`) using `File`.
2024-09-21 13:52:16 +00:00
haarisr
6c303b2445 red-knot: Add not unary operator for boolean literals (#13422)
## Summary

Contributes to #12701

## Test Plan

Added test for boolean literals

Signed-off-by: haaris <haarisrahman@gmail.com>
2024-09-20 15:24:38 -07:00
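Illustrative of the inference being added (revealed-type syntax approximate):

```
reveal_type(not True)   # Literal[False]
reveal_type(not False)  # Literal[True]
```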
Charlie Marsh
7579a792c7 Add test coverage for non-Python globs (#13430) 2024-09-20 20:46:00 +00:00
Charlie Marsh
0bbc138037 Upgrade to latest cargo-dist version (#13416)
## Summary

Follows https://github.com/astral-sh/uv/pull/7092.
2024-09-20 15:59:32 -04:00
Charlie Marsh
ff11db61b4 Add Python version support to ruff analyze CLI (#13426) 2024-09-20 15:40:47 -04:00
Charlie Marsh
2823487bf8 Respect lint.exclude in ruff check --add-noqa (#13427)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13423.
2024-09-20 19:39:36 +00:00
Charlie Marsh
910fac781d Add exclude support to ruff analyze (#13425)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13424.
2024-09-20 15:34:35 -04:00
Carl Meyer
149fb2090e [red-knot] more efficient UnionBuilder::add (#13411)
Avoid quadratic time in subsumed elements when adding a super-type of
existing union elements.

Reserve space in advance when adding multiple elements (from another
union) to a union.

Make union elements a `Box<[Type]>` instead of an `FxOrderSet`; the set
doesn't buy much since the rules of union uniqueness are defined in
terms of supertype/subtype, not in terms of simple type identity.

Move sealed-boolean handling out of a separate `UnionBuilder::simplify`
method and into `UnionBuilder::add`; now that `add` is iterating
existing elements anyway, this is more efficient.

Remove `UnionType::contains`, since it's now `O(n)` and we shouldn't
really need it, generally we care about subtype/supertype, not type
identity. (Right now it's used for `Type::Unbound`, which shouldn't even
be a type.)

Add support for `is_subtype_of` for the `object` type.

Addresses comments on https://github.com/astral-sh/ruff/pull/13401
2024-09-20 10:49:45 -07:00
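A language-agnostic sketch, in Python with a hypothetical `is_subtype_of`, of the single-pass `add` described above:

```
def union_add(elements, new, is_subtype_of):
    # One pass over the existing elements: if some element already subsumes
    # `new`, the union is unchanged; otherwise drop every element that `new`
    # subsumes. This avoids re-scanning subsumed elements quadratically.
    kept = []
    for t in elements:
        if is_subtype_of(new, t):
            return elements      # `new` adds nothing
        if not is_subtype_of(t, new):
            kept.append(t)       # keep elements not subsumed by `new`
    kept.append(new)
    return kept
```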
Carl Meyer
40c65dcfa7 [red-knot] dedicated error message for all-union-elements not callable (#13412)
This was mentioned in an earlier review, and seemed easy enough to just
do. There's no need to repeat all the types when doing so gives no
additional information.
2024-09-20 08:08:43 -07:00
yahayaohinoyi
03f3a4e855 [pydocstyle] Fix: Don't autofix if the first line ends in a question mark? (D400) (#13399)
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-09-20 11:05:26 +00:00
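For example (illustrative):

```
def f():
    """Is a period appropriate here?"""
    # D400 ("first line should end with a period") previously offered an
    # autofix appending a period after the "?"; that fix is now skipped.
```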
Micha Reiser
531ebf6dff Fix parentheses around return type annotations (#13381) 2024-09-20 09:23:53 +02:00
Rupert Tombs
7c2011599f Correct an incorrect `Some` value (#13418) 2024-09-20 08:25:58 +02:00
Charlie Marsh
17e90823da Some minor internal refactors for module graph (#13417) 2024-09-20 00:21:30 -04:00
Charlie Marsh
d01cbf7f8f Bump version to v0.6.6 (#13415) 2024-09-19 23:09:57 -04:00
Charlie Marsh
770b276c21 Cache glob resolutions in import graph (#13413)
## Summary

These are often repeated; caching the resolutions can have a huge
impact.
2024-09-20 02:24:06 +00:00
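The idea as a minimal Python sketch (hypothetical helper; Ruff's implementation is in Rust):

```
import glob
from functools import lru_cache

@lru_cache(maxsize=None)
def resolve_glob(pattern: str) -> tuple[str, ...]:
    # The same glob patterns repeat across many files, so memoizing
    # the expansion avoids re-walking the filesystem.
    return tuple(glob.glob(pattern, recursive=True))
```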
Charlie Marsh
4e935f7d7d Add a subcommand to generate dependency graphs (#13402)
## Summary

This PR adds an experimental Ruff subcommand to generate dependency
graphs based on module resolution.

A few highlights:

- You can generate either dependency or dependent graphs via the
`--direction` command-line argument.
- Like Pants, we also provide an option to identify imports from string
literals (`--detect-string-imports`).
- Users can also provide additional dependency data via the
`include-dependencies` key under `[tool.ruff.import-map]`. This map uses
file paths as keys, and lists of strings as values. Those strings can be
file paths or globs.

The dependency resolution uses the red-knot module resolver which is
intended to be fully spec compliant, so it's also a chance to expose the
module resolver in a real-world setting.

The CLI is, e.g., `ruff graph build ../autobot`, which will output a
JSON map from file to files it depends on for the `autobot` project.
2024-09-19 21:06:32 -04:00
Carl Meyer
260c2ecd15 [red-knot] visit with-item vars even if not a Name (#13409)
This fixes the last panic on checking pandas.

(Match statement became an `if let` because clippy decided it wanted
that once I added the additional line in the else case?)

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-19 10:37:49 -07:00
Dylan
f110d80279 [refurb] Skip slice-to-remove-prefix-or-suffix (FURB188) when nontrivial slice step is present (#13405) 2024-09-19 12:47:17 -04:00
Carl Meyer
a6d3d2fccd [red-knot] support reveal_type as pseudo-builtin (#13403)
Support using `reveal_type` without importing it, as implied by the type
spec and supported by existing type checkers.

We use `typing_extensions.reveal_type` for the implicit built-in; this
way it exists on all Python versions. (It imports from `typing` on newer
Python versions.)

Emits an "undefined name" diagnostic whenever `reveal_type` is
referenced in this way (in addition to the revealed-type diagnostic when
it is called). This follows the mypy example (with `--enable-error-code
unimported-reveal`) and I think provides a good (and easily
understandable) balance for user experience. If you are using
`reveal_type` for quick temporary debugging, the additional
undefined-name diagnostic doesn't hinder that use case. If we make the
revealed-type diagnostic a non-failing one, the undefined-name
diagnostic can still be a failing diagnostic, helping prevent
accidentally leaving it in place. For any use cases where you want to
leave it in place, you can always import it to avoid the undefined-name
diagnostic.

In the future, we can easily provide configuration options to a) turn
off builtin-reveal_type altogether, and/or b) silence the undefined-name
diagnostic when using it, if we have users on either side (loving or
hating pseudo-builtin `reveal_type`) who are dissatisfied with this
compromise.
2024-09-19 07:58:08 -07:00
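To illustrate the described behavior (diagnostic wording approximate):

```
x = 1
reveal_type(x)  # revealed-type diagnostic, plus "undefined name" (not imported)

from typing_extensions import reveal_type
reveal_type(x)  # importing it silences the undefined-name diagnostic
```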
Micha Reiser
afdb659111 Fix off-by one error in the LineIndex::offset calculation (#13407) 2024-09-19 11:58:45 +00:00
Simon
a8d9104fa3 Fix/#13070 defer annotations when future is active (#13395) 2024-09-19 10:13:37 +02:00
Micha Reiser
d3530ab997 Fix rendering of FURB188 docs (#13406) 2024-09-19 07:29:31 +00:00
Carl Meyer
cf1e91bb59 [red-knot] simplify subtypes from unions (#13401)
Add `Type::is_subtype_of` method, and simplify subtypes out of unions.
2024-09-18 22:06:39 -07:00
Carl Meyer
125eaafae0 [red-knot] inferred type, not Unknown, for undeclared paths (#13400)
After looking at more cases (for example, the case in the added test in
this PR), I realized that our previous rule, "if a symbol has any
declarations, use only declarations for its public type" is not
adequate. Rather than using `Unknown` as fallback if the symbol is not
declared in some paths, we need to use the inferred type as fallback in
that case.

For the paths where the symbol _was_ declared, we know that any bindings
must be assignable to the declared type in that path, so this won't
change the overall declared type in those paths. But for paths where the
symbol wasn't declared, this will give us a better type in place of
`Unknown`.
2024-09-18 21:47:49 -07:00
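For example (types approximate):

```
def f(flag: bool):
    if flag:
        x: int = 1  # declared on this path
    else:
        x = "a"     # undeclared on this path: fall back to the inferred type
    # public type of `x`: `int | Literal["a"]` rather than `int | Unknown`
```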
Carl Meyer
7aae80903c [red-knot] add support for typing_extensions.reveal_type (#13397)
Before `typing.reveal_type` existed, there was
`typing_extensions.reveal_type`. We should support both.

Also adds a test to verify that we can handle aliasing of `reveal_type`
to a different name.

Adds a bit of code to ensure that if we have a union of different
`reveal_type` functions (e.g. a union containing both
`typing_extensions.reveal_type` and `typing.reveal_type`) we still emit
the reveal-type diagnostic only once. This is probably unlikely in
practice, but it doesn't hurt to handle it smoothly. (It comes up now
because we don't support `version_info` checks yet, so
`typing_extensions.reveal_type` is actually that union.)

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-18 21:39:03 -07:00
Carl Meyer
4aca9b91ba [red-knot] consider imports to be declarations (#13398)
I noticed that this pattern sometimes occurs in typeshed:
```
if ...:
    from foo import bar
else:
    def bar(): ...
```

If we have the rule that symbols with declarations only use declarations
for the public type, then this ends up resolving as `Unknown |
Literal[bar]`, because we didn't consider the import to be a
declaration.

I think the most straightforward thing here is to also consider imports
as declarations. The same rationale applies as for function and class
definitions: if you shadow an import, you should have to explicitly
shadow with an annotation, rather than just doing it
implicitly/accidentally.

We may also ultimately need to re-evaluate the rule that public type
considers only declarations, if there are declarations.
2024-09-18 20:59:03 -07:00
Hamir Mahal
8b3da1867e refactor: remove unnecessary string hashes (#13250) 2024-09-18 19:08:59 +02:00
Carl Meyer
c173ec5bc7 [red-knot] support for typing.reveal_type (#13384)
Add support for the `typing.reveal_type` function, emitting a diagnostic
revealing the type of its single argument. This is a necessary piece for
the planned testing framework.

This puts the cart slightly in front of the horse, in that we don't yet
have proper support for validating call signatures / argument types. But
it's easy to do just enough to make `reveal_type` work.

This PR includes support for calling union types (this is necessary
because we don't yet support `sys.version_info` checks, so
`typing.reveal_type` itself is a union type), plus some nice
consolidated error messages for calls to unions where some elements are
not callable. This is mostly to demonstrate the flexibility in
diagnostics that we get from the `CallOutcome` enum.
2024-09-18 09:59:51 -07:00
Charlie Marsh
44d916fb4e Respect FastAPI aliases in route definitions (#13394)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13263
2024-09-18 12:06:49 -04:00
Micha Reiser
4eb849aed3 Update the revisions of the formatter stability check projects (#13380) 2024-09-18 08:26:40 +02:00
Micha Reiser
6ac61d7b89 Fix placement of inline parameter comments (#13379) 2024-09-18 08:26:06 +02:00
renovate[bot]
c7b2e336f0 Update dependency vite to v5.4.6 (#13385)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-18 08:25:49 +02:00
Charlie Marsh
70748950ae Respect word boundaries when detecting function signature in docs (#13388)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13242.
2024-09-18 00:01:38 -04:00
Carl Meyer
dcfebaa4a8 [red-knot] use declared types in inference/checking (#13335)
Use declared types in inference and checking. This means several things:

* Imports prefer declarations over inference, when declarations are
available.
* When we encounter a binding, we check that the bound value's inferred
type is assignable to the live declarations of the bound symbol, if any.
* When we encounter a declaration, we check that the declared type is
assignable from the inferred type of the symbol from previous bindings,
if any.
* When we encounter a binding+declaration, we check that the inferred
type of the bound value is assignable to the declared type.
2024-09-17 08:11:06 -07:00
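Concretely, the checks above catch cases like (illustrative):

```
x: int = "wrong"  # binding + declaration: the value must be assignable to `int`

y = "a"           # binding: infers Literal["a"]
y: int            # declaration: must be assignable *from* the inferred type
```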
Micha Reiser
d86e5ad031 Update Black tests (#13375) 2024-09-17 11:16:50 +02:00
Simon Brugman
bb12fe9d0c DOCS: navigate back to rule overview linter (#13368) 2024-09-16 16:21:26 +00:00
Micha Reiser
3b57faf19b Fix build of ruff_benchmark on NixOS (#13366) 2024-09-16 09:41:46 +02:00
renovate[bot]
c9f7c3d652 Update dependency react-resizable-panels to v2.1.3 (#13360)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-16 07:38:21 +00:00
Micha Reiser
489dbbaadc Add diagnostics panel and navigation features to playground (#13357) 2024-09-16 07:34:46 +00:00
renovate[bot]
47e9ea2d5d Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.5 (#13362)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-16 09:34:14 +02:00
renovate[bot]
7919a7122a Update NPM Development dependencies (#13363)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-16 09:32:56 +02:00
renovate[bot]
a70d693b1c Update dependency ruff to v0.6.5 (#13361)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-16 09:32:06 +02:00
github-actions[bot]
1365b0806d Sync vendored typeshed stubs (#13355)
Close and reopen this PR to trigger CI

Co-authored-by: typeshedbot <>
2024-09-14 20:40:42 -04:00
Alex Waygood
f4de49ab37 [red-knot] Clarify how scopes are pushed and popped for comprehensions and generator expressions (#13353) 2024-09-14 13:31:17 -04:00
François-Michel L'Heureux
8b49845537 Fix documentation for editor vim plugin ALE (#13348)
The documented configuration did not work. On failure, ALE suggests
running `ALEFixSuggest`, which documents the working configuration key:

'ruff_format' - Fix python files with the ruff formatter.

This fixes an inaccuracy in the documentation regarding the ALE plugin
for the Vim text editor.
2024-09-13 23:27:17 +05:30
Carl Meyer
d988204b1b [red-knot] add Declarations support to semantic indexing (#13334)
Add support for declared types to the semantic index. This involves a
lot of renaming to clarify the distinction between bindings and
declarations. The Definition (or more specifically, the DefinitionKind)
becomes responsible for determining which definitions are bindings,
which are declarations, and which are both, and the symbol table
building is refactored a bit so that the `IS_BOUND` (renamed from
`IS_DEFINED` for consistent terminology) flag is always set when a
binding is added, rather than being set separately (and requiring us to
ensure it is set properly).

The `SymbolState` is split into two parts, `SymbolBindings` and
`SymbolDeclarations`, because we need to store live bindings for every
declaration and live declarations for every binding; the split lets us
do this without storing more than we need.

The massive doc comment in `use_def.rs` is updated to reflect bindings
vs declarations.

The `UseDefMap` gains some new APIs which are allow-unused for now,
since this PR doesn't yet update type inference to take declarations
into account.
2024-09-13 13:55:22 -04:00
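The binding/declaration distinction, illustrated (examples only):

```
x: int      # declaration only: no value is bound
x = 1       # binding only: no declared type involved
x: int = 1  # both a binding and a declaration
```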
Dhruv Manilawala
8558126df1 Bump version to 0.6.5 (#13346) 2024-09-13 20:12:26 +05:30
Dhruv Manilawala
9bd9981e70 Create insta snapshot for SARIF output (#13345)
## Summary

Follow-up from #13268, this PR updates the test case to use
`assert_snapshot` now that the output is limited to only include the
rules with diagnostics.

## Test Plan

`cargo insta test`
2024-09-13 14:35:45 +00:00
Micha Reiser
21bfab9b69 Playground: Add Copy as pyproject.toml/ruff.toml and paste from TOML (#13328) 2024-09-13 13:44:24 +01:00
Carl Meyer
43a5922f6f [red-knot] add BitSet::is_empty and BitSet::union (#13333)
Add `::is_empty` and `::union` methods to the `BitSet` implementation.

Allowing unused for now, until these methods become used later with the
declared-types implementation.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-12 14:25:45 -04:00
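For reference, the two operations in a minimal bit-set sketch (Python; the real implementation is in Rust):

```
class BitSet:
    def __init__(self) -> None:
        self.bits = 0

    def insert(self, i: int) -> None:
        self.bits |= 1 << i

    def is_empty(self) -> bool:
        return self.bits == 0

    def union(self, other: "BitSet") -> None:
        self.bits |= other.bits  # in-place union of the two sets
```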
Carl Meyer
175d067250 [red-knot] add initial Type::is_equivalent_to and Type::is_assignable_to (#13332)
These are quite incomplete, but I needed to start stubbing them out in
order to build and test declared-types.

Allowing unused for now, until they are used later in the declared-types
PR.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-12 14:15:25 -04:00
Alex Waygood
4dc2c257ef [red-knot] Fix type inference for except* definitions (#13320) 2024-09-11 15:05:40 -04:00
Dhruv Manilawala
b72d49be16 Add support for extensionless Python files for server (#13326)
## Summary

Closes: #12539 

## Test Plan

https://github.com/user-attachments/assets/e49b2669-6f12-4684-9e45-a3321b19b659
2024-09-12 00:35:26 +05:30
Alexey Preobrazhenskiy
eded78a39b [pyupgrade] Fix broken doc link and clarify that deprecated aliases were removed in Python 3.12 (UP005) (#13327) 2024-09-11 14:27:08 -04:00
Alex Waygood
a7b8cc08f0 [red-knot] Fix .to_instance() for union types (#13319) 2024-09-10 22:41:45 +00:00
Alex Waygood
b93d0ab57c [red-knot] Add control flow for for loops (#13318) 2024-09-10 22:04:35 +00:00
Alex Waygood
e6b927a583 [red-knot] Add a convenience method for constructing a union from a list of elements (#13315) 2024-09-10 17:38:56 -04:00
Alex Waygood
acab1f4fd8 Remove allocation from ruff_python_stdlib::builtins::python_builtins (#13317) 2024-09-10 16:34:24 -04:00
Alex Waygood
2ca78721e6 [red-knot] Improve type inference for iteration over heterogeneous tuples (#13314)
Followup to #13295
2024-09-10 15:13:50 -04:00
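Illustrative of the improvement (revealed-type syntax approximate):

```
for x in (1, "a"):
    reveal_type(x)  # union of the element types: Literal[1] | Literal["a"]
```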
Micha Reiser
a528edad35 Disable jemalloc decay in benchmarks (#13299) 2024-09-10 19:32:43 +01:00
Alex Waygood
1d5bd89987 [pyflakes] Improve error message for UndefinedName when a builtin was added in a newer version than specified in Ruff config (F821) (#13293) 2024-09-10 18:03:52 +00:00
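For example, under a hypothetical Python 3.9 target, the message can now point at the version mismatch:

```
x = aiter  # F821: `aiter` is undefined; it was added as a builtin in Python 3.10
```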
Dhruv Manilawala
b7cef6c999 [red-knot] Add heterogeneous tuple type variant (#13295)
## Summary

This PR adds a new `Type` variant called `TupleType` which is used for
heterogeneous elements.

### Display notes

* For an empty tuple, I'm using `tuple[()]` as described in the docs:
https://docs.python.org/3/library/typing.html#annotating-tuples
* For nested elements, it'll use the literal type instead of builtin
type unlike Pyright which does `tuple[Literal[1], tuple[int, int]]`
instead of `tuple[Literal[1], tuple[Literal[2], Literal[3]]]`. Also,
mypy would give `tuple[builtins.int, builtins.int]` instead of
`tuple[Literal[1], Literal[2]]`

## Test Plan

Update test case to account for the display change and add cases for
multiple elements and nested tuple elements.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
Co-authored-by: Carl Meyer <carl@astral.sh>
2024-09-10 17:54:19 +00:00
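The display choices above, in brief (examples taken from the description):

```
reveal_type(())           # tuple[()]
reveal_type((1, (2, 3)))  # tuple[Literal[1], tuple[Literal[2], Literal[3]]]
```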
Micha Reiser
110193af57 Fix tuple expansion example in formatter compatibility document (#13313) 2024-09-10 17:47:12 +00:00
Auguste Lalande
d6bd841512 [pydoclint] Ignore DOC201 when function name is "__new__" (#13300) 2024-09-10 13:25:38 -04:00
Alexey Preobrazhenskiy
210a9e6068 [isort] Improve rule documentation with a link to the option (I002) (#13308) 2024-09-10 09:36:21 -04:00
Micha Reiser
7c872e639b Only run executable rules when they are enabled (#13298) 2024-09-10 01:46:55 +01:00
Luo Peng
5ef6979d9a Only include rules with diagnostics in SARIF metadata (#13268) 2024-09-09 22:23:53 +01:00
Dhruv Manilawala
62c7d8f6ba [red-knot] Add control flow support for match statement (#13241)
## Summary

This PR adds support for control flow for match statement.

It also adds the necessary infrastructure for narrowing constraints in
case blocks and implements the logic for `PatternMatchSingleton`, which
is either `None` / `True` / `False`. Even after this, the inferred type
doesn't get simplified completely; there's a TODO for that in the test
code.

## Test Plan

Add test cases for control flow for (a) when there's a wildcard pattern
and (b) when there isn't. There's also a test case to verify the
narrowing logic.

---------

Co-authored-by: Carl Meyer <carl@astral.sh>
2024-09-10 02:14:19 +05:30
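Illustrative of the `PatternMatchSingleton` narrowing (example only):

```
def f(value):
    match value:
        case None:
            reveal_type(value)  # narrowed to None
        case _:
            pass  # wildcard: no narrowing here
```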
Alex Waygood
6f53aaf931 [red-knot] Add type inference for loop variables inside comprehension scopes (#13251) 2024-09-09 20:22:01 +00:00
Micha Reiser
ac720cd705 ERA001: Ignore script-comments with multiple end-tags (#13283) 2024-09-09 19:47:39 +01:00
Micha Reiser
312bd86e48 Fix configuration inheritance for configurations specified in the LSP settings (#13285) 2024-09-09 19:46:39 +01:00
Dylan
b04948fb72 [refurb] Implement slice-to-remove-prefix-or-suffix (FURB188) (#13256) 2024-09-09 15:08:44 +00:00
Calum Young
a98dbcee78 Add meta descriptions to rule pages (#13234)
## Summary

This PR updates the `scripts/generate_mkdocs.py` to add meta
descriptions to each rule as well as a fallback `site_description`.

I was initially planning to add this to `generate_docs.rs`; however
running `mdformat` on the rules caused the format of the additional
description to change into a state that mkdocs could not handle.

Fixes #13197 

## Test Plan

- Run  `python scripts/generate_mkdocs.py` to build the documentation
- Run `mkdocs serve -f mkdocs.public.yml` to serve the docs site locally
- Navigate to a rule on both the local site and the current production
site and note the addition of the description head tag. For example:
  - http://127.0.0.1:8000/ruff/rules/unused-import/

![image](https://github.com/user-attachments/assets/f47ae4fa-fe5b-42e1-8874-cb36a2ef2c9b)
  - https://docs.astral.sh/ruff/rules/unused-import/

![image](https://github.com/user-attachments/assets/6a650bff-2fcb-4df2-9cb6-40f66a2a5b8a)
2024-09-09 10:01:59 -04:00
Alex Waygood
1eb3e4057f [red-knot] Add definitions and limited type inference for exception handlers (#13267) 2024-09-09 07:35:15 -04:00
renovate[bot]
346dbf45b5 Update pre-commit dependencies (#13289)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-09-09 11:11:01 +00:00
renovate[bot]
f427a7a5a3 Update NPM Development dependencies (#13290)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-09-09 11:07:16 +00:00
Micha Reiser
955dc8804a Playground: Fix errors not shown on page load (#13262) 2024-09-09 11:47:39 +01:00
renovate[bot]
e1603e3dca Update dependency ruff to v0.6.4 (#13288) 2024-09-08 22:00:43 -04:00
Micha Reiser
35d45c1e4b refactor: Return copied TextRange in CommentRanges iterator (#13281) 2024-09-08 13:17:37 +02:00
Dylan
e4aa479515 [red-knot] Handle StringLiteral truncation (#13276)
When a type of the form `Literal["..."]` would be constructed with too
large of a string, this PR converts it to `LiteralString` instead.

We also extend inference for binary operations to include the case where
one of the operands is `LiteralString`.

Closes #13224
2024-09-07 20:25:09 -07:00
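Illustrative (the exact size threshold is internal):

```
x = "ab" + "cd"  # small enough to track precisely: Literal["abcd"]
# A concatenation whose result exceeds the internal limit is inferred as
# LiteralString rather than an enormous Literal["..."].
```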
Dylan
a7c936878d [ruff] Handle unary operators in decimal-from-float-literal (RUF032) (#13275)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-09-07 13:25:49 +00:00
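Illustrative of the unary-operator case now handled (example only):

```
from decimal import Decimal

Decimal(-4.2)    # RUF032 now also fires when the float literal carries a unary minus
Decimal("-4.2")  # preferred: avoids float rounding
```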
Micha Reiser
c3bcd5c842 Upgrade to Rust 1.81 (#13265) 2024-09-06 15:09:09 +02:00
Simon
594dee1b0b [red-knot] resolve source/stubs over namespace packages (#13254) 2024-09-06 12:14:26 +01:00
Carl Meyer
a4ebe7d344 [red-knot] consolidate diagnostic and inference tests (#13248)
Pull the tests from `types.rs` into `infer.rs`.

All of these are integration tests with the same basic form: create a
code sample, run type inference or check on it, and make some assertions
about types and/or diagnostics. These are the sort of tests we will want
to move into a test framework with a low-boilerplate custom textual
format. In the meantime, having them together (and more importantly,
their helper utilities together) means that it's easy to keep tests for
related language features together (iterable tests with other iterable
tests, callable tests with other callable tests), without an artificial
split based on tests which test diagnostics vs tests which test
inference. And it allows a single test to more easily test both
diagnostics and inference. (Ultimately in the test framework, they will
likely all test diagnostics, just in some cases the diagnostics will
come from `reveal_type()`.)
2024-09-05 09:15:22 -07:00
Carl Meyer
2a3775e525 [red-knot] AnnAssign with no RHS is not a Definition (#13247)
My plan for handling declared types is to introduce a `Declaration` in
addition to `Definition`. A `Declaration` is an annotation of a name
with a type; a `Definition` is an actual runtime assignment of a value
to a name. A few things (an annotated function parameter, an
annotated-assignment with an RHS) are both a `Definition` and a
`Declaration`.

This more cleanly separates type inference (only cares about
`Definition`) from declared types (only impacted by a `Declaration`),
and I think it will work out better than trying to squeeze everything
into `Definition`. One of the tests in this PR
(`annotation_only_assignment_transparent_to_local_inference`)
demonstrates one reason why. The statement `x: int` should have no
effect on local inference of the type of `x`; whatever the locally
inferred type of `x` was before `x: int` should still be the inferred
type after `x: int`. This is actually quite hard to do if `x: int` is
considered a `Definition`, because a core assumption of the use-def map
is that a `Definition` replaces the previous value. To achieve this
would require some hackery to effectively treat `x: int` sort of as if
it were `x: int = x`, but it's not really even equivalent to that, so
this approach gets quite ugly.

As a first step in this plan, this PR stops treating AnnAssign with no
RHS as a `Definition`, which fixes behavior in a couple added tests.

This actually makes things temporarily worse for the ellipsis-type test,
since it is defined in typeshed only using annotated assignments with no
RHS. This will be fixed properly by the upcoming addition of
declarations, which should also treat a declared type as sufficient to
import a name, at least from a stub.
2024-09-05 08:55:00 -07:00
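The transparency property described above, in brief:

```
x = "a"
x: int          # annotation-only: a declaration, not a binding
reveal_type(x)  # locally still Literal["a"]; the bare annotation changes nothing
```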
Dhruv Manilawala
65cc6ec41d Bump version to 0.6.4 (#13253) 2024-09-05 21:05:15 +05:30
Carl Meyer
66fe226608 [red-knot] fix lookup of nonlocal names in deferred annotations (#13236)
Initially I had deferred annotation name lookups reuse the "public
symbol type", since that gives the correct "from end of scope" view of
reaching definitions that we want. But there is a key difference; public
symbol types are based only on definitions in the queried scope (or
"name in the given namespace" in runtime terms), they don't ever look up
a name in nonlocal/global/builtin scopes. Deferred annotation resolution
should do this lookup.

Add a test, and fix deferred name resolution to support
nonlocal/global/builtin names.

Fixes #13176
2024-09-04 10:10:54 -07:00
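For example (annotations deferred via the `__future__` import; illustrative):

```
from __future__ import annotations

def outer():
    T = int
    def inner(x: T) -> None:
        # Deferred resolution of `T` must search the nonlocal (and global,
        # builtin) scopes, not just names local to `inner`.
        ...
```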
Alex Waygood
e965f9cc0e [red-knot] Infer Unknown for the loop var in async for loops (#13243) 2024-09-04 14:24:58 +00:00
Alex Waygood
0512428a6f [red-knot] Emit a diagnostic if the value of a starred expression or a yield from expression is not iterable (#13240) 2024-09-04 14:19:11 +00:00
Alex Waygood
46a457318d [red-knot] Add type inference for basic for loops (#13195) 2024-09-04 10:19:50 +00:00
Alex Waygood
57289099bb Make mypy pass on black in knot_benchmark (#13235) 2024-09-04 09:35:58 +00:00
Lucas Vieira dos Santos
9d1bd7a8a7 [pylint] removed dunder methods in Python 3 (PLW3201) (#13194)
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-09-04 08:23:08 +02:00
Lucas Vieira dos Santos
e37bde458e [ruff] implement useless if-else (RUF034) (#13218) 2024-09-04 08:22:17 +02:00
Dhruv Manilawala
862bd0c429 [red-knot] Add debug assert to check for duplicate definitions (#13214)
## Summary

Closes: #13085

## Test Plan

`cargo insta test --workspace`
2024-09-04 05:53:32 +00:00
Dhruv Manilawala
e1e9143c47 [red-knot] Handle multiple comprehension targets (#13213)
## Summary

Part of #13085, this PR updates the comprehension definition to handle
multiple targets.

## Test Plan

Update existing semantic index test case for comprehension with multiple
targets. Running corpus tests shouldn't panic.
2024-09-04 11:18:58 +05:30
Carl Meyer
3c4ec82aee [red-knot] support non-local name lookups (#13177)
Add support for non-local name lookups.

There's one TODO around annotated assignments without a RHS; these need
a fair amount of attention, which they'll get in an upcoming PR about
declared vs inferred types.

Fixes #11663
2024-09-03 14:18:05 -07:00
Carl Meyer
29c36a56b2 [red-knot] fix scope inference with deferred types (#13204)
Test coverage for #13131 wasn't as good as I thought it was, because
although we infer a lot of types in stubs in typeshed, we don't check
typeshed, and therefore we don't do scope-level inference and pull all
types for a scope. So we didn't really have good test coverage for
scope-level inference in a stub. And because of this, I got the code for
supporting that wrong, meaning that if we did scope-level inference with
deferred types, we'd end up never populating the deferred types in the
scope's `TypeInference`, which causes panics like #13160.

Here I both add test coverage by running the corpus tests both as `.py`
and as `.pyi` (which reveals the panic), and I fix the code to support
deferred types in scope inference.

This also revealed a problem with deferred types in generic functions,
which effectively span two scopes. That problem will require a bit more
thought, and I don't want to block this PR on it, so for now I just
don't defer annotations on generic functions.

Fixes #13160.
2024-09-03 11:20:43 -07:00
Alex Waygood
dfee65882b [red-knot] Inline Type::is_literal (#13230) 2024-09-03 15:02:50 +01:00
Alex Waygood
50c8ee5175 Fix virtual environment details in knot_benchmark (#13228) 2024-09-03 14:35:45 +01:00
Micha Reiser
c2aac5f826 Enable multithreading for pyright (#13227) 2024-09-03 11:24:42 +00:00
Alex Waygood
387af831f9 Improve detection of whether a symbol refers to a builtin exception (#13215) 2024-09-03 10:33:03 +00:00
Alex Waygood
9d517061f2 [red-knot] Reduce some repetitiveness in tests (#13135) 2024-09-03 11:26:44 +01:00
Dhruv Manilawala
facf6febf0 [red-knot] Remove match pattern definition visitor (#13209)
## Summary

This PR is based on this discussion:
https://github.com/astral-sh/ruff/pull/13147#discussion_r1739408653.

**Todo**

- [x] Add documentation for `MatchPatternState`

## Test Plan

`cargo insta test` and `cargo clippy`
2024-09-03 08:53:35 +00:00
Simon
46e687e8d1 [red-knot] Condense literals display by types (#13185)
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-09-03 07:23:28 +00:00
Micha Reiser
599103c933 Add a few missing #[return_ref] attributes (#13223) 2024-09-03 09:15:43 +02:00
Charlie Marsh
54df960a4a Use | for page separator in meta titles (#13221)
## Summary

Same as https://github.com/astral-sh/uv/pull/6953.
2024-09-03 00:46:15 +00:00
Charlie Marsh
3463683632 Update URL in structured schema (#13220) 2024-09-03 00:21:45 +00:00
Charlie Marsh
6b973b2556 Point docs to Astral favicon (#13219)
## Summary

Same as https://github.com/astral-sh/uv/pull/6951. Unfortunately we have
to use a single favicon for the docs.
2024-09-02 20:11:39 -04:00
Alex Waygood
c0e2c13d0d [flake8-pyi] Teach various rules that annotations might be stringized (#12951) 2024-09-02 13:40:06 +00:00
Ruben van Eldik
591a7a152c Handle singular case for incompatible rules warning (#13212)
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-09-02 15:16:06 +02:00
Alex Waygood
b7c7b4b387 Add a method to Checker for cached parsing of stringified type annotations (#13158) 2024-09-02 12:44:20 +00:00
Tom Kuson
ea0246c51a [ruff] Implement post-init-default (RUF033) (#13192)
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-02 13:10:55 +01:00
Ewoud Samuels
0f85769976 Fix example in PLE1520 documentation (#13210) 2024-09-02 13:19:43 +02:00
Dhruv Manilawala
47f0b45be3 Implement AstNode for Identifier (#13207)
## Summary

Follow-up to #13147, this PR implements the `AstNode` for `Identifier`.
This makes it easier to create the `NodeKey` in red knot because it uses
a generic method to construct the key from `AnyNodeRef` and is important
for definitions that are created only on identifiers instead of
`ExprName`.

## Test Plan

`cargo test` and `cargo clippy`
2024-09-02 16:27:12 +05:30
Dhruv Manilawala
f4bed22b05 Ignore invalid notebook in ecosystem checks (#13211) 2024-09-02 16:22:51 +05:30
Dhruv Manilawala
17eb65b26f Add definitions for match statement (#13147)
## Summary

This PR adds definition for match patterns.

## Test Plan

Update the existing test case for match statement symbols to verify that
the definitions are added as well.
2024-09-02 14:40:09 +05:30
Micha Reiser
9986397d56 Avoid allocating OrderedSet in UnionBuilder::simplify (#13206)
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-02 09:07:53 +00:00
Alex Waygood
58c641c92f Optimize some SemanticModel methods (#13091) 2024-09-02 10:03:52 +01:00
renovate[bot]
227fa4e035 Update Rust crate quick-junit to 0.5.0 (#13203)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [quick-junit](https://redirect.github.com/nextest-rs/quick-junit) | workspace.dependencies | minor | `0.4.0` -> `0.5.0` |

---

### Release Notes

<details>
<summary>nextest-rs/quick-junit (quick-junit)</summary>

### [`v0.5.0`](https://redirect.github.com/nextest-rs/quick-junit/blob/HEAD/CHANGELOG.md#050---2024-09-01)

[Compare Source](https://redirect.github.com/nextest-rs/quick-junit/compare/quick-junit-0.4.0...quick-junit-0.5.0)

##### Changed

- The `Output` type, which strips invalid XML characters from a string, has been renamed to `XmlString`.
- All internal storage now uses `XmlString` rather than `String`.

</details>


---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Dhruv Manilawala <dhruvmanila@gmail.com>
2024-09-02 06:58:58 +00:00
renovate[bot]
2b21b77ee6 Update dependency mkdocs to v1.6.1 (#13199)
This PR contains the following updates:

| Package | Change |
|---|---|
| [mkdocs](https://redirect.github.com/mkdocs/mkdocs) ([changelog](https://www.mkdocs.org/about/release-notes/)) | `==1.6.0` -> `==1.6.1` |

---

### Release Notes

<details>
<summary>mkdocs/mkdocs (mkdocs)</summary>

### [`v1.6.1`](https://redirect.github.com/mkdocs/mkdocs/releases/tag/1.6.1)

[Compare Source](https://redirect.github.com/mkdocs/mkdocs/compare/1.6.0...1.6.1)

### Version 1.6.1 (Friday 30th August, 2024)

##### Fixed

- Fix build error when environment variable `SOURCE_DATE_EPOCH=0` is set. [#3795](https://redirect.github.com/mkdocs/mkdocs/issues/3795)
- Fix build error when `mkdocs_theme.yml` config is empty. [#3700](https://redirect.github.com/mkdocs/mkdocs/issues/3700)
- Support `python -W` and `PYTHONWARNINGS` instead of overriding the configuration. [#3809](https://redirect.github.com/mkdocs/mkdocs/issues/3809)
- Support running with Docker under strict mode, by removing `0.0.0.0` dev server warning. [#3784](https://redirect.github.com/mkdocs/mkdocs/issues/3784)
- Drop unnecessary `changefreq` from `sitemap.xml`. [#3629](https://redirect.github.com/mkdocs/mkdocs/issues/3629)
- Fix JavaScript console error when closing menu dropdown. [#3774](https://redirect.github.com/mkdocs/mkdocs/issues/3774)
- Fix JavaScript console error that occur on repeated clicks. [#3730](https://redirect.github.com/mkdocs/mkdocs/issues/3730)
- Fix JavaScript console error that can occur on dropdown selections. [#3694](https://redirect.github.com/mkdocs/mkdocs/issues/3694)

##### Added

- Added translations for Dutch. [#3804](https://redirect.github.com/mkdocs/mkdocs/issues/3804)
- Added and updated translations for Chinese (Simplified). [#3684](https://redirect.github.com/mkdocs/mkdocs/issues/3684)

</details>


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-02 12:14:19 +05:30
renovate[bot]
ba272b093c Update dependency ruff to v0.6.3 (#13201)
This PR contains the following updates:

| Package | Change |
|---|---|
| [ruff](https://docs.astral.sh/ruff) ([source](https://redirect.github.com/astral-sh/ruff), [changelog](https://redirect.github.com/astral-sh/ruff/blob/main/CHANGELOG.md)) | `==0.6.2` -> `==0.6.3` |

---

### Release Notes

<details>
<summary>astral-sh/ruff (ruff)</summary>

### [`v0.6.3`](https://redirect.github.com/astral-sh/ruff/blob/HEAD/CHANGELOG.md#063)

[Compare Source](https://redirect.github.com/astral-sh/ruff/compare/0.6.2...0.6.3)

##### Preview features

- [`flake8-simplify`] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://redirect.github.com/astral-sh/ruff/pull/13104))
- [`pycodestyle`] Disable `E741` in stub files (`.pyi`) ([#13119](https://redirect.github.com/astral-sh/ruff/pull/13119))
- [`pydoclint`] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://redirect.github.com/astral-sh/ruff/pull/13064))

##### Rule changes

- [`flake8-async`] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://redirect.github.com/astral-sh/ruff/pull/13023))

##### Bug fixes

- [`FastAPI`] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://redirect.github.com/astral-sh/ruff/pull/13133))
- [`flake8-implicit-str-concat`] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://redirect.github.com/astral-sh/ruff/pull/13118))
- [`flake8-pytest-style`] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://redirect.github.com/astral-sh/ruff/pull/13092))
- [`pylint`] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://redirect.github.com/astral-sh/ruff/pull/13089))
- [`ruff`] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://redirect.github.com/astral-sh/ruff/pull/13109))
- [`ruff`] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://redirect.github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://redirect.github.com/astral-sh/ruff/pull/13077))

</details>


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-02 12:08:52 +05:30
renovate[bot]
8972e5d175 Update NPM Development dependencies (#13198)
This PR contains the following updates:

| Package | Change |
|---|---|
| [@types/react](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/react) | `18.3.4` -> `18.3.5` |
| [@typescript-eslint/eslint-plugin](https://typescript-eslint.io/packages/eslint-plugin) | `8.2.0` -> `8.3.0` |
| [@typescript-eslint/parser](https://typescript-eslint.io/packages/parser) | `8.2.0` -> `8.3.0` |
| [postcss](https://postcss.org/) | `8.4.41` -> `8.4.43` |
| [wrangler](https://redirect.github.com/cloudflare/workers-sdk) | `3.72.2` -> `3.73.0` |

---

### Release Notes

<details>
<summary>typescript-eslint/typescript-eslint
(@&#8203;typescript-eslint/eslint-plugin)</summary>

###
[`v8.3.0`](https://redirect.github.com/typescript-eslint/typescript-eslint/blob/HEAD/packages/eslint-plugin/CHANGELOG.md#830-2024-08-26)

[Compare
Source](https://redirect.github.com/typescript-eslint/typescript-eslint/compare/v8.2.0...v8.3.0)

##### 🚀 Features

-   **eslint-plugin:** \[no-deprecation] add rule

##### 🩹 Fixes

- **eslint-plugin:** \[no-unnecessary-template-expression] add missing
parentheses in autofix

- **eslint-plugin:** \[no-unnecessary-type-parameters] check mapped
alias type arguments

-   **utils:** add `TSDeclareFunction` to `functionTypeTypes`

-   **ast-spec:** use `Expression` in argument of `ThrowStatement`

##### ❤️  Thank You

-   Abraham Guo
-   Daichi Kamiyama
-   Josh Goldberg 
-   Kim Sang Du
-   Sukka
-   Vida Xie

You can read about our [versioning
strategy](https://main--typescript-eslint.netlify.app/users/versioning)
and
[releases](https://main--typescript-eslint.netlify.app/users/releases)
on our website.

</details>

<details>
<summary>typescript-eslint/typescript-eslint
(@&#8203;typescript-eslint/parser)</summary>

###
[`v8.3.0`](https://redirect.github.com/typescript-eslint/typescript-eslint/blob/HEAD/packages/parser/CHANGELOG.md#830-2024-08-26)

[Compare
Source](https://redirect.github.com/typescript-eslint/typescript-eslint/compare/v8.2.0...v8.3.0)

This was a version bump only for parser to align it with other projects,
there were no code changes.

You can read about our [versioning
strategy](https://main--typescript-eslint.netlify.app/users/versioning)
and
[releases](https://main--typescript-eslint.netlify.app/users/releases)
on our website.

</details>

<details>
<summary>postcss/postcss (postcss)</summary>

###
[`v8.4.43`](https://redirect.github.com/postcss/postcss/blob/HEAD/CHANGELOG.md#8443)

[Compare
Source](https://redirect.github.com/postcss/postcss/compare/8.4.42...8.4.43)

-   Fixed `markClean is not a function` error.

###
[`v8.4.42`](https://redirect.github.com/postcss/postcss/blob/HEAD/CHANGELOG.md#8442)

[Compare
Source](https://redirect.github.com/postcss/postcss/compare/8.4.41...8.4.42)

- Fixed CSS syntax error on long minified files (by
[@&#8203;varpstar](https://redirect.github.com/varpstar)).

</details>

<details>
<summary>cloudflare/workers-sdk (wrangler)</summary>

###
[`v3.73.0`](https://redirect.github.com/cloudflare/workers-sdk/blob/HEAD/packages/wrangler/CHANGELOG.md#3730)

[Compare
Source](https://redirect.github.com/cloudflare/workers-sdk/compare/wrangler@3.72.3...wrangler@3.73.0)

##### Minor Changes

-
[#&#8203;6571](https://redirect.github.com/cloudflare/workers-sdk/pull/6571)
[`a7e1bfe`](a7e1bfea3e)
Thanks [@&#8203;penalosa](https://redirect.github.com/penalosa)! - feat:
Add deployment http targets to wrangler deploy logs, and add url to
pages deploy logs

-
[#&#8203;6497](https://redirect.github.com/cloudflare/workers-sdk/pull/6497)
[`3bd833c`](3bd833cbe2)
Thanks [@&#8203;WalshyDev](https://redirect.github.com/WalshyDev)! -
chore: move `wrangler versions ...`, `wrangler deployments ...`,
`wrangler rollback` and `wrangler triggers ...` out of experimental and
open beta.
These are now available without the `--x-versions` flag; you can
continue to pass it without issue to keep compatibility with all
existing usage.

A few of the commands had output that wasn't guarded by
`--x-versions`; those have been updated to use the newer output. We have
tried to keep compatibility where possible (for example: `wrangler
rollback` will continue to output "Worker Version ID:" so users can
continue to grab the ID).
If you wish to use the old versions of the commands, you can pass the
`--no-x-versions` flag. Note that these will be removed in the future,
so please work on migrating.

-
[#&#8203;6586](https://redirect.github.com/cloudflare/workers-sdk/pull/6586)
[`72ea742`](72ea74214d)
Thanks [@&#8203;penalosa](https://redirect.github.com/penalosa)! - feat:
Inject a 404 response for browser requested `favicon.ico` files when
loading the `/__scheduled` page for scheduled-only Workers

-
[#&#8203;6497](https://redirect.github.com/cloudflare/workers-sdk/pull/6497)
[`3bd833c`](3bd833cbe2)
Thanks [@&#8203;WalshyDev](https://redirect.github.com/WalshyDev)! -
feat: update `wrangler deploy` to use the new versions and deployments
API.
This should have zero user-facing impact but sets up the most used
command to deploy Workers to use the new recommended APIs and move away
from the old ones.
We will still call the old upload path where required (e.g. Durable
Object migration or Service Worker format).

##### Patch Changes

-
[#&#8203;6563](https://redirect.github.com/cloudflare/workers-sdk/pull/6563)
[`da48a70`](da48a70369)
Thanks
[@&#8203;threepointone](https://redirect.github.com/threepointone)! -
chore: remove the warning about local mode flag being removed in the
future

-
[#&#8203;6595](https://redirect.github.com/cloudflare/workers-sdk/pull/6595)
[`0a76d7e`](0a76d7e550)
Thanks [@&#8203;vicb](https://redirect.github.com/vicb)! - feat: update
unenv to the latest available version

-
[#&#8203;5738](https://redirect.github.com/cloudflare/workers-sdk/pull/5738)
[`c2460c4`](c2460c4d89)
Thanks [@&#8203;penalosa](https://redirect.github.com/penalosa)! - fix:
Prevent spaces in names when validating

-
[#&#8203;6586](https://redirect.github.com/cloudflare/workers-sdk/pull/6586)
[`72ea742`](72ea74214d)
Thanks [@&#8203;penalosa](https://redirect.github.com/penalosa)! -
chore: Improve Miniflare CRON warning wording

-
[#&#8203;6593](https://redirect.github.com/cloudflare/workers-sdk/pull/6593)
[`f097cb7`](f097cb73be)
Thanks [@&#8203;vicb](https://redirect.github.com/vicb)! - fix: remove
`experimental:` prefix requirement for nodejs_compat_v2

    See https://jira.cfdata.org/browse/DEVDASH-218

-
[#&#8203;6572](https://redirect.github.com/cloudflare/workers-sdk/pull/6572)
[`0d83428`](0d834284d0)
Thanks [@&#8203;penalosa](https://redirect.github.com/penalosa)! - fix:
Show a clearer user error when trying to use a python worker without the
`python_workers` compatibility flag specified

-
[#&#8203;6589](https://redirect.github.com/cloudflare/workers-sdk/pull/6589)
[`f4c8cea`](f4c8cea142)
Thanks [@&#8203;vicb](https://redirect.github.com/vicb)! - feat: update
unenv to the latest available version

- Updated dependencies
\[[`45ad2e0`](45ad2e0c83)]:
-
[@&#8203;cloudflare/workers-shared](https://redirect.github.com/cloudflare/workers-shared)[@&#8203;0](https://redirect.github.com/0).4.1

###
[`v3.72.3`](https://redirect.github.com/cloudflare/workers-sdk/blob/HEAD/packages/wrangler/CHANGELOG.md#3723)

[Compare
Source](https://redirect.github.com/cloudflare/workers-sdk/compare/wrangler@3.72.2...wrangler@3.72.3)

##### Patch Changes

-
[#&#8203;6548](https://redirect.github.com/cloudflare/workers-sdk/pull/6548)
[`439e63a`](439e63a4ac)
Thanks [@&#8203;garvit-gupta](https://redirect.github.com/garvit-gupta)!
- fix: Fix Vectorize getVectors, deleteVectors payload in Wrangler
Client; VS-271

-
[#&#8203;6554](https://redirect.github.com/cloudflare/workers-sdk/pull/6554)
[`46aee5d`](46aee5d16c)
Thanks [@&#8203;andyjessop](https://redirect.github.com/andyjessop)! -
fix: nodejs_compat flags no longer error when running wrangler types
--x-include-runtime

-
[#&#8203;6548](https://redirect.github.com/cloudflare/workers-sdk/pull/6548)
[`439e63a`](439e63a4ac)
Thanks [@&#8203;garvit-gupta](https://redirect.github.com/garvit-gupta)!
- fix: Add content-type header to Vectorize POST operations;
[#&#8203;6516](https://redirect.github.com/cloudflare/workers-sdk/issues/6516)/VS-269

-
[#&#8203;6566](https://redirect.github.com/cloudflare/workers-sdk/pull/6566)
[`669ec1c`](669ec1c4d1)
Thanks [@&#8203;penalosa](https://redirect.github.com/penalosa)! - fix:
Ensure esbuild warnings are logged when running wrangler deploy

- Updated dependencies
\[[`6c057d1`](6c057d10b2)]:
-
[@&#8203;cloudflare/workers-shared](https://redirect.github.com/cloudflare/workers-shared)[@&#8203;0](https://redirect.github.com/0).4.0

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC),
Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

👻 **Immortal**: This PR will be recreated if closed unmerged. Get
[config
help](https://redirect.github.com/renovatebot/renovate/discussions) if
that's undesired.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/astral-sh/ruff).


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-02 12:08:21 +05:30
renovate[bot]
9ac2e61bad Update dependency react-resizable-panels to v2.1.2 (#13200)
This PR contains the following updates:

| Package | Change | Age | Adoption | Passing | Confidence |
|---|---|---|---|---|---|
|
[react-resizable-panels](https://redirect.github.com/bvaughn/react-resizable-panels)
| [`2.1.1` ->
`2.1.2`](https://renovatebot.com/diffs/npm/react-resizable-panels/2.1.1/2.1.2)
|
[![age](https://developer.mend.io/api/mc/badges/age/npm/react-resizable-panels/2.1.2?slim=true)](https://docs.renovatebot.com/merge-confidence/)
|
[![adoption](https://developer.mend.io/api/mc/badges/adoption/npm/react-resizable-panels/2.1.2?slim=true)](https://docs.renovatebot.com/merge-confidence/)
|
[![passing](https://developer.mend.io/api/mc/badges/compatibility/npm/react-resizable-panels/2.1.1/2.1.2?slim=true)](https://docs.renovatebot.com/merge-confidence/)
|
[![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/react-resizable-panels/2.1.1/2.1.2?slim=true)](https://docs.renovatebot.com/merge-confidence/)
|

---

### Release Notes

<details>
<summary>bvaughn/react-resizable-panels
(react-resizable-panels)</summary>

###
[`v2.1.2`](https://redirect.github.com/bvaughn/react-resizable-panels/compare/2.1.1...2.1.2)

[Compare
Source](https://redirect.github.com/bvaughn/react-resizable-panels/compare/2.1.1...2.1.2)

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC),
Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/astral-sh/ruff).


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-02 12:06:11 +05:30
renovate[bot]
6deb056117 Update pre-commit dependencies (#13202)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
|
[astral-sh/ruff-pre-commit](https://togithub.com/astral-sh/ruff-pre-commit)
| repository | patch | `v0.6.2` -> `v0.6.3` |
| [crate-ci/typos](https://togithub.com/crate-ci/typos) | repository |
patch | `v1.24.1` -> `v1.24.3` |

Note: The `pre-commit` manager in Renovate is not supported by the
`pre-commit` maintainers or community. Please do not report any problems
there, instead [create a Discussion in the Renovate
repository](https://togithub.com/renovatebot/renovate/discussions/new)
if you have any questions.

---

### Release Notes

<details>
<summary>astral-sh/ruff-pre-commit (astral-sh/ruff-pre-commit)</summary>

###
[`v0.6.3`](https://togithub.com/astral-sh/ruff-pre-commit/releases/tag/v0.6.3)

[Compare
Source](https://togithub.com/astral-sh/ruff-pre-commit/compare/v0.6.2...v0.6.3)

See: https://github.com/astral-sh/ruff/releases/tag/0.6.3

</details>

<details>
<summary>crate-ci/typos (crate-ci/typos)</summary>

###
[`v1.24.3`](https://togithub.com/crate-ci/typos/releases/tag/v1.24.3)

[Compare
Source](https://togithub.com/crate-ci/typos/compare/v1.24.2...v1.24.3)

#### \[1.24.3] - 2024-08-30

##### Fixes

- Updated the dictionary with the [August
2024](https://togithub.com/crate-ci/typos/issues/1069) changes

###
[`v1.24.2`](https://togithub.com/crate-ci/typos/releases/tag/v1.24.2)

[Compare
Source](https://togithub.com/crate-ci/typos/compare/v1.24.1...v1.24.2)

#### \[1.24.2] - 2024-08-30

##### Performance

- Cap unbounded parsing to avoid worst case performance (hit with test
data)

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC),
Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

👻 **Immortal**: This PR will be recreated if closed unmerged. Get
[config help](https://togithub.com/renovatebot/renovate/discussions) if
that's undesired.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/astral-sh/ruff).


---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Dhruv Manilawala <dhruvmanila@gmail.com>
2024-09-02 06:33:39 +00:00
Charlie Marsh
c4aad4b161 Use dynamic builtins list based on Python version (#13172)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13037.
2024-09-01 17:03:44 +00:00
Alex Waygood
3abd5c08a5 [pylint] Recurse into subscript subexpressions when searching for list/dict lookups (PLR1733, PLR1736) (#13186)
## Summary

The `SequenceIndexVisitor` currently does not recurse into
subexpressions of subscripts when searching for subscript accesses that
would trigger this rule. That means that we don't currently detect
violations of the rule on snippets like this:

```py
data = {"a": 1, "b": 2}
column_names = ["a", "b"]
for index, column_name in enumerate(column_names):
    _ = data[column_names[index]]
```

Fixes #13183

## Test Plan

`cargo test -p ruff_linter`
2024-09-01 17:22:45 +01:00
Alex Waygood
2014cba87f [red-knot] Fix call expression inference edge case for decorated functions (#13191) 2024-09-01 16:19:40 +01:00
Alex Waygood
5661353334 Fix typo in pydoclint enum variant name (#13193) 2024-09-01 11:58:47 +00:00
Luo Peng
dd5d0d523c Enrich messages of SARIF results (#13180) 2024-09-01 12:13:22 +01:00
James Braza
1be8c2e340 Expand docs for ASYNC109 (#13146)
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-09-01 10:16:57 +00:00
Dylan
52d8847b60 [red-knot] Literal[True,False] normalized to builtins.bool (#13178)
The `UnionBuilder` builds `builtins.bool` when handed `Literal[True]`
and `Literal[False]`.

Caveat: if the builtins module somehow cannot be resolved, the builder
falls back to the union of the two literal types.

First task from #12694

---------

Co-authored-by: Carl Meyer <carl@astral.sh>
2024-08-31 22:57:50 -07:00
Alex Waygood
d3b6e8f58b Remove pylint::helpers::CmpOpExt (#13189) 2024-09-01 01:55:24 +00:00
Tom Kuson
bf620dcb38 [pydoclint] Permit yielding None in DOC402 and DOC403 (#13148)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-09-01 02:03:39 +01:00
Alex Waygood
fae0573817 [red-knot] Fix async function edge case for inference of call expressions (#13187) 2024-09-01 01:58:35 +01:00
github-actions[bot]
0c23b868dc Sync vendored typeshed stubs (#13188)
Co-authored-by: typeshedbot <>
2024-09-01 01:41:27 +01:00
Dylan
3ceedf76b8 [red-knot] Infer type of class constructor call expression (#13171)
This tiny PR implements the following type inference: the type of
`Foo(...)` will be `Foo`.

---------

Co-authored-by: Carl Meyer <carl@astral.sh>
2024-08-30 16:48:06 -07:00
Charlie Marsh
828871dc5c [pyupgrade] Detect aiofiles.open calls in UP015 (#13173)
## Summary

Closes https://github.com/astral-sh/ruff/issues/12879.
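
A minimal sketch of the kind of call the rule now covers, mirroring its
existing handling of `builtins.open` (`aiofiles.open` defaults to mode
`"r"`):

```python
import aiofiles

async def read_config(path: str) -> str:
    # UP015: "r" is already the default mode, so the fix drops it,
    # rewriting the call to `aiofiles.open(path)`.
    async with aiofiles.open(path, "r") as f:
        return await f.read()
```
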
2024-08-30 19:39:00 -04:00
Charlie Marsh
ee21fc7fd8 Mark sys.version_info[0] < 3 and similar comparisons as outdated (#13175)
## Summary

Closes https://github.com/astral-sh/ruff/issues/12993.
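
For illustration, the kind of comparison now marked as outdated
(`UP036`, `outdated-version-block`), whose fix removes the unreachable
branch:

```python
import sys

if sys.version_info[0] < 3:  # UP036: unreachable on any supported Python
    print("running on Python 2")
else:
    print("running on Python 3")
```
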
2024-08-30 19:38:46 -04:00
Chris Krycho
28ab5f4065 [red-knot] implement basic call expression inference (#13164)
## Summary

Adds basic support for inferring the type resulting from a call
expression. This only works for the *result* of call expressions; it
performs no inference on parameters. It also intentionally does nothing
with class instantiation, `__call__` implementors, or lambdas.
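
A sketch of that scope: only the result type is inferred, taken from the
return annotation (names here are illustrative):

```python
def answer() -> int:
    return 42

x = answer()  # inferred as `int` from the return annotation

f = lambda: 0
y = f()  # lambdas are intentionally out of scope for now
```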

## Test Plan

Adds a test that it infers the right thing!

---------

Co-authored-by: Carl Meyer <carl@astral.sh>
2024-08-30 12:51:29 -07:00
Charlie Marsh
a73bebcf15 Avoid no-self-use for attrs-style validators (#13166)
## Summary

Closes https://github.com/astral-sh/ruff/issues/12568.
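
A sketch of the `attrs`-style validator pattern in question; the
`(self, attribute, value)` signature is mandated by `attrs`, so the
unused `self` should not trigger `no-self-use` (`PLR6301`):

```python
import attrs

@attrs.define
class Point:
    x: int = attrs.field()

    @x.validator
    def _check_x(self, attribute, value):
        # `self` is unused, but attrs requires this exact signature,
        # so the rule no longer fires here.
        if value < 0:
            raise ValueError("x must be non-negative")
```
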
2024-08-30 12:39:05 -04:00
Charlie Marsh
34dafb67a2 Treat sep arguments with effects as unsafe removals (#13165)
## Summary

Closes https://github.com/astral-sh/ruff/issues/13126.
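
A sketch of the newly unsafe case, assuming a hypothetical `get_sep`
helper whose call has a side effect:

```python
def get_sep() -> str:
    print("computing separator")  # side effect
    return ", "

# With a single positional argument, `sep` is unnecessary, so FURB105
# offers to remove it; because `get_sep()` has a side effect, the fix
# is now marked unsafe instead of being applied automatically.
print("hello", sep=get_sep())
```
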
2024-08-30 12:17:47 -04:00
Chris Krycho
f8656ff35e [red-knot] infer basic (name-based) annotation expressions (#13130)
## Summary

- Introduce methods for inferring annotation and type expressions.
- Correctly infer explicit return types from functions where they are
simple names that can be resolved in scope.

Contributes to #12701 by way of helping unlock call expressions (this
does not remotely finish that, as it stands, but it gets us moving that
direction).

## Test Plan

Added a test for function return types which use the name form of an
annotation expression, since this is aiming toward call expressions.
When we extend this to working for other annotation and type expression
positions, we should add explicit tests for those as well.

---------

Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
Co-authored-by: Carl Meyer <carl@astral.sh>
2024-08-30 08:24:36 -07:00
Alex Waygood
34b4732c46 [flake8-pyi] Respect pep8_naming.classmethod-decorators settings when determining if a method is a classmethod in custom-type-var-return-type (PYI019) (#13162) 2024-08-30 14:24:01 +01:00
zhoufanjin
ce68f1cc1b Fix some typos in comments (#13157) 2024-08-30 10:42:23 +00:00
Alex Waygood
281e6d9791 [pydocstyle] Improve heuristics for detecting Google-style docstrings (#13142) 2024-08-29 16:33:18 +01:00
Dhruv Manilawala
ee258caed7 Bump version to 0.6.3 (#13152) 2024-08-29 20:29:33 +05:30
Aditya Pal
b4d9d26020 Update faq.md to highlight changes to src (#13145)
This attempts to close https://github.com/astral-sh/ruff/issues/13134

## Summary

Documentation change to address
https://github.com/astral-sh/ruff/issues/13134

## Test Plan

Markdown Changes were previewed
2024-08-29 11:57:53 +00:00
Steve C
a99832088a [ruff] - extend comment deletions for unused-noqa (RUF100) (#13105)
## Summary

Extends deletions for RUF100 to remove trailing text from `noqa`
directives while preserving any subsequent comments on the same line.

When the fix deletes a comment up to another comment on the same line,
the whitespace between them is now included in the autofix shown in the
diagnostic as well. Leading whitespace before the removed comment is
not, though.

Fixes #12251 
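
A before/after sketch of the extended fix (the suppressed rule code here
is illustrative):

```python
# Before: the `noqa` suppresses nothing, so RUF100 flags it as unused.
x = 1  # noqa: E501 kept for legacy reasons  # TODO: drop this

# After: the directive and its trailing text are deleted, while the
# comment that follows on the same line is preserved.
x = 1  # TODO: drop this
```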

## Test Plan

`cargo test`
2024-08-29 10:50:16 +05:30
Carl Meyer
770ef2ab27 [red-knot] support deferred evaluation of type expressions (#13131)
Prototype deferred evaluation of type expressions by deferring
evaluation of class bases in a stub file. This allows self-referential
class definitions, as occur with the definition of `str` in typeshed
(which inherits `Sequence[str]`).
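
A minimal sketch of the self-referential stub pattern this unlocks,
modeled on typeshed's `builtins.pyi` (imports simplified):

```python
# In a stub file, class bases are now evaluated lazily, so a class can
# name itself in its own bases without creating a resolution cycle:
from typing import Sequence

class str(Sequence[str]): ...
```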

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-08-28 11:41:01 -07:00
Alex Waygood
c6023c03a2 [red-knot] Add docs on using RAYON_NUM_THREADS for better logging (#13140)
Follow-up to #13049. We check files concurrently now; to get readable
logs, you probably want to switch that off.
2024-08-28 17:14:56 +01:00
Adam Kuhn
df694ca1c1 [FastAPI] Avoid introducing invalid syntax in fix for fast-api-non-annotated-dependency (FAST002) (#13133) 2024-08-28 15:29:00 +00:00
Calum Young
2e75cfbfe7 Format PYI examples in docs as .pyi-file snippets (#13116) 2024-08-28 13:20:40 +01:00
Alex Waygood
cfafaa7637 [red-knot] Remove very noisy tracing call when resolving ImportFrom statements (#13136) 2024-08-28 10:05:00 +00:00
Jonathan Plasse
3e9c7adeee Replace crates by dependi for VS Code Dev Container (#13125)
## Summary
The `crates` extension now recommends migrating to `dependi`:
![Screenshot from 2024-08-27
19-33-39](https://github.com/user-attachments/assets/c8f6480e-a07c-41a5-8bd0-d808c5a987a0)

## Test Plan
Opening the dev container correctly installs `dependi`.
2024-08-28 09:53:27 +05:30
Chris Krycho
81cd438d88 red-knot: infer and display ellipsis type (#13124)
## Summary

Just what it says on the tin: adds basic `EllipsisType` inference for
any time `...` appears in the AST.

## Test Plan

Test that `x = ...` produces exactly what we would expect.

---------

Co-authored-by: Carl Meyer <carl@oddbird.net>
2024-08-27 20:52:53 +01:00
Dylan
483748c188 [flake8-implicit-str-concat] Normalize octals before merging concatenated strings in single-line-implicit-string-concatenation (ISC001) (#13118)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-08-27 18:53:27 +01:00
Calum Young
eb3dc37faa Add note about how Ruff handles PYI files wrt target version (#13111)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-08-27 17:28:22 +00:00
Chris Krycho
aba1802828 red-knot: infer multiplication for strings and integers (#13117)
## Summary

The resulting type when multiplying a string literal by an integer
literal is one of two types:

- `StringLiteral`, in the case where it is a reasonably small resulting
string (arbitrarily bounded here to 4096 bytes, roughly a page on many
operating systems), including the fully expanded string.
- `LiteralString`, matching Pyright etc., for strings larger than that.
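
A sketch of the two outcomes, using the 4096-byte bound described above:

```python
SMALL = "abc" * 100     # 300 bytes, within the bound: the expanded string literal
LARGE = "abc" * 10_000  # 30,000 bytes, over the bound: `LiteralString`
```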

Additionally:

- Switch to using `Box<str>` instead of `String` for the internal value
of `StringLiteral`, saving some non-trivial byte overhead (and keeping
the total number of allocations the same).
- Be clearer and more accurate about which types we ought to defer to in
`StringLiteral` and `LiteralString` member lookup.

## Test Plan

Added a test case covering multiplication times integers: positive,
negative, zero, and in and out of bounds.

---------

Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
Co-authored-by: Carl Meyer <carl@astral.sh>
2024-08-27 09:00:36 -07:00
Tom Kuson
96b42b0c8f [DOC201] Permit explicit None in functions that only return None (#13064)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-08-27 16:00:18 +00:00
Niels Wouda
e6d0c4a65d Add time, tzinfo, and timezone as immutable function calls (#13109) 2024-08-27 15:51:32 +01:00
Calum Young
4e1b289a67 Disable E741 in stub files (#13119)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-08-27 15:02:14 +01:00
Alex Waygood
a5ef124201 [red-knot] Improve the accuracy of the unresolved-import check (#13055) 2024-08-27 14:17:22 +01:00
Chris Krycho
390bb43276 red-knot: flatten match expression in infer_binary_expression (#13115)
## Summary

This fixes the outstanding TODO and makes it easier to work with new
cases. (Tidy first, *then* implement, basically!)

## Test Plan

After making this change all the existing tests still pass. A classic
refactor win. 🎉
2024-08-26 12:34:07 -07:00
Chris Krycho
fe8b15291f red-knot: implement unary minus on integer literals (#13114)
## Summary

Add support for the first unary operator: negating integer literals. The
resulting type is another integer literal whose value is the negation of
the original. All other types continue to return `Type::Unknown` for the
present, but this is designed to make it easy to extend with other
combinations of operator and operand.

Contributes to #12701.
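
A sketch of the inference, per the description above:

```python
a = -5   # inferred as the integer literal -5
b = --5  # double negation is nested unary minus: inferred as the literal 5
c = -123456789012345678901234567890  # arbitrarily large literals work too
```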

## Test Plan

Add tests with basic negation, including of very large integers and
double negation.
2024-08-26 12:08:18 -07:00
Dhruv Manilawala
c8e01d7c53 Update dependency in insider requirements.txt (#13112) 2024-08-26 19:02:27 +00:00
Chris Krycho
c4d628cc4c red-knot: infer string literal types (#13113)
## Summary

Introduce a `StringLiteralType` with corresponding `Display` type and a
relatively basic test that the resulting representation is as expected.

Note: we currently always allocate for `StringLiteral` types. This may
end up being a perf issue later, at which point we may want to look at
other ways of representing `value` here, i.e. with some kind of smarter
string structure which can reuse types. That is most likely to show up
with e.g. concatenation.

Contributes to #12701.

## Test Plan

Added a test for individual strings with both single and double quotes
as well as concatenated strings with both forms.
2024-08-26 11:42:34 -07:00
Calum Young
ab3648c4c5 Format docs with ruff formatter (#13087)
## Summary

Now that Ruff provides a formatter, there is no need to rely on Black to
check that the docs are formatted correctly in
`check_docs_formatted.py`. This PR swaps out Black for the Ruff
formatter and updates inconsistencies between the two.

This PR will be a precursor to another PR
([branch](https://github.com/calumy/ruff/tree/format-pyi-in-docs)),
updating the `check_docs_formatted.py` script to check for pyi files,
fixing #11568.

## Test Plan

- CI to check that the docs are formatted correctly using the updated
script.
2024-08-26 21:25:10 +05:30
Teodoro Freund
a822fd6642 Fixed benchmarking section in Contributing guide (#13107)
## Summary

Noticed a wrong tip in the Contributing guide: `cargo benchmark lexer`
wouldn't run any benches. Probably a missed update from #9535.

It may make sense to remove the `cargo benchmark` command from the guide
altogether, but that's up to the maintainers.
2024-08-26 18:49:01 +05:30
Calum Young
f8f2e2a442 Add anchor tags to README headers (#13083)
## Summary

This pull request adds anchor tags to the elements referenced in the
table of contents section of the readme used on
[PyPI](https://pypi.org/project/ruff/) as an attempt to fix #7257. This
update follows [this
suggestion](https://github.com/pypa/readme_renderer/issues/169#issuecomment-808577486)
to add anchor tags (with no spaces) after the title that is to be linked
to.

## Test Plan

- This has been tested on GitHub to check that the additional tags do
not interfere with how the read me is rendered; see:
https://github.com/calumy/ruff/blob/add-links-to-pypi-docs/README.md
- The MkDocs pages were generated using the `generate_mkdocs.py` script;
however, as the added tags fall beyond the comment `<!-- End section:
Overview -->`, they are excluded and will not change how the docs are
rendered.
- I was unable to verify how PyPI renders this change; any suggestions
would be appreciated, and I can follow up on this. The four
thumbs-up/heart reactions on [this
comment](https://github.com/pypa/readme_renderer/issues/169#issuecomment-808577486)
and [this
suggestion](https://github.com/pypa/readme_renderer/issues/169#issuecomment-1765616890)
suggest that this approach should work.
2024-08-26 12:42:35 +05:30
Steve C
0b5828a1e8 [flake8-simplify] - extend open-file-with-context-handler to work with dbm.sqlite3 (SIM115) (#13104)
## Summary

Adds the upcoming `dbm.sqlite3` module to the rule that suggests using
context managers when opening things.

See: https://docs.python.org/3.13/library/dbm.html#module-dbm.sqlite3
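
A sketch of the newly covered pattern; `dbm.sqlite3` lands in Python
3.13, and `dbm` objects support the context-manager protocol:

```python
import dbm.sqlite3  # Python 3.13+

# SIM115: opened without a context manager
db = dbm.sqlite3.open("cache.db", "c")
db[b"key"] = b"value"
db.close()

# Preferred
with dbm.sqlite3.open("cache.db", "c") as db:
    db[b"key"] = b"value"
```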

## Test Plan

`cargo test`
2024-08-26 08:11:03 +01:00
Steve C
5af48337a5 [pylint] - fix incorrect starred expression replacement for nested-min-max (PLW3301) (#13089)
## Summary

Moves the min/max detection up, and fixes #13088 

## Test Plan

`cargo test`
2024-08-26 10:01:38 +05:30
renovate[bot]
39ad6b9472 Update tj-actions/changed-files action to v45 (#13102) 2024-08-25 22:11:24 -04:00
renovate[bot]
41dec93cd2 Update dependency monaco-editor to ^0.51.0 (#13101) 2024-08-25 22:11:15 -04:00
renovate[bot]
aee2caa733 Update NPM Development dependencies (#13100) 2024-08-25 22:11:07 -04:00
renovate[bot]
fe5544e137 Update dependency react-resizable-panels to v2.1.1 (#13098) 2024-08-25 22:11:01 -04:00
renovate[bot]
14c014a48b Update Rust crate syn to v2.0.76 (#13097) 2024-08-25 22:10:57 -04:00
renovate[bot]
ecd0597d6b Update Rust crate serde_json to v1.0.127 (#13096) 2024-08-25 22:10:50 -04:00
renovate[bot]
202271fba6 Update Rust crate serde to v1.0.209 (#13095) 2024-08-25 22:10:45 -04:00
renovate[bot]
4bdb0b4f86 Update Rust crate quote to v1.0.37 (#13094) 2024-08-25 22:10:38 -04:00
renovate[bot]
2286f916c1 Update Rust crate libc to v0.2.158 (#13093) 2024-08-25 22:10:32 -04:00
renovate[bot]
1e4c944251 Update pre-commit dependencies (#13099)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-08-26 01:49:41 +01:00
Calum Young
f50f8732e9 [flake8-pytest-style] Improve help message for pytest-incorrect-mark-parentheses-style (PT023) (#13092) 2024-08-26 01:37:57 +01:00
Micha Reiser
ecab04e338 Basic concurrent checking (#13049) 2024-08-24 09:53:27 +01:00
Dylan
8c09496b07 [red-knot] Resolve function annotations before adding function symbol (#13084)
This PR has the `SemanticIndexBuilder` visit function definition
annotations before adding the function symbol/name to the builder.

For example, the following snippet no longer causes a panic:

```python
def bool(x) -> bool:
    return True
```

Note: This fix changes the ordering of the global symbol table.

Closes #13069
2024-08-23 19:31:36 -07:00
Alex Waygood
d19fd1b91c [red-knot] Add symbols for for loop variables (#13075)
## Summary

This PR adds symbols introduced by `for` loops to red-knot:
- `x` in `for x in range(10): pass`
- `x` and `y` in `for x, y in d.items(): pass`
- `a`, `b`, `c` and `d` in `for [((a,), b), (c, d)] in foo: pass`

## Test Plan

Several tests added, and the assertion in the benchmarks has been
updated.

---------

Co-authored-by: Micha Reiser <micha@reiser.io>
2024-08-23 23:40:27 +01:00
Dhruv Manilawala
99df859e20 Include all required keys for Zed settings (#13082)
## Summary

Closes: #13081
2024-08-23 16:18:05 +00:00
Micha Reiser
2d5fe9a6d3 Fix dark theme on initial page load (#13077) 2024-08-23 12:53:43 +00:00
jesse
1f2cb09853 [async-function-with-timeout] Disable check for asyncio before Python 3.11 (ASYNC109) (#13023)
Co-authored-by: Micha Reiser <micha@reiser.io>
2024-08-23 08:02:53 +00:00
Dhruv Manilawala
cfe25ab465 [red-knot] Support untitled files in the server (#13044)
## Summary

This PR adds support for untitled files in the red knot server.

## Test Plan

https://github.com/user-attachments/assets/57fa5db6-e1ad-4694-ae5f-c47a21eaa82b
2024-08-23 12:47:35 +05:30
Dhruv Manilawala
551ed2706b [red-knot] Simplify virtual file support (#13043)
## Summary

This PR simplifies the virtual file support in the red knot core,
specifically:

* Update `File::add_virtual_file` method to `File::virtual_file` which
will always create a new virtual file and override the existing entry in
the lookup table
* Add `VirtualFile` which is a wrapper around `File` and provides
methods to increment the file revision / close the virtual file
* Add a new `File::try_virtual_file` to lookup the `VirtualFile` from
`Files`
* Add `File::sync_virtual_path` which takes in the `SystemVirtualPath`,
looks up the `VirtualFile` for it and calls the `sync` method to
increment the file revision
* Removes the `virtual_path_metadata` method on `System` trait

## Test Plan

- [x] Make sure the existing red knot tests pass
- [x] Updated code works well with the LSP
2024-08-23 07:04:15 +00:00
Dhruv Manilawala
21c5606793 [red-knot] Support textDocument/didChange notification (#13042)
## Summary

This PR adds support for `textDocument/didChange` notification.

There seems to be a bug (probably in Salsa) where it panics with:
```
2024-08-22 15:33:38.802 [info] panicked at /Users/dhruv/.cargo/git/checkouts/salsa-61760caba2b17ca5/f608ff8/src/tracked_struct.rs:377:9:
two concurrent writers to Id(4800), should not be possible
```

## Test Plan


https://github.com/user-attachments/assets/81055feb-ba8e-4acf-ad2f-94084a3efead
2024-08-23 06:58:54 +00:00
Dhruv Manilawala
c73a7bb929 [red-knot] Support files outside of any workspace (#13041)
## Summary

This PR adds basic support for files outside of any workspace in the red
knot server.

This also limits the red knot server to only work in a single workspace.
The server will not start if there are multiple workspaces.

## Test Plan

https://github.com/user-attachments/assets/de601387-0ad5-433c-9d2c-7b6ae5137654
2024-08-23 06:51:48 +00:00
Micha Reiser
4f6accb5c6 Add basic red knot benchmark (#13026)
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2024-08-23 08:22:42 +02:00
Micha Reiser
1ca14e4335 Move collection of parse errors to check_file (#13059) 2024-08-23 08:22:12 +02:00
Teodoro Freund
b9c8113a8a Added bytes type and some inference (#13061)
## Summary

This PR adds the `bytes` type to red-knot:
- Added the `bytes` type
- Added support for bytes literals
- Support for the `+` operator

Improves on #12701 
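
A minimal illustration of the literal inference and folding:

```python
x = b"abc"                 # inferred as the bytes literal b"abc"
y = b"hello, " + b"world"  # `+` folds the operands: b"hello, world"
```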

Big TODO on supporting and normalizing r-prefixed bytestrings
(`rb"hello\n"`)

## Test Plan

Added a test for bytes literals, concatenation, and corner values.
2024-08-22 13:27:15 -07:00
Dylan
2edd32aa31 [red-knot] SemanticIndexBuilder visits value before target in named expressions (#13053)
The `SemanticIndexBuilder` was causing a cycle in a salsa query by
attempting to resolve the target before the value in a named expression
(e.g. `x := x+1`). This PR swaps the order, avoiding a panic.

Closes #13012.
2024-08-22 07:59:13 -07:00
Dhruv Manilawala
02c4373a49 Bump version to 0.6.2 (#13056) 2024-08-22 18:59:27 +05:30
Steve C
d37e2e5d33 [flake8-simplify] Extend open-file-with-context-handler to work with other standard-library IO modules (SIM115) (#12959)
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2024-08-22 14:18:55 +01:00
Dhruv Manilawala
d1d067896c [red-knot] Remove notebook support from the server (#13040)
## Summary

This PR removes notebook sync support from the server capabilities
because it isn't tested; it'll be added back once we add full support
for notebooks.
2024-08-22 14:55:46 +05:30
olp-cs
93f9023ea3 Add hyperfine installation instructions; update hyperfine code samples (#13034)
## Summary

When following the step-by-step instructions to run the benchmarks in
`CONTRIBUTING.md`, I encountered two errors:

**Error 1:**
`bash: hyperfine: command not found`
    
**Solution**: I updated the instructions to include the step of
installing the benchmark tool.

**Error 2:**
```shell
$ ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ 
error: `ruff <path>` has been removed. Use `ruff check <path>` instead.
```
**Solution**: I added `check`.

## Test Plan

I tested it by running the benchmark-related commands in a new workspace
within GitHub Codespaces.
2024-08-22 09:05:09 +05:30
Dhruv Manilawala
8144a11f98 [red-knot] Add definition for with items (#12920)
## Summary

This PR adds symbols and definitions introduced by `with` statements.

The symbols and definitions are introduced for each with item. The type
inference is updated to call the definition region type inference
instead.
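
A small example of the symbols this introduces:

```python
with open("a.txt") as f, open("b.txt") as g:
    # `f` and `g` are each introduced by their own `with` item; every
    # item now gets its own symbol and definition in the semantic index.
    data = f.read() + g.read()
```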

## Test Plan

Add test case to check for symbol table and definitions.
2024-08-22 08:00:19 +05:30
Micha Reiser
dce87c21fd Eagerly validate typeshed versions (#12786) 2024-08-21 15:49:53 +00:00
1072 changed files with 28393 additions and 6546 deletions

View File

@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"serayuzgur.crates",
"fill-labs.dependi",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"

View File

@@ -37,7 +37,7 @@ jobs:
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v44
- uses: tj-actions/changed-files@v45
id: changed
with:
files_yaml: |

View File

@@ -34,10 +34,10 @@ jobs:
- name: "Set docs version"
run: |
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, exit with error
if [[ -z "$version" ]]; then
echo "Can't build docs without a version."
exit 1
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
fi
# Use version as display name for now
@@ -145,6 +145,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10

View File

@@ -1,3 +1,5 @@
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
@@ -64,7 +66,7 @@ jobs:
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
- name: Cache cargo-dist
uses: actions/upload-artifact@v4
with:

View File

@@ -37,13 +37,13 @@ jobs:
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
rm -rf ruff/crates/ruff_vendored/vendor/typeshed
mkdir ruff/crates/ruff_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ruff_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ruff_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/ruff_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ruff_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ruff_vendored/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}

View File

@@ -2,7 +2,7 @@ fail_fast: true
exclude: |
(?x)^(
crates/red_knot_python_semantic/vendor/.*|
crates/ruff_vendored/vendor/.*|
crates/red_knot_workspace/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -45,7 +45,7 @@ repos:
)$
- repo: https://github.com/crate-ci/typos
rev: v1.23.6
rev: v1.24.6
hooks:
- id: typos
@@ -59,7 +59,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.1
rev: v0.6.7
hooks:
- id: ruff-format
- id: ruff

View File

@@ -1,5 +1,192 @@
# Changelog
## 0.6.8
### Preview features
- Remove unnecessary parentheses around `match case` clauses ([#13510](https://github.com/astral-sh/ruff/pull/13510))
- Parenthesize overlong `if` guards in `match..case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
- Detect basic wildcard imports in `ruff analyze graph` ([#13486](https://github.com/astral-sh/ruff/pull/13486))
- \[`pylint`\] Implement `boolean-chained-comparison` (`R1716`) ([#13435](https://github.com/astral-sh/ruff/pull/13435))
### Rule changes
- \[`flake8-simplify`\] Detect `SIM910` when using variadic keyword arguments, i.e., `**kwargs` ([#13503](https://github.com/astral-sh/ruff/pull/13503))
- \[`pyupgrade`\] Avoid false negatives with non-reference shadowed bindings of loop variables (`UP028`) ([#13504](https://github.com/astral-sh/ruff/pull/13504))
### Bug fixes
- Detect tuples bound to variadic positional arguments, i.e., `*args` ([#13512](https://github.com/astral-sh/ruff/pull/13512))
- Exit gracefully on broken pipe errors ([#13485](https://github.com/astral-sh/ruff/pull/13485))
- Avoid panic when analyze graph hits broken pipe ([#13484](https://github.com/astral-sh/ruff/pull/13484))
### Performance
- Reuse `BTreeSets` in module resolver ([#13440](https://github.com/astral-sh/ruff/pull/13440))
- Skip traversal for non-compound statements ([#13441](https://github.com/astral-sh/ruff/pull/13441))
## 0.6.7
### Preview features
- Add Python version support to ruff analyze CLI ([#13426](https://github.com/astral-sh/ruff/pull/13426))
- Add `exclude` support to `ruff analyze` ([#13425](https://github.com/astral-sh/ruff/pull/13425))
- Fix parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))
### Rule changes
- \[`pydocstyle`\] Don't autofix `D400` if the first line ends in a question mark ([#13399](https://github.com/astral-sh/ruff/pull/13399))
### Bug fixes
- Respect `lint.exclude` in ruff check `--add-noqa` ([#13427](https://github.com/astral-sh/ruff/pull/13427))
### Performance
- Avoid tracking module resolver files in Salsa ([#13437](https://github.com/astral-sh/ruff/pull/13437))
- Use `forget` for module resolver database ([#13438](https://github.com/astral-sh/ruff/pull/13438))
## 0.6.6
### Preview features
- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))
### Formatter
- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))
### Server
- Fix off-by one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))
### Bug fixes
- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))
### Documentation
- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))
## 0.6.5
### Preview features
- \[`pydoclint`\] Ignore `DOC201` when function name is "**new**" ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))
### Rule changes
- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))
### Server
- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))
### Bug fixes
- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))
### CLI
- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))
### Playground
- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))
## 0.6.4
### Preview features
- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))
### Rule changes
- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))
### CLI
- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))
### Bug fixes
- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))
## 0.6.3
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
### Rule changes
- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
### Bug fixes
- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
## 0.6.2
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
- \[`ruff`\] Implement check for `Decimal` called with a float literal (`RUF032`) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
### Rule changes
- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
### Server
- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
### Bug fixes
- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
- \[`pylint`\] Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
### Documentation
- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
## 0.6.1
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,

View File

@@ -397,12 +397,18 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```
Install `hyperfine`:
```shell
cargo install hyperfine
```
To benchmark the release build:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
@@ -421,7 +427,7 @@ To benchmark against the ecosystem's existing tools:
```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff_linter/resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -467,7 +473,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -524,6 +530,8 @@ You can run the benchmarks with
cargo benchmark
```
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -562,7 +570,7 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)

382
Cargo.lock generated
View File

@@ -129,9 +129,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.86"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "append-only-vec"
@@ -161,6 +161,21 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "assert_fs"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7efdb1fdb47602827a342857666feb372712cbc64b414172bd6b167a02927674"
dependencies = [
"anstyle",
"doc-comment",
"globwalk",
"predicates",
"predicates-core",
"predicates-tree",
"tempfile",
]
[[package]]
name = "autocfg"
version = "1.2.0"
@@ -194,6 +209,15 @@ version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bstr"
version = "1.10.0"
@@ -231,6 +255,9 @@ name = "camino"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
dependencies = [
"serde",
]
[[package]]
name = "cast"
@@ -326,9 +353,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.16"
version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3"
dependencies = [
"clap_builder",
"clap_derive",
@@ -336,9 +363,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.15"
version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b"
dependencies = [
"anstream",
"anstyle",
@@ -379,9 +406,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "4.5.13"
version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
dependencies = [
"heck",
"proc-macro2",
@@ -410,9 +437,9 @@ dependencies = [
[[package]]
name = "codspeed"
version = "2.6.0"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a104ac948e0188b921eb3fcbdd55dcf62e542df4c7ab7e660623f6288302089"
checksum = "450a0e9df9df1c154156f4344f99d8f6f6e69d0fc4de96ef6e2e68b2ec3bce97"
dependencies = [
"colored",
"libc",
@@ -421,9 +448,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat"
version = "2.6.0"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "722c36bdc62d9436d027256ce2627af81ac7a596dfc7d13d849d0d212448d7fe"
checksum = "8eb1a6cb9c20e177fde58cdef97c1c7c9264eb1424fe45c4fccedc2fb078a569"
dependencies = [
"codspeed",
"colored",
@@ -511,6 +538,15 @@ dependencies = [
"rustc-hash 1.1.0",
]
[[package]]
name = "cpufeatures"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad"
dependencies = [
"libc",
]
[[package]]
name = "crc32fast"
version = "1.4.0"
@@ -616,6 +652,16 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "ctrlc"
version = "3.4.5"
@@ -676,9 +722,9 @@ dependencies = [
[[package]]
name = "dashmap"
version = "6.0.1"
version = "6.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28"
checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
dependencies = [
"cfg-if",
"crossbeam-utils",
@@ -694,6 +740,22 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "difflib"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "dirs"
version = "4.0.0"
@@ -735,6 +797,12 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "drop_bomb"
version = "0.1.5"
@@ -826,9 +894,9 @@ dependencies = [
[[package]]
name = "filetime"
version = "0.2.24"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
dependencies = [
"cfg-if",
"libc",
@@ -879,6 +947,16 @@ dependencies = [
"libc",
]
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getopts"
version = "0.2.21"
@@ -909,9 +987,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.14"
version = "0.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19"
dependencies = [
"aho-corasick",
"bstr",
@@ -920,6 +998,17 @@ dependencies = [
"regex-syntax 0.8.3",
]
[[package]]
name = "globwalk"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757"
dependencies = [
"bitflags 2.6.0",
"ignore",
"walkdir",
]
[[package]]
name = "half"
version = "2.4.1"
@@ -1017,9 +1106,9 @@ dependencies = [
[[package]]
name = "ignore"
version = "0.4.22"
version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1"
checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b"
dependencies = [
"crossbeam-deque",
"globset",
@@ -1053,9 +1142,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.4.0"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
dependencies = [
"equivalent",
"hashbrown",
@@ -1104,14 +1193,16 @@ dependencies = [
[[package]]
name = "insta"
version = "1.39.0"
version = "1.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "810ae6042d48e2c9e9215043563a58a80b877bc863228a74cf10c49d4620a6f5"
checksum = "6593a41c7a73841868772495db7dc1e8ecab43bb5c0b6da2059246c4b506ab60"
dependencies = [
"console",
"globset",
"lazy_static",
"linked-hash-map",
"pest",
"pest_derive",
"regex",
"serde",
"similar",
@@ -1256,9 +1347,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.157"
version = "0.2.158"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "374af5f94e54fa97cf75e945cce8a6b201e88a1a07e688b47dfd2a59c66dbd86"
checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
[[package]]
name = "libcst"
@@ -1336,9 +1427,9 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "lsp-server"
version = "0.7.6"
version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095"
checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9"
dependencies = [
"crossbeam-channel",
"log",
@@ -1553,9 +1644,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordermap"
version = "0.5.2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61d7d835be600a7ac71b24e39c92fe6fad9e818b3c71bfc379e3ba65e327d77f"
checksum = "31f2bd7b03bf2c767e1bb7b91505dbe022833776e60480275e6f2fb0db0c7503"
dependencies = [
"indexmap",
]
@@ -1707,6 +1798,51 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95"
dependencies = [
"memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_meta"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f"
dependencies = [
"once_cell",
"pest",
"sha2",
]
[[package]]
name = "phf"
version = "0.11.2"
@@ -1770,10 +1906,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "pretty_assertions"
version = "1.4.0"
name = "predicates"
version = "3.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97"
dependencies = [
"anstyle",
"difflib",
"predicates-core",
]
[[package]]
name = "predicates-core"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931"
[[package]]
name = "predicates-tree"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13"
dependencies = [
"predicates-core",
"termtree",
]
[[package]]
name = "pretty_assertions"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d"
dependencies = [
"diff",
"yansi",
@@ -1803,9 +1966,9 @@ dependencies = [
[[package]]
name = "quick-junit"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfc1a6a5406a114913df2df8507998c755311b55b78584bed5f6e88f6417c4d4"
checksum = "62ffd2f9a162cfae131bed6d9d1ed60adced33be340a94f96952897d7cb0c240"
dependencies = [
"chrono",
"indexmap",
@@ -1818,18 +1981,18 @@ dependencies = [
[[package]]
name = "quick-xml"
version = "0.31.0"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc"
dependencies = [
"memchr",
]
[[package]]
name = "quote"
version = "1.0.36"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
dependencies = [
"proc-macro2",
]
@@ -1920,24 +2083,24 @@ dependencies = [
"countme",
"hashbrown",
"insta",
"once_cell",
"ordermap",
"path-slash",
"ruff_db",
"ruff_index",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_source_file",
"ruff_text_size",
"ruff_vendored",
"rustc-hash 2.0.0",
"salsa",
"smallvec",
"static_assertions",
"tempfile",
"test-case",
"thiserror",
"tracing",
"walkdir",
"zip",
]
[[package]]
@@ -1950,7 +2113,6 @@ dependencies = [
"libc",
"lsp-server",
"lsp-types",
"red_knot_python_semantic",
"red_knot_workspace",
"ruff_db",
"ruff_notebook",
@@ -1988,14 +2150,16 @@ dependencies = [
"anyhow",
"crossbeam",
"notify",
"rayon",
"red_knot_python_semantic",
"ruff_cache",
"ruff_db",
"ruff_python_ast",
"ruff_text_size",
"ruff_vendored",
"rustc-hash 2.0.0",
"salsa",
"thiserror",
"tempfile",
"tracing",
]
@@ -2089,10 +2253,11 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.6.1"
version = "0.6.8"
dependencies = [
"anyhow",
"argfile",
"assert_fs",
"bincode",
"bitflags 2.6.0",
"cachedir",
@@ -2102,7 +2267,9 @@ dependencies = [
"clearscreen",
"colored",
"filetime",
"globwalk",
"ignore",
"indoc",
"insta",
"insta-cmd",
"is-macro",
@@ -2114,7 +2281,9 @@ dependencies = [
"rayon",
"regex",
"ruff_cache",
"ruff_db",
"ruff_diagnostics",
"ruff_graph",
"ruff_linter",
"ruff_macros",
"ruff_notebook",
@@ -2147,6 +2316,7 @@ dependencies = [
"criterion",
"mimalloc",
"once_cell",
"rayon",
"red_knot_python_semantic",
"red_knot_workspace",
"ruff_db",
@@ -2181,7 +2351,7 @@ version = "0.0.0"
dependencies = [
"camino",
"countme",
"dashmap 6.0.1",
"dashmap 6.1.0",
"filetime",
"ignore",
"insta",
@@ -2196,6 +2366,7 @@ dependencies = [
"ruff_text_size",
"rustc-hash 2.0.0",
"salsa",
"serde",
"tempfile",
"thiserror",
"tracing",
@@ -2271,6 +2442,26 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "ruff_graph"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"once_cell",
"red_knot_python_semantic",
"ruff_cache",
"ruff_db",
"ruff_linter",
"ruff_macros",
"ruff_python_ast",
"ruff_python_parser",
"salsa",
"schemars",
"serde",
"zip",
]
[[package]]
name = "ruff_index"
version = "0.0.0"
@@ -2281,7 +2472,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.6.1"
version = "0.6.8"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
@@ -2599,9 +2790,20 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "ruff_vendored"
version = "0.0.0"
dependencies = [
"once_cell",
"path-slash",
"ruff_db",
"walkdir",
"zip",
]
[[package]]
name = "ruff_wasm"
version = "0.6.1"
version = "0.6.8"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -2644,6 +2846,7 @@ dependencies = [
"regex",
"ruff_cache",
"ruff_formatter",
"ruff_graph",
"ruff_linter",
"ruff_macros",
"ruff_python_ast",
@@ -2741,12 +2944,12 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "salsa"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/salsa-rs/salsa.git?rev=4a7c955255e707e64e43f3ce5eabb771ae067768#4a7c955255e707e64e43f3ce5eabb771ae067768"
dependencies = [
"append-only-vec",
"arc-swap",
"crossbeam",
"dashmap 6.0.1",
"dashmap 6.1.0",
"hashlink",
"indexmap",
"lazy_static",
@@ -2761,12 +2964,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.1.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/salsa-rs/salsa.git?rev=4a7c955255e707e64e43f3ce5eabb771ae067768#4a7c955255e707e64e43f3ce5eabb771ae067768"
[[package]]
name = "salsa-macros"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/salsa-rs/salsa.git?rev=4a7c955255e707e64e43f3ce5eabb771ae067768#4a7c955255e707e64e43f3ce5eabb771ae067768"
dependencies = [
"heck",
"proc-macro2",
@@ -2828,9 +3031,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "serde"
version = "1.0.208"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
dependencies = [
"serde_derive",
]
@@ -2848,9 +3051,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.208"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
dependencies = [
"proc-macro2",
"quote",
@@ -2870,9 +3073,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.125"
version = "1.0.128"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed"
checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
dependencies = [
"itoa",
"memchr",
@@ -2932,6 +3135,17 @@ dependencies = [
"syn",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
@@ -3031,9 +3245,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "2.0.75"
version = "2.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9"
checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
dependencies = [
"proc-macro2",
"quote",
@@ -3087,6 +3301,12 @@ dependencies = [
"phf_codegen",
]
[[package]]
name = "termtree"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
[[package]]
name = "test-case"
version = "3.3.1"
@@ -3122,18 +3342,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.63"
version = "1.0.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.63"
version = "1.0.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
dependencies = [
"proc-macro2",
"quote",
@@ -3332,6 +3552,18 @@ version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ucd-trie"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "unic-char-property"
version = "0.9.0"
@@ -3382,15 +3614,15 @@ checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
[[package]]
name = "unicode-ident"
version = "1.0.12"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
[[package]]
name = "unicode-normalization"
version = "0.1.23"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5"
checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956"
dependencies = [
"tinyvec",
]
@@ -3403,9 +3635,9 @@ checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
[[package]]
name = "unicode_names2"
version = "1.2.2"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "addeebf294df7922a1164f729fb27ebbbcea99cc32b3bf08afab62757f707677"
checksum = "d1673eca9782c84de5f81b82e4109dcfb3611c8ba0d52930ec4a9478f547b2dd"
dependencies = [
"phf",
"unicode_names2_generator",
@@ -3413,9 +3645,9 @@ dependencies = [
[[package]]
name = "unicode_names2_generator"
version = "1.2.2"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f444b8bba042fe3c1251ffaca35c603f2dc2ccc08d595c65a8c4f76f3e8426c0"
checksum = "b91e5b84611016120197efd7dc93ef76774f4e084cd73c9fb3ea4a86c570c56e"
dependencies = [
"getopts",
"log",
@@ -3901,9 +4133,9 @@ checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]]
name = "yansi"
version = "0.5.1"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "yansi-term"

View File

@@ -14,9 +14,10 @@ license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -33,15 +34,17 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_vendored = { path = "crates/ruff_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_workspace = { path = "crates/red_knot_workspace" }
red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -68,6 +71,7 @@ fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
@@ -102,13 +106,13 @@ pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quick-junit = { version = "0.5.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "4a7c955255e707e64e43f3ce5eabb771ae067768" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -230,9 +234,9 @@ inherits = "release"
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.18.0"
cargo-dist-version = "0.22.1"
# CI backends to support
ci = ["github"]
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -263,11 +267,11 @@ targets = [
auto-includes = false
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Publish jobs to run in CI
# Which actions to run on pull requests
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# The stage during which the GitHub Release should be created
# Which phase cargo-dist should use to create the GitHub release
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
@@ -275,9 +279,11 @@ build-local-artifacts = false
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Announcement jobs to run in CI
# Post-announce jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = "CARGO_HOME"

View File

@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)
## Getting Started
## Getting Started<a id="getting-started"></a>
For more, see the [documentation](https://docs.astral.sh/ruff/).
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.6.8/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.8/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.6.1
rev: v0.6.8
hooks:
# Run the linter.
- id: ruff
@@ -195,7 +195,7 @@ jobs:
- uses: chartboost/ruff-action@v1
```
### Configuration
### Configuration<a id="configuration"></a>
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -291,7 +291,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
## Rules
## Rules<a id="rules"></a>
<!-- Begin section: Rules -->
@@ -367,21 +367,21 @@ quality tools, including:
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
## Contributing
## Contributing<a id="contributing"></a>
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
## Support
## Support<a id="support"></a>
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
## Acknowledgements
## Acknowledgements<a id="acknowledgements"></a>
Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -405,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
Ruff is released under the MIT license.
## Who's Using Ruff?
## Who's Using Ruff?<a id="whos-using-ruff"></a>
Ruff is used by a number of major open-source projects and companies, including:
@@ -524,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```
## License
## License<a id="license"></a>
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)

View File

@@ -13,9 +13,8 @@ license.workspace = true
[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
red_knot_workspace = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
anyhow = { workspace = true }

View File

@@ -13,12 +13,17 @@ The CLI supports different verbosity levels.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of this output is wasted and it's really slow; we dump e.g. the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
## `RED_KNOT_LOG`
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing log output where messages from different threads are interleaved.
To switch off concurrency entirely and get more readable logs, use `RAYON_NUM_THREADS=1`.
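As a minimal sketch of combining the two variables (the binary name `red_knot` and the crate name in the filter directive are assumptions for illustration; directives follow the `tracing-subscriber` `EnvFilter` syntax linked above):

```shell
# Single-threaded run that only shows debug-level messages from one crate,
# so log lines arrive in the order the work is actually performed.
RED_KNOT_LOG="red_knot_workspace=debug" RAYON_NUM_THREADS=1 red_knot
```

With a single Rayon thread, messages are no longer interleaved across files, which makes it much easier to trace a single file through the pipeline.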
### Examples
#### Show all debug messages

View File

@@ -7,12 +7,12 @@ use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::plumbing::ZalsaDatabase;
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::site_packages::VirtualEnvironment;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;
@@ -65,15 +65,14 @@ to resolve type information for the project's third-party dependencies.",
value_name = "PATH",
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
)]
extra_search_path: Vec<SystemPathBuf>,
extra_search_path: Option<Vec<SystemPathBuf>>,
#[arg(
long,
help = "Python version to assume when resolving types",
default_value_t = TargetVersion::default(),
value_name="VERSION")
]
target_version: TargetVersion,
value_name = "VERSION"
)]
target_version: Option<TargetVersion>,
#[clap(flatten)]
verbosity: Verbosity,
@@ -86,6 +85,36 @@ to resolve type information for the project's third-party dependencies.",
watch: bool,
}
impl Args {
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
let mut configuration = Configuration::default();
if let Some(target_version) = self.target_version {
configuration.target_version = Some(target_version.into());
}
if let Some(venv_path) = &self.venv_path {
configuration.search_paths.site_packages = Some(SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
});
}
if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
configuration.search_paths.custom_typeshed =
Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
}
if let Some(extra_search_paths) = &self.extra_search_path {
configuration.search_paths.extra_paths = extra_search_paths
.iter()
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
.collect();
}
configuration
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
@@ -115,22 +144,13 @@ pub fn main() -> ExitStatus {
}
fn run() -> anyhow::Result<ExitStatus> {
let Args {
command,
current_directory,
custom_typeshed_dir,
extra_search_path: extra_paths,
venv_path,
target_version,
verbosity,
watch,
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
let args = Args::parse_from(std::env::args().collect::<Vec<_>>());
if matches!(command, Some(Command::Server)) {
if matches!(args.command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
let verbosity = verbosity.level();
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
@@ -146,10 +166,12 @@ fn run() -> anyhow::Result<ExitStatus> {
})?
};
let cwd = current_directory
let cwd = args
.current_directory
.as_ref()
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(&cwd, &cli_base_path))
Ok(SystemPath::absolute(cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided current-directory path '{cwd}' is not a directory."
@@ -160,33 +182,18 @@ fn run() -> anyhow::Result<ExitStatus> {
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd.clone());
let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
// TODO: Verify the remaining search path settings eagerly.
let site_packages = venv_path
.map(|path| {
VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
.and_then(|venv| venv.site_packages_directories(&system))
})
.transpose()?
.unwrap_or_default();
// TODO: Respect the settings from the workspace metadata when resolving the program settings.
let program_settings = ProgramSettings {
target_version: target_version.into(),
search_paths: SearchPathSettings {
extra_paths,
src_root: workspace_metadata.root().to_path_buf(),
custom_typeshed: custom_typeshed_dir,
site_packages,
},
};
let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = WorkspaceMetadata::from_path(
system.current_directory(),
&system,
Some(cli_configuration.clone()),
)?;
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
let mut db = RootDatabase::new(workspace_metadata, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -198,7 +205,7 @@ fn run() -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if watch {
let exit_status = if args.watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
@@ -238,10 +245,12 @@ struct MainLoop {
/// The file system watcher, if running in watch mode.
watcher: Option<WorkspaceWatcher>,
cli_configuration: Configuration,
}
impl MainLoop {
fn new() -> (Self, MainLoopCancellationToken) {
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);
(
@@ -249,6 +258,7 @@ impl MainLoop {
sender: sender.clone(),
receiver,
watcher: None,
cli_configuration,
},
MainLoopCancellationToken { sender },
)
@@ -331,7 +341,7 @@ impl MainLoop {
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes);
db.apply_changes(changes, Some(&self.cli_configuration));
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}

View File

@@ -5,12 +5,11 @@ use std::time::Duration;
use anyhow::{anyhow, Context};
use red_knot_python_semantic::{
resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
};
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File, FileError};
use ruff_db::source::source_text;
@@ -25,7 +24,7 @@ struct TestCase {
/// We need to hold on to it in the test case or the temp files get deleted.
_temp_dir: tempfile::TempDir,
root_dir: SystemPathBuf,
search_path_settings: SearchPathSettings,
configuration: Configuration,
}
impl TestCase {
@@ -41,10 +40,6 @@ impl TestCase {
&self.db
}
fn db_mut(&mut self) -> &mut RootDatabase {
&mut self.db
}
fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
self.try_stop_watch(Duration::from_secs(10))
.expect("Expected watch changes but observed none.")
@@ -105,16 +100,20 @@ impl TestCase {
Some(all_events)
}
fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
self.db.apply_changes(changes, Some(&self.configuration));
}
fn update_search_path_settings(
&mut self,
f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
configuration: SearchPathConfiguration,
) -> anyhow::Result<()> {
let program = Program::get(self.db());
let new_settings = f(&self.search_path_settings);
self.configuration.search_paths = configuration.clone();
let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
program.update_search_paths(&mut self.db, new_settings.clone())?;
self.search_path_settings = new_settings;
program.update_search_paths(&mut self.db, &new_settings)?;
if let Some(watcher) = &mut self.watcher {
watcher.update(&self.db);
@@ -179,17 +178,14 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
where
F: SetupFiles,
{
setup_with_search_paths(setup_files, |_root, workspace_path| SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![],
setup_with_search_paths(setup_files, |_root, _workspace_path| {
SearchPathConfiguration::default()
})
}
fn setup_with_search_paths<F>(
setup_files: F,
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathSettings,
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathConfiguration,
) -> anyhow::Result<TestCase>
where
F: SetupFiles,
@@ -221,25 +217,34 @@ where
let system = OsSystem::new(&workspace_path);
let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
let search_path_settings = create_search_paths(&root_path, workspace.root());
let search_paths = create_search_paths(&root_path, &workspace_path);
for path in search_path_settings
for path in search_paths
.extra_paths
.iter()
.chain(search_path_settings.site_packages.iter())
.chain(search_path_settings.custom_typeshed.iter())
.flatten()
.chain(search_paths.custom_typeshed.iter())
.chain(search_paths.site_packages.iter().flat_map(|site_packages| {
if let SitePackages::Known(path) = site_packages {
path.as_slice()
} else {
&[]
}
}))
{
std::fs::create_dir_all(path.as_std_path())
.with_context(|| format!("Failed to create search path '{path}'"))?;
}
let settings = ProgramSettings {
target_version: PythonVersion::default(),
search_paths: search_path_settings.clone(),
let configuration = Configuration {
target_version: Some(PythonVersion::PY312),
search_paths,
};
let db = RootDatabase::new(workspace, settings, system)?;
let workspace =
WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
let db = RootDatabase::new(workspace, system)?;
let (sender, receiver) = crossbeam::channel::unbounded();
let watcher = directory_watcher(move |events| sender.send(events).unwrap())
@@ -254,7 +259,7 @@ where
watcher: Some(watcher),
_temp_dir: temp_dir,
root_dir: root_path,
search_path_settings,
configuration,
};
// Sometimes the file watcher reports changes for events that happened before the watcher was started.
@@ -307,7 +312,7 @@ fn new_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
let foo = case.system_file(&foo_path).expect("foo.py to exist.");
@@ -330,7 +335,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert!(case.system_file(&foo_path).is_ok());
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);
@@ -354,7 +359,7 @@ fn changed_file() -> anyhow::Result<()> {
assert!(!changes.is_empty());
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
assert_eq!(&case.collect_package_files(&foo_path), &[foo]);
@@ -377,7 +382,7 @@ fn deleted_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);
@@ -409,7 +414,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);
@@ -441,7 +446,7 @@ fn move_file_to_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
let foo_in_workspace = case.system_file(&foo_in_workspace_path)?;
@@ -469,7 +474,7 @@ fn rename_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert!(!foo.exists(case.db()));
@@ -510,7 +515,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
let init_file = case
.system_file(sub_new_path.join("__init__.py"))
@@ -561,7 +566,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -615,7 +620,7 @@ fn directory_renamed() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -680,7 +685,7 @@ fn directory_deleted() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -694,15 +699,13 @@ fn directory_deleted() -> anyhow::Result<()> {
#[test]
fn search_path() -> anyhow::Result<()> {
let mut case =
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![root_path.join("site_packages")],
}
})?;
let mut case = setup_with_search_paths(
[("bar.py", "import sub.a")],
|root_path, _workspace_path| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default()
},
)?;
let site_packages = case.root_path().join("site_packages");
@@ -715,7 +718,7 @@ fn search_path() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
assert_eq!(
@@ -736,9 +739,9 @@ fn add_search_path() -> anyhow::Result<()> {
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_none());
// Register site-packages as a search path.
case.update_search_path_settings(|settings| SearchPathSettings {
site_packages: vec![site_packages.clone()],
..settings.clone()
case.update_search_path_settings(SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![site_packages.clone()])),
..SearchPathConfiguration::default()
})
.expect("Search path settings to be valid");
@@ -746,7 +749,7 @@ fn add_search_path() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
@@ -755,21 +758,19 @@ fn add_search_path() -> anyhow::Result<()> {
#[test]
fn remove_search_path() -> anyhow::Result<()> {
let mut case =
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![root_path.join("site_packages")],
}
})?;
let mut case = setup_with_search_paths(
[("bar.py", "import sub.a")],
|root_path, _workspace_path| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default()
},
)?;
// Remove site packages from the search path settings.
let site_packages = case.root_path().join("site_packages");
case.update_search_path_settings(|settings| SearchPathSettings {
site_packages: vec![],
..settings.clone()
case.update_search_path_settings(SearchPathConfiguration {
site_packages: None,
..SearchPathConfiguration::default()
})
.expect("Search path settings to be valid");
@@ -782,6 +783,48 @@ fn remove_search_path() -> anyhow::Result<()> {
Ok(())
}
#[test]
fn changed_versions_file() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
|root_path: &SystemPath, workspace_path: &SystemPath| {
std::fs::write(workspace_path.join("bar.py").as_std_path(), "import sub.a")?;
std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?;
std::fs::write(
root_path.join("typeshed/stdlib/os.pyi").as_std_path(),
"# not important",
)?;
Ok(())
},
|root_path, _workspace_path| SearchPathConfiguration {
custom_typeshed: Some(root_path.join("typeshed")),
..SearchPathConfiguration::default()
},
)?;
// The custom typeshed's `VERSIONS` file is empty, so `os` should not resolve yet.
assert_eq!(
resolve_module(case.db(), ModuleName::new("os").unwrap()),
None
);
std::fs::write(
case.root_path()
.join("typeshed/stdlib/VERSIONS")
.as_std_path(),
"os: 3.0-",
)?;
let changes = case.stop_watch();
case.apply_changes(changes);
assert!(resolve_module(case.db(), ModuleName::new("os").unwrap()).is_some());
Ok(())
}
/// Watch a workspace that contains two files where one file is a hardlink to another.
///
/// Setup:
@@ -828,7 +871,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
@@ -899,7 +942,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 2')");
@@ -938,7 +981,7 @@ mod unix {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(
foo.permissions(case.db()),
@@ -1023,7 +1066,7 @@ mod unix {
let changes = case.take_watch_changes();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(
source_text(case.db(), baz.file()).as_str(),
@@ -1036,7 +1079,7 @@ mod unix {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(
source_text(case.db(), baz.file()).as_str(),
@@ -1107,7 +1150,7 @@ mod unix {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
// The file watcher is guaranteed to emit one event for the changed file, but it isn't specified
// if the event is emitted for the "original" or linked path because both paths are watched.
@@ -1176,11 +1219,11 @@ mod unix {
Ok(())
},
|_root, workspace| SearchPathSettings {
extra_paths: vec![],
src_root: workspace.to_path_buf(),
custom_typeshed: None,
site_packages: vec![workspace.join(".venv/lib/python3.12/site-packages")],
|_root, workspace| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![
workspace.join(".venv/lib/python3.12/site-packages")
])),
..SearchPathConfiguration::default()
},
)?;
@@ -1215,7 +1258,7 @@ mod unix {
let changes = case.stop_watch();
case.db_mut().apply_changes(changes);
case.apply_changes(changes);
assert_eq!(
source_text(case.db(), baz_original_file).as_str(),

View File

@@ -17,35 +17,31 @@ ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }
anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }
test-case = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
ruff_vendored = { workspace = true }
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }
[lints]
workspace = true

View File

@@ -31,10 +31,10 @@ impl<T> AstNodeRef<T> {
/// which the `AstNodeRef` belongs.
///
/// ## Safety
///
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
/// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
/// the invariant `node belongs to parsed` is upheld.
pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
Self {
_parsed: parsed,

View File

@@ -1,16 +0,0 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;
/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
let builtins_name =
ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
let builtins_file = resolve_module(db, builtins_name)?.file();
Some(global_scope(db, builtins_file))
}

View File

@@ -11,7 +11,6 @@ pub trait Db: SourceDb + Upcast<dyn SourceDb> {
pub(crate) mod tests {
use std::sync::Arc;
use crate::module_resolver::vendored_typeshed_stubs;
use ruff_db::files::{File, Files};
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
@@ -33,7 +32,7 @@ pub(crate) mod tests {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: vendored_typeshed_stubs().clone(),
vendored: ruff_vendored::file_system().clone(),
events: std::sync::Arc::default(),
files: Files::default(),
}

View File

@@ -4,13 +4,12 @@ use rustc_hash::FxHasher;
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings};
pub use module_resolver::{resolve_module, system_module_search_paths, Module};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};
pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
@@ -19,6 +18,8 @@ mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod stdlib;
pub mod types;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;

View File

@@ -1,10 +1,9 @@
use std::iter::FusedIterator;
pub(crate) use module::Module;
pub use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;
use crate::module_resolver::resolver::search_paths;
use crate::Db;
@@ -13,7 +12,6 @@ use resolver::SearchPathIterator;
mod module;
mod path;
mod resolver;
mod state;
mod typeshed;
#[cfg(test)]

View File

@@ -9,11 +9,11 @@ use ruff_db::files::{system_path_to_file, vendored_path_to_file, File, FileError
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredPath, VendoredPathBuf};
use super::typeshed::{typeshed_versions, TypeshedVersionsParseError, TypeshedVersionsQueryResult};
use crate::db::Db;
use crate::module_name::ModuleName;
use super::state::ResolverState;
use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult};
use crate::module_resolver::resolver::ResolverContext;
use crate::site_packages::SitePackagesDiscoveryError;
/// A path that points to a Python module.
///
@@ -59,8 +59,12 @@ impl ModulePath {
self.relative_path.push(component);
}
pub(crate) fn pop(&mut self) -> bool {
self.relative_path.pop()
}
#[must_use]
pub(crate) fn is_directory(&self, resolver: &ResolverState) -> bool {
pub(super) fn is_directory(&self, resolver: &ResolverContext) -> bool {
let ModulePath {
search_path,
relative_path,
@@ -74,7 +78,7 @@ impl ModulePath {
== Err(FileError::IsADirectory)
}
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
@@ -84,7 +88,7 @@ impl ModulePath {
}
}
SearchPathInner::StandardLibraryVendored(stdlib_root) => {
match query_stdlib_version(None, relative_path, resolver) {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => resolver
@@ -96,7 +100,7 @@ impl ModulePath {
}
#[must_use]
pub(crate) fn is_regular_package(&self, resolver: &ResolverState) -> bool {
pub(super) fn is_regular_package(&self, resolver: &ResolverContext) -> bool {
let ModulePath {
search_path,
relative_path,
@@ -113,7 +117,7 @@ impl ModulePath {
.is_ok()
}
SearchPathInner::StandardLibraryCustom(search_path) => {
match query_stdlib_version(Some(search_path), relative_path, resolver) {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => system_path_to_file(
@@ -124,7 +128,7 @@ impl ModulePath {
}
}
SearchPathInner::StandardLibraryVendored(search_path) => {
match query_stdlib_version(None, relative_path, resolver) {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => resolver
@@ -136,7 +140,7 @@ impl ModulePath {
}
#[must_use]
pub(crate) fn to_file(&self, resolver: &ResolverState) -> Option<File> {
pub(super) fn to_file(&self, resolver: &ResolverContext) -> Option<File> {
let db = resolver.db.upcast();
let ModulePath {
search_path,
@@ -150,7 +154,7 @@ impl ModulePath {
system_path_to_file(db, search_path.join(relative_path)).ok()
}
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => None,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
@@ -159,7 +163,7 @@ impl ModulePath {
}
}
SearchPathInner::StandardLibraryVendored(stdlib_root) => {
match query_stdlib_version(None, relative_path, resolver) {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => None,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
@@ -273,19 +277,15 @@ fn stdlib_path_to_module_name(relative_path: &Utf8Path) -> Option<ModuleName> {
#[must_use]
fn query_stdlib_version(
custom_stdlib_root: Option<&SystemPath>,
relative_path: &Utf8Path,
resolver: &ResolverState,
context: &ResolverContext,
) -> TypeshedVersionsQueryResult {
let Some(module_name) = stdlib_path_to_module_name(relative_path) else {
return TypeshedVersionsQueryResult::DoesNotExist;
};
let ResolverState {
db,
typeshed_versions,
target_version,
} = resolver;
typeshed_versions.query_module(*db, &module_name, custom_stdlib_root, *target_version)
let ResolverContext { db, target_version } = context;
typeshed_versions(*db).query_module(&module_name, *target_version)
}
/// Enumeration describing the various ways in which validation of a search path might fail.
@@ -293,7 +293,7 @@ fn query_stdlib_version(
/// If validation fails for a search path derived from the user settings,
/// a message must be displayed to the user,
/// as type checking cannot be done reliably in these circumstances.
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug)]
pub(crate) enum SearchPathValidationError {
/// The path provided by the user was not a directory
NotADirectory(SystemPathBuf),
@@ -304,18 +304,20 @@ pub(crate) enum SearchPathValidationError {
NoStdlibSubdirectory(SystemPathBuf),
/// The typeshed path provided by the user is a directory,
/// but no `stdlib/VERSIONS` file exists.
/// but `stdlib/VERSIONS` could not be read.
/// (This is only relevant for stdlib search paths.)
NoVersionsFile(SystemPathBuf),
/// `stdlib/VERSIONS` is a directory.
/// (This is only relevant for stdlib search paths.)
VersionsIsADirectory(SystemPathBuf),
FailedToReadVersionsFile {
path: SystemPathBuf,
error: std::io::Error,
},
/// The path provided by the user is a directory,
/// and a `stdlib/VERSIONS` file exists, but it fails to parse.
/// (This is only relevant for stdlib search paths.)
VersionsParseError(TypeshedVersionsParseError),
/// Failed to discover the site-packages for the configured virtual environment.
SitePackagesDiscovery(SitePackagesDiscoveryError),
}
impl fmt::Display for SearchPathValidationError {
@@ -325,9 +327,16 @@ impl fmt::Display for SearchPathValidationError {
Self::NoStdlibSubdirectory(path) => {
write!(f, "The directory at {path} has no `stdlib/` subdirectory")
}
Self::NoVersionsFile(path) => write!(f, "Expected a file at {path}/stdlib/VERSIONS"),
Self::VersionsIsADirectory(path) => write!(f, "{path}/stdlib/VERSIONS is a directory."),
Self::FailedToReadVersionsFile { path, error } => {
write!(
f,
"Failed to read the custom typeshed versions file '{path}': {error}"
)
}
Self::VersionsParseError(underlying_error) => underlying_error.fmt(f),
SearchPathValidationError::SitePackagesDiscovery(error) => {
write!(f, "Failed to discover the site-packages directory: {error}")
}
}
}
}
@@ -342,6 +351,18 @@ impl std::error::Error for SearchPathValidationError {
}
}
impl From<TypeshedVersionsParseError> for SearchPathValidationError {
fn from(value: TypeshedVersionsParseError) -> Self {
Self::VersionsParseError(value)
}
}
impl From<SitePackagesDiscoveryError> for SearchPathValidationError {
fn from(value: SitePackagesDiscoveryError) -> Self {
Self::SitePackagesDiscovery(value)
}
}
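
These `From` impls exist so callers can bubble the underlying errors up with `?`. A minimal sketch (not part of the diff; the helper is hypothetical) of how the conversion kicks in:

fn parse_versions(content: &str) -> Result<TypeshedVersions, SearchPathValidationError> {
    // `?` converts the `TypeshedVersionsParseError` returned by `parse`
    // into a `SearchPathValidationError` via the `From` impl above.
    Ok(content.parse::<TypeshedVersions>()?)
}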
type SearchPathResult<T> = Result<T, SearchPathValidationError>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -384,11 +405,10 @@ pub(crate) struct SearchPath(Arc<SearchPathInner>);
impl SearchPath {
fn directory_path(system: &dyn System, root: SystemPathBuf) -> SearchPathResult<SystemPathBuf> {
let canonicalized = system.canonicalize_path(&root).unwrap_or(root);
if system.is_directory(&canonicalized) {
Ok(canonicalized)
if system.is_directory(&root) {
Ok(root)
} else {
Err(SearchPathValidationError::NotADirectory(canonicalized))
Err(SearchPathValidationError::NotADirectory(root))
}
}
@@ -407,32 +427,22 @@ impl SearchPath {
}
/// Create a new standard-library search path pointing to a custom directory on disk
pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: SystemPathBuf) -> SearchPathResult<Self> {
pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: &SystemPath) -> SearchPathResult<Self> {
let system = db.system();
if !system.is_directory(&typeshed) {
if !system.is_directory(typeshed) {
return Err(SearchPathValidationError::NotADirectory(
typeshed.to_path_buf(),
));
}
let stdlib =
Self::directory_path(system, typeshed.join("stdlib")).map_err(|err| match err {
SearchPathValidationError::NotADirectory(path) => {
SearchPathValidationError::NoStdlibSubdirectory(path)
SearchPathValidationError::NotADirectory(_) => {
SearchPathValidationError::NoStdlibSubdirectory(typeshed.to_path_buf())
}
err => err,
})?;
let typeshed_versions =
system_path_to_file(db.upcast(), stdlib.join("VERSIONS")).map_err(|err| match err {
FileError::NotFound => SearchPathValidationError::NoVersionsFile(typeshed),
FileError::IsADirectory => {
SearchPathValidationError::VersionsIsADirectory(typeshed)
}
})?;
super::typeshed::parse_typeshed_versions(db, typeshed_versions)
.as_ref()
.map_err(|validation_error| {
SearchPathValidationError::VersionsParseError(validation_error.clone())
})?;
Ok(Self(Arc::new(SearchPathInner::StandardLibraryCustom(
stdlib,
))))
@@ -623,11 +633,11 @@ mod tests {
use ruff_db::Db;
use crate::db::tests::TestDb;
use super::*;
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
use crate::python_version::PythonVersion;
use super::*;
impl ModulePath {
#[must_use]
fn join(&self, component: &str) -> ModulePath {
@@ -638,15 +648,6 @@ mod tests {
}
impl SearchPath {
#[must_use]
pub(crate) fn is_stdlib_search_path(&self) -> bool {
matches!(
&*self.0,
SearchPathInner::StandardLibraryCustom(_)
| SearchPathInner::StandardLibraryVendored(_)
)
}
fn join(&self, component: &str) -> ModulePath {
self.to_module_path().join(component)
}
@@ -661,7 +662,7 @@ mod tests {
.build();
assert_eq!(
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
.unwrap()
.to_module_path()
.with_py_extension(),
@@ -669,7 +670,7 @@ mod tests {
);
assert_eq!(
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
.unwrap()
.join("foo")
.with_pyi_extension(),
@@ -780,7 +781,7 @@ mod tests {
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
.with_custom_typeshed(MockedTypeshed::default())
.build();
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
.unwrap()
.to_module_path()
.push("bar.py");
@@ -792,7 +793,7 @@ mod tests {
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
.with_custom_typeshed(MockedTypeshed::default())
.build();
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
.unwrap()
.to_module_path()
.push("bar.rs");
@@ -824,7 +825,7 @@ mod tests {
.with_custom_typeshed(MockedTypeshed::default())
.build();
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap()).unwrap();
// Must have a `.pyi` extension or no extension:
let bad_absolute_path = SystemPath::new("foo/stdlib/x.py");
@@ -872,8 +873,7 @@ mod tests {
.with_custom_typeshed(typeshed)
.with_target_version(target_version)
.build();
let stdlib =
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
let stdlib = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap()).unwrap();
(db, stdlib)
}
@@ -898,7 +898,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let asyncio_regular_package = stdlib_path.join("asyncio");
assert!(asyncio_regular_package.is_directory(&resolver));
@@ -926,7 +926,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let xml_namespace_package = stdlib_path.join("xml");
assert!(xml_namespace_package.is_directory(&resolver));
@@ -948,7 +948,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let functools_module = stdlib_path.join("functools.pyi");
assert!(functools_module.to_file(&resolver).is_some());
@@ -964,7 +964,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let collections_regular_package = stdlib_path.join("collections");
assert_eq!(collections_regular_package.to_file(&resolver), None);
@@ -980,7 +980,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let importlib_namespace_package = stdlib_path.join("importlib");
assert_eq!(importlib_namespace_package.to_file(&resolver), None);
@@ -1001,7 +1001,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let non_existent = stdlib_path.join("doesnt_even_exist");
assert_eq!(non_existent.to_file(&resolver), None);
@@ -1029,7 +1029,7 @@ mod tests {
};
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY39);
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
// Since we've set the target version to Py39,
// `collections` should now exist as a directory, according to VERSIONS...
@@ -1058,7 +1058,7 @@ mod tests {
};
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY39);
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
// The `importlib` directory now also exists
let importlib_namespace_package = stdlib_path.join("importlib");
@@ -1082,7 +1082,7 @@ mod tests {
};
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
let resolver = ResolverState::new(&db, PythonVersion::PY39);
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
// The `xml` package no longer exists on py39:
let xml_namespace_package = stdlib_path.join("xml");

View File

@@ -4,16 +4,17 @@ use std::iter::FusedIterator;
use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_db::files::{File, FilePath, FileRootKind};
use ruff_db::system::{DirectoryEntry, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPath;
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredPath};
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, SearchPathSettings};
use crate::module_resolver::typeshed::{vendored_typeshed_versions, TypeshedVersions};
use crate::site_packages::VirtualEnvironment;
use crate::{Program, PythonVersion, SearchPathSettings, SitePackages};
use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
use super::state::ResolverState;
/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
@@ -122,7 +123,7 @@ pub(crate) fn search_paths(db: &dyn Db) -> SearchPathIterator {
Program::get(db).search_paths(db).iter(db)
}
#[derive(Debug, PartialEq, Eq, Default)]
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct SearchPaths {
/// Search paths that have been statically determined purely from reading Ruff's configuration settings.
/// These shouldn't ever change unless the config settings themselves change.
@@ -135,6 +136,8 @@ pub(crate) struct SearchPaths {
/// in terms of module-resolution priority until we've discovered the editable installs
/// for the first `site-packages` path
site_packages: Vec<SearchPath>,
typeshed_versions: TypeshedVersions,
}
impl SearchPaths {
@@ -146,8 +149,14 @@ impl SearchPaths {
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
pub(crate) fn from_settings(
db: &dyn Db,
settings: SearchPathSettings,
settings: &SearchPathSettings,
) -> Result<Self, SearchPathValidationError> {
fn canonicalize(path: &SystemPath, system: &dyn System) -> SystemPathBuf {
system
.canonicalize_path(path)
.unwrap_or_else(|_| path.to_path_buf())
}
let SearchPathSettings {
extra_paths,
src_root,
@@ -161,45 +170,65 @@ impl SearchPaths {
let mut static_paths = vec![];
for path in extra_paths {
tracing::debug!("Adding static extra search-path '{path}'");
let path = canonicalize(path, system);
files.try_add_root(db.upcast(), &path, FileRootKind::LibrarySearchPath);
tracing::debug!("Adding extra search-path '{path}'");
let search_path = SearchPath::extra(system, path)?;
files.try_add_root(
db.upcast(),
search_path.as_system_path().unwrap(),
FileRootKind::LibrarySearchPath,
);
static_paths.push(search_path);
static_paths.push(SearchPath::extra(system, path)?);
}
tracing::debug!("Adding first-party search path '{src_root}'");
static_paths.push(SearchPath::first_party(system, src_root)?);
static_paths.push(SearchPath::first_party(system, src_root.to_path_buf())?);
static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
let (typeshed_versions, stdlib_path) = if let Some(custom_typeshed) = custom_typeshed {
let custom_typeshed = canonicalize(custom_typeshed, system);
tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");
let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
files.try_add_root(
db.upcast(),
search_path.as_system_path().unwrap(),
&custom_typeshed,
FileRootKind::LibrarySearchPath,
);
search_path
let versions_path = custom_typeshed.join("stdlib/VERSIONS");
let versions_content = system.read_to_string(&versions_path).map_err(|error| {
SearchPathValidationError::FailedToReadVersionsFile {
path: versions_path,
error,
}
})?;
let parsed: TypeshedVersions = versions_content.parse()?;
let search_path = SearchPath::custom_stdlib(db, &custom_typeshed)?;
(parsed, search_path)
} else {
SearchPath::vendored_stdlib()
});
tracing::debug!("Using vendored stdlib");
(
vendored_typeshed_versions(db),
SearchPath::vendored_stdlib(),
)
};
static_paths.push(stdlib_path);
let site_packages_paths = match site_packages_paths {
SitePackages::Derived { venv_path } => VirtualEnvironment::new(venv_path, system)
.and_then(|venv| venv.site_packages_directories(system))?,
SitePackages::Known(paths) => paths
.iter()
.map(|path| canonicalize(path, system))
.collect(),
};
let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
for path in site_packages_paths {
tracing::debug!("Adding site-packages search path '{path}'");
let search_path = SearchPath::site_packages(system, path)?;
files.try_add_root(
db.upcast(),
search_path.as_system_path().unwrap(),
FileRootKind::LibrarySearchPath,
);
site_packages.push(search_path);
files.try_add_root(db.upcast(), &path, FileRootKind::LibrarySearchPath);
site_packages.push(SearchPath::site_packages(system, path)?);
}
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
@@ -224,16 +253,31 @@ impl SearchPaths {
Ok(SearchPaths {
static_paths,
site_packages,
typeshed_versions,
})
}
pub(crate) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
pub(super) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
SearchPathIterator {
db,
static_paths: self.static_paths.iter(),
dynamic_paths: None,
}
}
pub(crate) fn custom_stdlib(&self) -> Option<&SystemPath> {
self.static_paths.iter().find_map(|search_path| {
if search_path.is_standard_library() {
search_path.as_system_path()
} else {
None
}
})
}
pub(super) fn typeshed_versions(&self) -> &TypeshedVersions {
&self.typeshed_versions
}
}
/// Collect all dynamic search paths. For each `site-packages` path:
@@ -251,6 +295,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
let SearchPaths {
static_paths,
site_packages,
typeshed_versions: _,
} = Program::get(db).search_paths(db);
let mut dynamic_paths = Vec::new();
@@ -315,12 +360,16 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
let installations = all_pth_files.iter().flat_map(PthFile::items);
for installation in installations {
let installation = system
.canonicalize_path(&installation)
.unwrap_or(installation);
if existing_paths.insert(Cow::Owned(installation.clone())) {
match SearchPath::editable(system, installation) {
match SearchPath::editable(system, installation.clone()) {
Ok(search_path) => {
tracing::debug!(
"Adding editable installation to module resolution path {path}",
path = search_path.as_system_path().unwrap()
path = installation
);
dynamic_paths.push(search_path);
}
@@ -482,7 +531,7 @@ struct ModuleNameIngredient<'db> {
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
let program = Program::get(db);
let target_version = program.target_version(db);
let resolver_state = ResolverState::new(db, target_version);
let resolver_state = ResolverContext::new(db, target_version);
let is_builtin_module =
ruff_python_stdlib::sys::is_builtin_module(target_version.minor, name.as_str());
@@ -504,24 +553,16 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
package_path.push(module_name);
// Must be a `__init__.pyi` or `__init__.py` or it isn't a package.
let kind = if package_path.is_directory(&resolver_state) {
package_path.push("__init__");
ModuleKind::Package
} else {
ModuleKind::Module
};
// TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
if let Some(stub) = package_path.with_pyi_extension().to_file(&resolver_state) {
return Some((search_path.clone(), stub, kind));
// Check for a regular package first (highest priority)
package_path.push("__init__");
if let Some(regular_package) = resolve_file_module(&package_path, &resolver_state) {
return Some((search_path.clone(), regular_package, ModuleKind::Package));
}
if let Some(module) = package_path
.with_py_extension()
.and_then(|path| path.to_file(&resolver_state))
{
return Some((search_path.clone(), module, kind));
// Check for a file module next
package_path.pop();
if let Some(file_module) = resolve_file_module(&package_path, &resolver_state) {
return Some((search_path.clone(), file_module, ModuleKind::Module));
}
// For regular packages, don't search the next search path. All files of that
@@ -542,10 +583,27 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
None
}
/// If `module` exists on disk with either a `.pyi` or `.py` extension,
/// return the [`File`] corresponding to that path.
///
/// `.pyi` files take priority, as they always have priority when
/// resolving modules.
fn resolve_file_module(module: &ModulePath, resolver_state: &ResolverContext) -> Option<File> {
// Stubs have precedence over source files
module
.with_pyi_extension()
.to_file(resolver_state)
.or_else(|| {
module
.with_py_extension()
.and_then(|path| path.to_file(resolver_state))
})
}
fn resolve_package<'a, 'db, I>(
module_search_path: &SearchPath,
components: I,
resolver_state: &ResolverState<'db>,
resolver_state: &ResolverContext<'db>,
) -> Result<ResolvedPackage, PackageKind>
where
I: Iterator<Item = &'a str>,
@@ -568,7 +626,10 @@ where
if is_regular_package {
in_namespace_package = false;
} else if package_path.is_directory(resolver_state) {
} else if package_path.is_directory(resolver_state)
// Pure modules hide namespace packages with the same name
&& resolve_file_module(&package_path, resolver_state).is_none()
{
// A directory without an `__init__.py` is a namespace package, continue with the next folder.
in_namespace_package = true;
} else if in_namespace_package {
@@ -627,6 +688,21 @@ impl PackageKind {
}
}
pub(super) struct ResolverContext<'db> {
pub(super) db: &'db dyn Db,
pub(super) target_version: PythonVersion,
}
impl<'db> ResolverContext<'db> {
pub(super) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
Self { db, target_version }
}
pub(super) fn vendored(&self) -> &VendoredFileSystem {
self.db.vendored()
}
}
#[cfg(test)]
mod tests {
use ruff_db::files::{system_path_to_file, File, FilePath};
@@ -781,7 +857,7 @@ mod tests {
"Search path for {module_name} was unexpectedly {search_path:?}"
);
assert!(
search_path.is_stdlib_search_path(),
search_path.is_standard_library(),
"Expected a stdlib search path, but got {search_path:?}"
);
}
@@ -877,7 +953,7 @@ mod tests {
"Search path for {module_name} was unexpectedly {search_path:?}"
);
assert!(
search_path.is_stdlib_search_path(),
search_path.is_standard_library(),
"Expected a stdlib search path, but got {search_path:?}"
);
}
@@ -1011,6 +1087,25 @@ mod tests {
);
}
#[test]
fn single_file_takes_priority_over_namespace_package() {
const SRC: &[FileSpec] = &[("foo.py", "x = 1"), ("foo/bar.py", "x = 2")];
let TestCase { db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();
let foo_module_name = ModuleName::new_static("foo").unwrap();
let foo_bar_module_name = ModuleName::new_static("foo.bar").unwrap();
// `foo.py` takes priority over the `foo` namespace package
let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
assert_eq!(foo_module.file().path(&db), &src.join("foo.py"));
// `foo.bar` isn't recognised as a module
let foo_bar_module = resolve_module(&db, foo_bar_module_name.clone());
assert_eq!(foo_bar_module, None);
}
#[test]
fn typing_stub_over_module() {
const SRC: &[FileSpec] = &[("foo.py", "print('Hello, world!')"), ("foo.pyi", "x: int")];
@@ -1194,13 +1289,13 @@ mod tests {
Program::from_settings(
&db,
ProgramSettings {
&ProgramSettings {
target_version: PythonVersion::PY38,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: Some(custom_typeshed.clone()),
site_packages: vec![site_packages],
site_packages: SitePackages::Known(vec![site_packages]),
},
},
)
@@ -1699,13 +1794,16 @@ not_a_directory
Program::from_settings(
&db,
ProgramSettings {
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: SystemPathBuf::from("/src"),
custom_typeshed: None,
site_packages: vec![venv_site_packages, system_site_packages],
site_packages: SitePackages::Known(vec![
venv_site_packages,
system_site_packages,
]),
},
},
)

View File

@@ -1,25 +0,0 @@
use ruff_db::vendored::VendoredFileSystem;
use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::python_version::PythonVersion;
pub(crate) struct ResolverState<'db> {
pub(crate) db: &'db dyn Db,
pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
pub(crate) target_version: PythonVersion,
}
impl<'db> ResolverState<'db> {
pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
Self {
db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version,
}
}
pub(crate) fn vendored(&self) -> &VendoredFileSystem {
self.db.vendored()
}
}

View File

@@ -4,7 +4,7 @@ use ruff_db::vendored::VendoredPathBuf;
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::ProgramSettings;
use crate::{ProgramSettings, SitePackages};
/// A test case for the module resolver.
///
@@ -179,6 +179,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
first_party_files,
site_packages_files,
} = self;
TestCaseBuilder {
typeshed_option: typeshed,
target_version,
@@ -195,6 +196,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
site_packages,
target_version,
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
TestCase {
db,
src,
@@ -223,13 +225,13 @@ impl TestCaseBuilder<MockedTypeshed> {
Program::from_settings(
&db,
ProgramSettings {
&ProgramSettings {
target_version,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: Some(typeshed.clone()),
site_packages: vec![site_packages.clone()],
site_packages: SitePackages::Known(vec![site_packages.clone()]),
},
},
)
@@ -279,13 +281,11 @@ impl TestCaseBuilder<VendoredTypeshed> {
Program::from_settings(
&db,
ProgramSettings {
&ProgramSettings {
target_version,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: None,
site_packages: vec![site_packages.clone()],
site_packages: SitePackages::Known(vec![site_packages.clone()]),
..SearchPathSettings::new(src.clone())
},
},
)

View File

@@ -1,92 +1,27 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;
use once_cell::sync::Lazy;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;
use ruff_db::files::{system_path_to_file, File};
use super::vendored::vendored_typeshed_stubs;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::python_version::PythonVersion;
use crate::{Program, PythonVersion};
#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
impl<'db> LazyTypeshedVersions<'db> {
#[must_use]
pub(crate) fn new() -> Self {
Self(OnceCell::new())
}
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
///
/// Simply probing whether a file exists in typeshed is insufficient for this question,
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
/// will still be available (either in a custom typeshed dir or in our vendored copy)
/// even if the user specified Python 3.8 as the target version.
///
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
/// as to whether the module exists on the specified target version. However, VERSIONS does not
/// provide comprehensive information on all submodules, meaning that this method sometimes
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
/// See [`TypeshedVersionsQueryResult`] for more details.
#[must_use]
pub(crate) fn query_module(
&self,
db: &'db dyn Db,
module: &ModuleName,
stdlib_root: Option<&SystemPath>,
target_version: PythonVersion,
) -> TypeshedVersionsQueryResult {
let versions = self.0.get_or_init(|| {
let versions_path = if let Some(system_path) = stdlib_root {
system_path.join("VERSIONS")
} else {
return &VENDORED_VERSIONS;
};
let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
todo!(
"Still need to figure out how to handle VERSIONS files being deleted \
from custom typeshed directories! Expected a file to exist at {versions_path}"
)
};
// TODO(Alex/Micha): If VERSIONS is invalid,
// this should invalidate not just the specific module resolution we're currently attempting,
// but all type inference that depends on any standard-library types.
// Unwrapping here is not correct...
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
});
versions.query_module(module, target_version)
}
}
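
For context, a sketch of the query semantics the doc comment above describes, using hypothetical VERSIONS entries (not from the diff):

// VERSIONS entries (hypothetical):
//   foo: 3.9-        # top-level module added in Python 3.9
//   foo.bar: 3.11-   # submodule explicitly listed
//
// query_module("foo", PY38)     -> TypeshedVersionsQueryResult::DoesNotExist
// query_module("foo", PY39)     -> TypeshedVersionsQueryResult::Exists
// query_module("foo.baz", PY39) -> TypeshedVersionsQueryResult::MaybeExists
//                                  (VERSIONS does not list every submodule)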
#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
db: &dyn Db,
versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
// TODO: Handle IO errors
let file_content = versions_file
.read_to_string(db.upcast())
.unwrap_or_default();
file_content.parse()
}
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
pub(in crate::module_resolver) fn vendored_typeshed_versions(db: &dyn Db) -> TypeshedVersions {
TypeshedVersions::from_str(
&vendored_typeshed_stubs()
&db.vendored()
.read_to_string("stdlib/VERSIONS")
.unwrap(),
.expect("The vendored typeshed stubs should contain a VERSIONS file"),
)
.unwrap()
});
.expect("The VERSIONS file in the vendored typeshed stubs should be well-formed")
}
pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
Program::get(db).search_paths(db).typeshed_versions()
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
@@ -174,7 +109,7 @@ impl TypeshedVersions {
}
#[must_use]
fn query_module(
pub(in crate::module_resolver) fn query_module(
&self,
module: &ModuleName,
target_version: PythonVersion,
@@ -204,7 +139,7 @@ impl TypeshedVersions {
}
}
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {
@@ -391,6 +326,8 @@ mod tests {
use insta::assert_snapshot;
use crate::db::tests::TestDb;
use super::*;
const TYPESHED_STDLIB_DIR: &str = "stdlib";
@@ -412,12 +349,9 @@ mod tests {
#[test]
fn can_parse_vendored_versions_file() {
let versions_data = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/vendor/typeshed/stdlib/VERSIONS"
));
let db = TestDb::new();
let versions = TypeshedVersions::from_str(versions_data).unwrap();
let versions = vendored_typeshed_versions(&db);
assert!(versions.len() > 100);
assert!(versions.len() < 1000);
@@ -454,9 +388,10 @@ mod tests {
#[test]
fn typeshed_versions_consistent_with_vendored_stubs() {
const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
let db = TestDb::new();
let vendored_typeshed_versions = vendored_typeshed_versions(&db);
let vendored_typeshed_dir =
Path::new(env!("CARGO_MANIFEST_DIR")).join("../ruff_vendored/vendor/typeshed");
let mut empty_iterator = true;

View File

@@ -1,8 +0,0 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
TypeshedVersionsQueryResult,
};
mod vendored;
mod versions;

View File

@@ -1,4 +1,4 @@
use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
use ruff_python_ast::{AnyNodeRef, NodeKind};
use ruff_text_size::{Ranged, TextRange};
/// Compact key for a node for use in a hash map.
@@ -11,19 +11,7 @@ pub(super) struct NodeKey {
}
impl NodeKey {
#[inline]
pub(super) fn from_node<'a, N>(node: &N) -> Self
where
N: AstNode,
{
NodeKey {
kind: node.kind(),
range: node.range(),
}
}
#[inline]
pub(super) fn from_ref<'a, N>(node: N) -> Self
pub(super) fn from_node<'a, N>(node: N) -> Self
where
N: Into<AnyNodeRef<'a>>,
{

View File

@@ -3,7 +3,7 @@ use anyhow::Context;
use salsa::Durability;
use salsa::Setter;
use ruff_db::system::SystemPathBuf;
use ruff_db::system::{SystemPath, SystemPathBuf};
use crate::module_resolver::SearchPaths;
use crate::Db;
@@ -12,13 +12,12 @@ use crate::Db;
pub struct Program {
pub target_version: PythonVersion,
#[default]
#[return_ref]
pub(crate) search_paths: SearchPaths,
}
impl Program {
pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> {
let ProgramSettings {
target_version,
search_paths,
@@ -29,16 +28,15 @@ impl Program {
let search_paths = SearchPaths::from_settings(db, search_paths)
.with_context(|| "Invalid search path settings")?;
Ok(Program::builder(settings.target_version)
Ok(Program::builder(settings.target_version, search_paths)
.durability(Durability::HIGH)
.search_paths(search_paths)
.new(db))
}
pub fn update_search_paths(
&self,
self,
db: &mut dyn Db,
search_path_settings: SearchPathSettings,
search_path_settings: &SearchPathSettings,
) -> anyhow::Result<()> {
let search_paths = SearchPaths::from_settings(db, search_path_settings)?;
@@ -49,16 +47,20 @@ impl Program {
Ok(())
}
pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
self.search_paths(db).custom_stdlib()
}
}
#[derive(Debug, Eq, PartialEq)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProgramSettings {
pub target_version: PythonVersion,
pub search_paths: SearchPathSettings,
}
/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone, Default)]
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct SearchPathSettings {
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -74,5 +76,25 @@ pub struct SearchPathSettings {
pub custom_typeshed: Option<SystemPathBuf>,
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: Vec<SystemPathBuf>,
pub site_packages: SitePackages,
}
impl SearchPathSettings {
pub fn new(src_root: SystemPathBuf) -> Self {
Self {
src_root,
extra_paths: vec![],
custom_typeshed: None,
site_packages: SitePackages::Known(vec![]),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SitePackages {
Derived {
venv_path: SystemPathBuf,
},
/// Resolved site packages paths
Known(Vec<SystemPathBuf>),
}
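
A minimal construction sketch for the two variants (not from the diff; the paths are made up):

let derived = SearchPathSettings {
    extra_paths: vec![],
    src_root: SystemPathBuf::from("/src"),
    custom_typeshed: None,
    // Discover the site-packages directories from the virtual environment.
    site_packages: SitePackages::Derived {
        venv_path: SystemPathBuf::from("/src/.venv"),
    },
};

let known = SearchPathSettings {
    // Use already-resolved site-packages paths as-is.
    site_packages: SitePackages::Known(vec![SystemPathBuf::from(
        "/src/.venv/lib/python3.12/site-packages",
    )]),
    ..SearchPathSettings::new(SystemPathBuf::from("/src"))
};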

View File

@@ -54,6 +54,13 @@ impl TryFrom<(&str, &str)> for PythonVersion {
}
}
impl From<(u8, u8)> for PythonVersion {
fn from(value: (u8, u8)) -> Self {
let (major, minor) = value;
Self { major, minor }
}
}
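
A small usage sketch for the new impl (assuming the `Display` impl below renders `major.minor`):

let version: PythonVersion = (3, 12).into();
assert_eq!(version.to_string(), "3.12");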
impl fmt::Display for PythonVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let PythonVersion { major, minor } = self;

View File

@@ -21,12 +21,15 @@ use crate::Db;
pub mod ast_ids;
mod builder;
pub(crate) mod constraint;
pub mod definition;
pub mod expression;
pub mod symbol;
mod use_def;
pub(crate) use self::use_def::{DefinitionWithConstraints, DefinitionWithConstraintsIterator};
pub(crate) use self::use_def::{
BindingWithConstraints, BindingWithConstraintsIterator, DeclarationsIterator,
};
type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
@@ -112,6 +115,9 @@ pub(crate) struct SemanticIndex<'db> {
/// Note: We should not depend on this map when analysing other files or
/// changing a file invalidates all dependents.
ast_ids: IndexVec<FileScopeId, AstIds>,
/// Flags about the global scope (code usage impacting inference)
has_future_annotations: bool,
}
impl<'db> SemanticIndex<'db> {
@@ -212,6 +218,12 @@ impl<'db> SemanticIndex<'db> {
pub(crate) fn node_scope(&self, node: NodeWithScopeRef) -> FileScopeId {
self.scopes_by_node[&node.node_key()]
}
/// Checks if there is an import of `__future__.annotations` in the global scope, which affects
/// the logic for type inference.
pub(super) fn has_future_annotations(&self) -> bool {
self.has_future_annotations
}
}
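
As an illustration (not from the diff), the flag is set for modules like the following, where the future import defers annotation evaluation and therefore changes how annotations must be inferred:

// has_future_annotations() is true for a module whose global scope contains:
//
//     from __future__ import annotations
//
//     def f(x: NotYetDefined) -> None: ...  # annotation is not evaluated eagerly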
pub struct AncestorsIter<'a> {
@@ -325,16 +337,16 @@ mod tests {
use crate::Db;
impl UseDefMap<'_> {
fn first_public_definition(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
self.public_definitions(symbol)
fn first_public_binding(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
self.public_bindings(symbol)
.next()
.map(|constrained_definition| constrained_definition.definition)
.map(|constrained_binding| constrained_binding.binding)
}
fn first_use_definition(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
self.use_definitions(use_id)
fn first_binding_at_use(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
self.bindings_at_use(use_id)
.next()
.map(|constrained_definition| constrained_definition.definition)
.map(|constrained_binding| constrained_binding.binding)
}
}
@@ -396,8 +408,8 @@ mod tests {
let foo = global_table.symbol_id_by_name("foo").unwrap();
let use_def = use_def_map(&db, scope);
let definition = use_def.first_public_definition(foo).unwrap();
assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
let binding = use_def.first_public_binding(foo).unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Import(_)));
}
#[test]
@@ -426,22 +438,19 @@ mod tests {
assert!(
global_table
.symbol_by_name("foo")
.is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
.is_some_and(|symbol| { symbol.is_bound() && !symbol.is_used() }),
"symbols that are defined get the defined flag"
);
let use_def = use_def_map(&db, scope);
let definition = use_def
.first_public_definition(
let binding = use_def
.first_public_binding(
global_table
.symbol_id_by_name("foo")
.expect("symbol to exist"),
)
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::ImportFrom(_)
));
assert!(matches!(binding.kind(&db), DefinitionKind::ImportFrom(_)));
}
#[test]
@@ -454,17 +463,14 @@ mod tests {
assert!(
global_table
.symbol_by_name("foo")
.is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
"a symbol used but not defined in a scope should have only the used flag"
.is_some_and(|symbol| { !symbol.is_bound() && symbol.is_used() }),
"a symbol used but not bound in a scope should have only the used flag"
);
let use_def = use_def_map(&db, scope);
let definition = use_def
.first_public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").expect("symbol exists"))
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::Assignment(_)
));
assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
}
#[test]
@@ -476,12 +482,12 @@ mod tests {
assert_eq!(names(&global_table), vec!["x"]);
let use_def = use_def_map(&db, scope);
let definition = use_def
.first_public_definition(global_table.symbol_id_by_name("x").unwrap())
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").unwrap())
.unwrap();
assert!(matches!(
definition.node(&db),
binding.kind(&db),
DefinitionKind::AugmentedAssignment(_)
));
}
@@ -514,13 +520,10 @@ y = 2
assert_eq!(names(&class_table), vec!["x"]);
let use_def = index.use_def_map(class_scope_id);
let definition = use_def
.first_public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
let binding = use_def
.first_public_binding(class_table.symbol_id_by_name("x").expect("symbol exists"))
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::Assignment(_)
));
assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
}
#[test]
@@ -550,17 +553,14 @@ y = 2
assert_eq!(names(&function_table), vec!["x"]);
let use_def = index.use_def_map(function_scope_id);
let definition = use_def
.first_public_definition(
let binding = use_def
.first_public_binding(
function_table
.symbol_id_by_name("x")
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::Assignment(_)
));
assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
}
#[test]
@@ -575,7 +575,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let index = semantic_index(&db, file);
let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["f", "str", "int"]);
assert_eq!(names(&global_table), vec!["str", "int", "f"]);
let [(function_scope_id, _function_scope)] = index
.child_scopes(FileScopeId::global())
@@ -592,27 +592,27 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let use_def = index.use_def_map(function_scope_id);
for name in ["a", "b", "c", "d"] {
let definition = use_def
.first_public_definition(
let binding = use_def
.first_public_binding(
function_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
definition.node(&db),
binding.kind(&db),
DefinitionKind::ParameterWithDefault(_)
));
}
for name in ["args", "kwargs"] {
let definition = use_def
.first_public_definition(
let binding = use_def
.first_public_binding(
function_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
}
}
@@ -640,23 +640,19 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let use_def = index.use_def_map(lambda_scope_id);
for name in ["a", "b", "c", "d"] {
let definition = use_def
.first_public_definition(
lambda_table.symbol_id_by_name(name).expect("symbol exists"),
)
let binding = use_def
.first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
.unwrap();
assert!(matches!(
definition.node(&db),
binding.kind(&db),
DefinitionKind::ParameterWithDefault(_)
));
}
for name in ["args", "kwargs"] {
let definition = use_def
.first_public_definition(
lambda_table.symbol_id_by_name(name).expect("symbol exists"),
)
let binding = use_def
.first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
.unwrap();
assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
}
}
@@ -666,7 +662,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
fn comprehension_scope() {
let TestCase { db, file } = test_case(
"
[x for x in iter1]
[x for x, y in iter1]
",
);
@@ -690,7 +686,22 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
assert_eq!(names(&comprehension_symbol_table), vec!["x", "y"]);
let use_def = index.use_def_map(comprehension_scope_id);
for name in ["x", "y"] {
let binding = use_def
.first_public_binding(
comprehension_symbol_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
binding.kind(&db),
DefinitionKind::Comprehension(_)
));
}
}
/// Test case to validate that the `x` variable used in the comprehension is referencing the
@@ -726,12 +737,12 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let element_use_id =
element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));
let definition = use_def.first_use_definition(element_use_id).unwrap();
let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
let binding = use_def.first_binding_at_use(element_use_id).unwrap();
let DefinitionKind::Comprehension(comprehension) = binding.kind(&db) else {
panic!("expected generator definition")
};
let ast::Comprehension { target, .. } = comprehension.node();
let name = target.as_name_expr().unwrap().id().as_str();
let target = comprehension.target();
let name = target.id().as_str();
assert_eq!(name, "x");
assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
@@ -790,6 +801,52 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
}
#[test]
fn with_item_definition() {
let TestCase { db, file } = test_case(
"
with item1 as x, item2 as y:
pass
",
);
let index = semantic_index(&db, file);
let global_table = index.symbol_table(FileScopeId::global());
assert_eq!(names(&global_table), vec!["item1", "x", "item2", "y"]);
let use_def = index.use_def_map(FileScopeId::global());
for name in ["x", "y"] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
}
}
#[test]
fn with_item_unpacked_definition() {
let TestCase { db, file } = test_case(
"
with context() as (x, y):
pass
",
);
let index = semantic_index(&db, file);
let global_table = index.symbol_table(FileScopeId::global());
assert_eq!(names(&global_table), vec!["context", "x", "y"]);
let use_def = index.use_def_map(FileScopeId::global());
for name in ["x", "y"] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
}
}
#[test]
fn dupes() {
let TestCase { db, file } = test_case(
@@ -823,14 +880,14 @@ def func():
assert_eq!(names(&func2_table), vec!["y"]);
let use_def = index.use_def_map(FileScopeId::global());
let definition = use_def
.first_public_definition(
let binding = use_def
.first_public_binding(
global_table
.symbol_id_by_name("func")
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
assert!(matches!(binding.kind(&db), DefinitionKind::Function(_)));
}
#[test]
@@ -898,7 +955,7 @@ class C[T]:
assert!(
ann_table
.symbol_by_name("T")
.is_some_and(|s| s.is_defined() && !s.is_used()),
.is_some_and(|s| s.is_bound() && !s.is_used()),
"type parameters are defined by the scope that introduces them"
);
@@ -930,8 +987,8 @@ class C[T]:
};
let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
let use_def = use_def_map(&db, scope);
let definition = use_def.first_use_definition(x_use_id).unwrap();
let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
let binding = use_def.first_binding_at_use(x_use_id).unwrap();
let DefinitionKind::Assignment(assignment) = binding.kind(&db) else {
panic!("should be an assignment definition")
};
let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
@@ -979,7 +1036,7 @@ class C[T]:
}
let TestCase { db, file } = test_case(
r#"
r"
class Test:
def foo():
def bar():
@@ -988,7 +1045,7 @@ class Test:
pass
def x():
pass"#,
pass",
);
let index = semantic_index(&db, file);
@@ -1023,7 +1080,7 @@ def x():
}
#[test]
fn match_stmt_symbols() {
fn match_stmt() {
let TestCase { db, file } = test_case(
"
match subject:
@@ -1037,12 +1094,120 @@ match subject:
",
);
let global_table = symbol_table(&db, global_scope(&db, file));
let global_scope_id = global_scope(&db, file);
let global_table = symbol_table(&db, global_scope_id);
assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
assert_eq!(
names(&global_table),
vec!["subject", "a", "b", "c", "d", "f", "e", "h", "g", "Foo", "i", "j", "k", "l"]
vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
);
let use_def = use_def_map(&db, global_scope_id);
for (name, expected_index) in [
("a", 0),
("b", 0),
("c", 1),
("d", 2),
("e", 0),
("f", 1),
("g", 0),
("h", 1),
("i", 0),
("j", 1),
("k", 0),
("l", 1),
] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
assert_eq!(pattern.index(), expected_index);
} else {
panic!("Expected match pattern definition for {name}");
}
}
}
#[test]
fn nested_match_case() {
let TestCase { db, file } = test_case(
"
match 1:
case first:
match 2:
case second:
pass
",
);
let global_scope_id = global_scope(&db, file);
let global_table = symbol_table(&db, global_scope_id);
assert_eq!(names(&global_table), vec!["first", "second"]);
let use_def = use_def_map(&db, global_scope_id);
for (name, expected_index) in [("first", 0), ("second", 0)] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
assert_eq!(pattern.index(), expected_index);
} else {
panic!("Expected match pattern definition for {name}");
}
}
}
#[test]
fn for_loops_single_assignment() {
let TestCase { db, file } = test_case("for x in a: pass");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["a", "x"]);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").unwrap())
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
}
#[test]
fn for_loops_simple_unpacking() {
let TestCase { db, file } = test_case("for (x, y) in a: pass");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["a", "x", "y"]);
let use_def = use_def_map(&db, scope);
let x_binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").unwrap())
.unwrap();
let y_binding = use_def
.first_public_binding(global_table.symbol_id_by_name("y").unwrap())
.unwrap();
assert!(matches!(x_binding.kind(&db), DefinitionKind::For(_)));
assert!(matches!(y_binding.kind(&db), DefinitionKind::For(_)));
}
#[test]
fn for_loops_complex_unpacking() {
let TestCase { db, file } = test_case("for [((a,) b), (c, d)] in e: pass");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("a").unwrap())
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
}
}

View File

@@ -197,14 +197,12 @@ pub(crate) mod node_key {
pub(crate) struct ExpressionNodeKey(NodeKey);
impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
#[inline]
fn from(value: ast::ExpressionRef<'_>) -> Self {
Self(NodeKey::from_ref(value))
Self(NodeKey::from_node(value))
}
}
impl From<&ast::Expr> for ExpressionNodeKey {
#[inline]
fn from(value: &ast::Expr) -> Self {
Self(NodeKey::from_node(value))
}

View File

@@ -15,17 +15,23 @@ use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
DefinitionNodeRef, ImportFromDefinitionNodeRef,
DefinitionNodeRef, ForStmtDefinitionNodeRef, ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId,
SymbolTableBuilder,
};
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::Db;
use super::constraint::{Constraint, PatternConstraint};
use super::definition::{
DefinitionCategory, ExceptHandlerDefinitionNodeRef, MatchPatternDefinitionNodeRef,
WithItemDefinitionNodeRef,
};
pub(super) struct SemanticIndexBuilder<'db> {
// Builder state
db: &'db dyn Db,
@@ -34,9 +40,14 @@ pub(super) struct SemanticIndexBuilder<'db> {
scope_stack: Vec<FileScopeId>,
/// The assignment we're currently visiting.
current_assignment: Option<CurrentAssignment<'db>>,
/// The match case we're currently visiting.
current_match_case: Option<CurrentMatchCase<'db>>,
/// Flow states at each `break` in the current loop.
loop_break_states: Vec<FlowSnapshot>,
/// Flags about the file's global scope
has_future_annotations: bool,
// Semantic Index fields
scopes: IndexVec<FileScopeId, Scope>,
scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,
@@ -57,8 +68,11 @@ impl<'db> SemanticIndexBuilder<'db> {
module: parsed,
scope_stack: Vec::new(),
current_assignment: None,
current_match_case: None,
loop_break_states: vec![],
has_future_annotations: false,
scopes: IndexVec::new(),
symbol_tables: IndexVec::new(),
ast_ids: IndexVec::new(),
@@ -160,49 +174,95 @@ impl<'db> SemanticIndexBuilder<'db> {
self.current_use_def_map_mut().merge(state);
}
fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
let symbol_table = self.current_symbol_table();
let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
fn add_symbol(&mut self, name: Name) -> ScopedSymbolId {
let (symbol_id, added) = self.current_symbol_table().add_symbol(name);
if added {
let use_def_map = self.current_use_def_map_mut();
use_def_map.add_symbol(symbol_id);
self.current_use_def_map_mut().add_symbol(symbol_id);
}
symbol_id
}
fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
self.current_symbol_table().mark_symbol_bound(id);
}
fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
self.current_symbol_table().mark_symbol_used(id);
}
fn add_definition<'a>(
&mut self,
symbol: ScopedSymbolId,
definition_node: impl Into<DefinitionNodeRef<'a>>,
) -> Definition<'db> {
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
#[allow(unsafe_code)]
// SAFETY: `definition_node` is guaranteed to be a child of `self.module`
let kind = unsafe { definition_node.into_owned(self.module.clone()) };
let category = kind.category();
let definition = Definition::new(
self.db,
self.file,
self.current_scope(),
symbol,
#[allow(unsafe_code)]
unsafe {
definition_node.into_owned(self.module.clone())
},
kind,
countme::Count::default(),
);
self.definitions_by_node
let existing_definition = self
.definitions_by_node
.insert(definition_node.key(), definition);
self.current_use_def_map_mut()
.record_definition(symbol, definition);
debug_assert_eq!(existing_definition, None);
if category.is_binding() {
self.mark_symbol_bound(symbol);
}
let use_def = self.current_use_def_map_mut();
match category {
DefinitionCategory::DeclarationAndBinding => {
use_def.record_declaration_and_binding(symbol, definition);
}
DefinitionCategory::Declaration => use_def.record_declaration(symbol, definition),
DefinitionCategory::Binding => use_def.record_binding(symbol, definition),
}
definition
}
fn add_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
fn add_expression_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
let expression = self.add_standalone_expression(constraint_node);
self.current_use_def_map_mut().record_constraint(expression);
self.current_use_def_map_mut()
.record_constraint(Constraint::Expression(expression));
expression
}
fn add_pattern_constraint(
&mut self,
subject: &ast::Expr,
pattern: &ast::Pattern,
) -> PatternConstraint<'db> {
#[allow(unsafe_code)]
let (subject, pattern) = unsafe {
(
AstNodeRef::new(self.module.clone(), subject),
AstNodeRef::new(self.module.clone(), pattern),
)
};
let pattern_constraint = PatternConstraint::new(
self.db,
self.file,
self.current_scope(),
subject,
pattern,
countme::Count::default(),
);
self.current_use_def_map_mut()
.record_constraint(Constraint::Pattern(pattern_constraint));
pattern_constraint
}
/// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
/// standalone (type narrowing tests, RHS of an assignment.)
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
@@ -247,10 +307,13 @@ impl<'db> SemanticIndexBuilder<'db> {
..
}) => (name, &None, default),
};
// TODO create Definition for typevars
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
if let Some(bound) = bound {
self.visit_expr(bound);
let symbol = self.add_symbol(name.id.clone());
// TODO create Definition for PEP 695 typevars
// note that the "bound" on the typevar is a totally different thing than whether
// or not a name is "bound" by a typevar declaration; the latter is always true.
self.mark_symbol_bound(symbol);
if let Some(bounds) = bound {
self.visit_expr(bounds);
}
if let Some(default) = default {
self.visit_expr(default);
@@ -267,11 +330,23 @@ impl<'db> SemanticIndexBuilder<'db> {
nested_scope
}
/// This method does several things:
/// - It pushes a new scope onto the stack for visiting
/// a list/dict/set comprehension or generator expression
/// - Inside that scope, it visits a list of [`Comprehension`] nodes,
/// assumed to be the "generators" that compose a comprehension
/// (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`).
/// - Inside that scope, it also calls a closure for visiting the outer `elt`
/// of a list/dict/set comprehension or generator expression
/// - It then pops the new scope off the stack
///
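/// As an illustrative sketch of the Python scoping this models:
///
/// ```python
/// [x + z for x in y for z in x]
/// # `y` is evaluated in the enclosing scope; everything else is evaluated
/// # inside the new comprehension scope.
/// ```
///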
/// [`Comprehension`]: ast::Comprehension
fn with_generators_scope(
&mut self,
scope: NodeWithScopeRef,
generators: &'db [ast::Comprehension],
visit_outer_elt: impl FnOnce(&mut Self),
) {
let mut generators_iter = generators.iter();
let Some(generator) = generators_iter.next() else {
@@ -280,6 +355,7 @@ impl<'db> SemanticIndexBuilder<'db> {
// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
// nodes are evaluated in the inner scope.
self.add_standalone_expression(&generator.iter);
self.visit_expr(&generator.iter);
self.push_scope(scope);
@@ -295,6 +371,7 @@ impl<'db> SemanticIndexBuilder<'db> {
}
for generator in generators_iter {
self.add_standalone_expression(&generator.iter);
self.visit_expr(&generator.iter);
self.current_assignment = Some(CurrentAssignment::Comprehension {
@@ -308,11 +385,13 @@ impl<'db> SemanticIndexBuilder<'db> {
self.visit_expr(expr);
}
}
visit_outer_elt(self);
self.pop_scope();
}
fn declare_parameter(&mut self, parameter: AnyParameterRef) {
let symbol = self.add_symbol(parameter.name().id().clone());
let definition = self.add_definition(symbol, parameter);
@@ -320,10 +399,11 @@ impl<'db> SemanticIndexBuilder<'db> {
// Insert a mapping from the parameter to the same definition.
// This ensures that calling `HasTy::ty` on the inner parameter returns
// a valid type (and doesn't panic)
let existing_definition = self.definitions_by_node.insert(
DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
definition,
);
debug_assert_eq!(existing_definition, None);
}
}
@@ -375,6 +455,7 @@ impl<'db> SemanticIndexBuilder<'db> {
scopes_by_expression: self.scopes_by_expression,
scopes_by_node: self.scopes_by_node,
use_def_maps,
has_future_annotations: self.has_future_annotations,
}
}
}
@@ -390,20 +471,6 @@ where
self.visit_decorator(decorator);
}
self.with_type_params(
NodeWithScopeRef::FunctionTypeParameters(function_def),
function_def.type_params.as_deref(),
@@ -424,14 +491,27 @@ where
builder.pop_scope()
},
);
// The default value of the parameters needs to be evaluated in the
// enclosing scope.
for default in function_def
.parameters
.iter_non_variadic_params()
.filter_map(|param| param.default.as_deref())
{
self.visit_expr(default);
}
// The symbol for the function name itself has to be evaluated
// at the end to match the runtime evaluation of parameter defaults
// and return-type annotations.
let symbol = self.add_symbol(function_def.name.id.clone());
self.add_definition(symbol, function_def);
}
ast::Stmt::ClassDef(class) => {
for decorator in &class.decorator_list {
self.visit_decorator(decorator);
}
let symbol = self.add_symbol(class.name.id.clone());
self.add_definition(symbol, class);
self.with_type_params(
@@ -457,7 +537,7 @@ where
Name::new(alias.name.id.split('.').next().unwrap())
};
let symbol = self.add_symbol(symbol_name);
self.add_definition(symbol, alias);
}
}
@@ -469,8 +549,16 @@ where
&alias.name.id
};
// Look for `from __future__ import annotations`, ignoring any `as ...` alias.
// We intentionally don't enforce the rules about the location of `__future__`
// imports here; we assume the user's intent was to apply the `__future__`
// import, so we still honor it (and will also emit a diagnostic about a
// misplaced `__future__` import).
self.has_future_annotations |= alias.name.id == "annotations"
&& node.module.as_deref() == Some("__future__");
let symbol = self.add_symbol(symbol_name.clone());
self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
}
}
@@ -486,7 +574,6 @@ where
}
ast::Stmt::AnnAssign(node) => {
debug_assert!(self.current_assignment.is_none());
self.visit_expr(&node.annotation);
if let Some(value) = &node.value {
self.visit_expr(value);
@@ -512,7 +599,7 @@ where
ast::Stmt::If(node) => {
self.visit_expr(&node.test);
let pre_if = self.flow_snapshot();
self.add_expression_constraint(&node.test);
self.visit_body(&node.body);
let mut post_clauses: Vec<FlowSnapshot> = vec![];
for clause in &node.elif_else_clauses {
@@ -537,14 +624,23 @@ where
self.flow_merge(pre_if);
}
}
ast::Stmt::While(ast::StmtWhile {
test,
body,
orelse,
range: _,
}) => {
self.visit_expr(test);
let pre_loop = self.flow_snapshot();
// Save aside any break states from an outer loop
let saved_break_states = std::mem::take(&mut self.loop_break_states);
// TODO: definitions created inside the body should be fully visible
// to other statements/expressions inside the body --Alex/Carl
self.visit_body(body);
// Get the break states from the body of this loop, and restore the saved outer
// ones.
let break_states =
@@ -553,7 +649,7 @@ where
// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(pre_loop);
self.visit_body(orelse);
// Breaking out of a while loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
@@ -561,9 +657,138 @@ where
self.flow_merge(break_state);
}
}
ast::Stmt::With(ast::StmtWith { items, body, .. }) => {
for item in items {
self.visit_expr(&item.context_expr);
if let Some(optional_vars) = item.optional_vars.as_deref() {
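// Record the context expression for standalone type inference; the
// `as` target's binding is derived from its value (via the
// context-manager protocol).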
self.add_standalone_expression(&item.context_expr);
self.current_assignment = Some(item.into());
self.visit_expr(optional_vars);
self.current_assignment = None;
}
}
self.visit_body(body);
}
ast::Stmt::Break(_) => {
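// Record the flow state at this `break`; it is merged back in after the
// loop's `else` clause, which a `break` bypasses.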
self.loop_break_states.push(self.flow_snapshot());
}
ast::Stmt::For(
for_stmt @ ast::StmtFor {
range: _,
is_async: _,
target,
iter,
body,
orelse,
},
) => {
self.add_standalone_expression(iter);
self.visit_expr(iter);
let pre_loop = self.flow_snapshot();
let saved_break_states = std::mem::take(&mut self.loop_break_states);
debug_assert!(self.current_assignment.is_none());
self.current_assignment = Some(for_stmt.into());
self.visit_expr(target);
self.current_assignment = None;
// TODO: Definitions created by loop variables
// (and definitions created inside the body)
// are fully visible to other statements/expressions inside the body --Alex/Carl
self.visit_body(body);
let break_states =
std::mem::replace(&mut self.loop_break_states, saved_break_states);
// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(pre_loop);
self.visit_body(orelse);
// Breaking out of a `for` loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
for break_state in break_states {
self.flow_merge(break_state);
}
}
ast::Stmt::Match(ast::StmtMatch {
subject,
cases,
range: _,
}) => {
self.add_standalone_expression(subject);
self.visit_expr(subject);
let after_subject = self.flow_snapshot();
let Some((first, remaining)) = cases.split_first() else {
return;
};
self.add_pattern_constraint(subject, &first.pattern);
self.visit_match_case(first);
let mut post_case_snapshots = vec![];
for case in remaining {
post_case_snapshots.push(self.flow_snapshot());
self.flow_restore(after_subject.clone());
self.add_pattern_constraint(subject, &case.pattern);
self.visit_match_case(case);
}
for post_clause_state in post_case_snapshots {
self.flow_merge(post_clause_state);
}
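// If the final case is not an unguarded wildcard, the `match` may complete
// without any case matching, so merge in the state from just after the
// subject expression was evaluated.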
if !cases
.last()
.is_some_and(|case| case.guard.is_none() && case.pattern.is_wildcard())
{
self.flow_merge(after_subject);
}
}
ast::Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
is_star,
range: _,
}) => {
self.visit_body(body);
for except_handler in handlers {
let ast::ExceptHandler::ExceptHandler(except_handler) = except_handler;
let ast::ExceptHandlerExceptHandler {
name: symbol_name,
type_: handled_exceptions,
body: handler_body,
range: _,
} = except_handler;
if let Some(handled_exceptions) = handled_exceptions {
self.visit_expr(handled_exceptions);
}
// If `handled_exceptions` above was `None`, this is something like `except as e:`,
// which is invalid syntax. However, it's still clear that the user *wanted* `e`
// to be bound here, so we create a definition for it regardless.
if let Some(symbol_name) = symbol_name {
let symbol = self.add_symbol(symbol_name.id.clone());
self.add_definition(
symbol,
DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
handler: except_handler,
is_star: *is_star,
}),
);
}
self.visit_body(handler_body);
}
self.visit_body(orelse);
self.visit_body(finalbody);
}
_ => {
walk_stmt(self, stmt);
}
@@ -577,23 +802,18 @@ where
match expr {
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
let (is_use, is_definition) = match (ctx, self.current_assignment) {
(ast::ExprContext::Store, Some(CurrentAssignment::AugAssign(_))) => {
// For augmented assignment, the target expression is also used.
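// For example, `x += 1` reads the current value of `x` and then rebinds it.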
(true, true)
}
(ast::ExprContext::Load, _) => (true, false),
(ast::ExprContext::Store, _) => (false, true),
(ast::ExprContext::Del, _) => (false, true),
(ast::ExprContext::Invalid, _) => (false, false),
};
let symbol = self.add_symbol(id.clone());
if is_definition {
match self.current_assignment {
Some(CurrentAssignment::Assign(assignment)) => {
self.add_definition(
@@ -610,6 +830,16 @@ where
Some(CurrentAssignment::AugAssign(aug_assign)) => {
self.add_definition(symbol, aug_assign);
}
Some(CurrentAssignment::For(node)) => {
self.add_definition(
symbol,
ForStmtDefinitionNodeRef {
iterable: &node.iter,
target: name_node,
is_async: node.is_async,
},
);
}
Some(CurrentAssignment::Named(named)) => {
// TODO(dhruvmanila): If the current scope is a comprehension, then the
// named expression is implicitly nonlocal. This is yet to be
@@ -619,14 +849,29 @@ where
Some(CurrentAssignment::Comprehension { node, first }) => {
self.add_definition(
symbol,
ComprehensionDefinitionNodeRef { node, first },
ComprehensionDefinitionNodeRef {
iterable: &node.iter,
target: name_node,
first,
is_async: node.is_async,
},
);
}
Some(CurrentAssignment::WithItem(with_item)) => {
self.add_definition(
symbol,
WithItemDefinitionNodeRef {
node: with_item,
target: name_node,
},
);
}
None => {}
}
}
if is_use {
self.mark_symbol_used(symbol);
let use_id = self.current_ast_ids().record_use(expr);
self.current_use_def_map_mut().record_use(symbol, use_id);
}
@@ -635,11 +880,11 @@ where
}
ast::Expr::Named(node) => {
debug_assert!(self.current_assignment.is_none());
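// Visit the value before the target: in `x := value`, `value` is
// evaluated at runtime before `x` is bound.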
// TODO walrus in comprehensions is implicitly nonlocal
self.visit_expr(&node.value);
self.current_assignment = Some(node.into());
self.visit_expr(&node.target);
self.current_assignment = None;
}
ast::Expr::Lambda(lambda) => {
if let Some(parameters) = &lambda.parameters {
@@ -663,6 +908,7 @@ where
}
self.visit_expr(lambda.body.as_ref());
self.pop_scope();
}
ast::Expr::If(ast::ExprIf {
body, test, orelse, ..
@@ -683,30 +929,33 @@ where
elt, generators, ..
},
) => {
self.with_generators_scope(
NodeWithScopeRef::ListComprehension(list_comprehension),
generators,
|builder| builder.visit_expr(elt),
);
}
ast::Expr::SetComp(
set_comprehension @ ast::ExprSetComp {
elt, generators, ..
},
) => {
self.with_generators_scope(
NodeWithScopeRef::SetComprehension(set_comprehension),
generators,
|builder| builder.visit_expr(elt),
);
}
ast::Expr::Generator(
generator @ ast::ExprGenerator {
elt, generators, ..
},
) => {
self.with_generators_scope(
NodeWithScopeRef::GeneratorExpression(generator),
generators,
|builder| builder.visit_expr(elt),
);
}
ast::Expr::DictComp(
dict_comprehension @ ast::ExprDictComp {
@@ -716,31 +965,22 @@ where
..
},
) => {
self.with_generators_scope(
NodeWithScopeRef::DictComprehension(dict_comprehension),
generators,
|builder| {
builder.visit_expr(key);
builder.visit_expr(value);
},
);
}
_ => {
walk_expr(self, expr);
}
}
}
fn visit_parameters(&mut self, parameters: &'ast ast::Parameters) {
// Intentionally avoid walking default expressions, as we handle them in the enclosing
// scope.
for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
@@ -748,23 +988,58 @@ where
}
}
fn visit_match_case(&mut self, match_case: &'ast ast::MatchCase) {
debug_assert!(self.current_match_case.is_none());
self.current_match_case = Some(CurrentMatchCase::new(&match_case.pattern));
self.visit_pattern(&match_case.pattern);
self.current_match_case = None;
if let Some(expr) = &match_case.guard {
self.visit_expr(expr);
}
self.visit_body(&match_case.body);
}
fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
if let ast::Pattern::MatchStar(ast::PatternMatchStar {
name: Some(name),
range: _,
}) = pattern
{
let symbol = self.add_symbol(name.id().clone());
let state = self.current_match_case.as_ref().unwrap();
self.add_definition(
symbol,
MatchPatternDefinitionNodeRef {
pattern: state.pattern,
identifier: name,
index: state.index,
},
);
}
walk_pattern(self, pattern);
if let ast::Pattern::MatchAs(ast::PatternMatchAs {
name: Some(name), ..
})
| ast::Pattern::MatchMapping(ast::PatternMatchMapping {
rest: Some(name), ..
}) = pattern
{
let symbol = self.add_symbol(name.id().clone());
let state = self.current_match_case.as_ref().unwrap();
self.add_definition(
symbol,
MatchPatternDefinitionNodeRef {
pattern: state.pattern,
identifier: name,
index: state.index,
},
);
}
self.current_match_case.as_mut().unwrap().index += 1;
}
}
@@ -773,11 +1048,13 @@ enum CurrentAssignment<'a> {
Assign(&'a ast::StmtAssign),
AnnAssign(&'a ast::StmtAnnAssign),
AugAssign(&'a ast::StmtAugAssign),
For(&'a ast::StmtFor),
Named(&'a ast::ExprNamed),
Comprehension {
node: &'a ast::Comprehension,
first: bool,
},
WithItem(&'a ast::WithItem),
}
impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
@@ -798,8 +1075,44 @@ impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
}
}
impl<'a> From<&'a ast::StmtFor> for CurrentAssignment<'a> {
fn from(value: &'a ast::StmtFor) -> Self {
Self::For(value)
}
}
impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
fn from(value: &'a ast::ExprNamed) -> Self {
Self::Named(value)
}
}
impl<'a> From<&'a ast::WithItem> for CurrentAssignment<'a> {
fn from(value: &'a ast::WithItem) -> Self {
Self::WithItem(value)
}
}
struct CurrentMatchCase<'a> {
/// The pattern that's part of the current match case.
pattern: &'a ast::Pattern,
/// The index of the sub-pattern that's being currently visited within the pattern.
///
/// For example:
/// ```py
/// match subject:
/// case a as b: ...
/// case [a, b]: ...
/// case a | b: ...
/// ```
///
/// In all of the above cases, the index would be 0 for `a` and 1 for `b`.
index: u32,
}
impl<'a> CurrentMatchCase<'a> {
fn new(pattern: &'a ast::Pattern) -> Self {
Self { pattern, index: 0 }
}
}


@@ -0,0 +1,39 @@
use ruff_db::files::File;
use ruff_python_ast as ast;
use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
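/// A narrowing constraint recorded during semantic indexing: either a test
/// expression (e.g. an `if` condition) or a `match` pattern check against a
/// subject.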
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum Constraint<'db> {
Expression(Expression<'db>),
Pattern(PatternConstraint<'db>),
}
#[salsa::tracked]
pub(crate) struct PatternConstraint<'db> {
#[id]
pub(crate) file: File,
#[id]
pub(crate) file_scope: FileScopeId,
#[no_eq]
#[return_ref]
pub(crate) subject: AstNodeRef<ast::Expr>,
#[no_eq]
#[return_ref]
pub(crate) pattern: AstNodeRef<ast::Pattern>,
#[no_eq]
count: countme::Count<PatternConstraint<'static>>,
}
impl<'db> PatternConstraint<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
}


@@ -23,7 +23,7 @@ pub struct Definition<'db> {
#[no_eq]
#[return_ref]
pub(crate) kind: DefinitionKind,
#[no_eq]
count: countme::Count<Definition<'static>>,
@@ -33,12 +33,25 @@ impl<'db> Definition<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
pub(crate) fn category(self, db: &'db dyn Db) -> DefinitionCategory {
self.kind(db).category()
}
pub(crate) fn is_declaration(self, db: &'db dyn Db) -> bool {
self.kind(db).category().is_declaration()
}
pub(crate) fn is_binding(self, db: &'db dyn Db) -> bool {
self.kind(db).category().is_binding()
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
Import(&'a ast::Alias),
ImportFrom(ImportFromDefinitionNodeRef<'a>),
For(ForStmtDefinitionNodeRef<'a>),
Function(&'a ast::StmtFunctionDef),
Class(&'a ast::StmtClassDef),
NamedExpression(&'a ast::ExprNamed),
@@ -47,6 +60,9 @@ pub(crate) enum DefinitionNodeRef<'a> {
AugmentedAssignment(&'a ast::StmtAugAssign),
Comprehension(ComprehensionDefinitionNodeRef<'a>),
Parameter(ast::AnyParameterRef<'a>),
WithItem(WithItemDefinitionNodeRef<'a>),
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
}
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -91,12 +107,24 @@ impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
}
}
impl<'a> From<ForStmtDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(value: ForStmtDefinitionNodeRef<'a>) -> Self {
Self::For(value)
}
}
impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
Self::Assignment(node_ref)
}
}
impl<'a> From<WithItemDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: WithItemDefinitionNodeRef<'a>) -> Self {
Self::WithItem(node_ref)
}
}
impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
Self::Comprehension(node)
@@ -109,6 +137,12 @@ impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
}
}
impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: MatchPatternDefinitionNodeRef<'a>) -> Self {
Self::MatchPattern(node)
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::StmtImportFrom,
@@ -121,10 +155,42 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
pub(crate) target: &'a ast::ExprName,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct WithItemDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::WithItem,
pub(crate) target: &'a ast::ExprName,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ForStmtDefinitionNodeRef<'a> {
pub(crate) iterable: &'a ast::Expr,
pub(crate) target: &'a ast::ExprName,
pub(crate) is_async: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ExceptHandlerDefinitionNodeRef<'a> {
pub(crate) handler: &'a ast::ExceptHandlerExceptHandler,
pub(crate) is_star: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::Comprehension,
pub(crate) iterable: &'a ast::Expr,
pub(crate) target: &'a ast::ExprName,
pub(crate) first: bool,
pub(crate) is_async: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct MatchPatternDefinitionNodeRef<'a> {
/// The outermost pattern node in which the identifier being defined occurs.
pub(crate) pattern: &'a ast::Pattern,
/// The identifier being defined.
pub(crate) identifier: &'a ast::Identifier,
/// The index of the identifier in the pattern when visiting the `pattern` node in evaluation
/// order.
pub(crate) index: u32,
}
impl DefinitionNodeRef<'_> {
@@ -161,12 +227,26 @@ impl DefinitionNodeRef<'_> {
DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
}
DefinitionNodeRef::For(ForStmtDefinitionNodeRef {
iterable,
target,
is_async,
}) => DefinitionKind::For(ForStmtDefinitionKind {
iterable: AstNodeRef::new(parsed.clone(), iterable),
target: AstNodeRef::new(parsed, target),
is_async,
}),
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
iterable,
target,
first,
is_async,
}) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
iterable: AstNodeRef::new(parsed.clone(), iterable),
target: AstNodeRef::new(parsed, target),
first,
is_async,
}),
DefinitionNodeRef::Parameter(parameter) => match parameter {
ast::AnyParameterRef::Variadic(parameter) => {
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
@@ -175,6 +255,28 @@ impl DefinitionNodeRef<'_> {
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
}
},
DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef { node, target }) => {
DefinitionKind::WithItem(WithItemDefinitionKind {
node: AstNodeRef::new(parsed.clone(), node),
target: AstNodeRef::new(parsed, target),
})
}
DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
pattern,
identifier,
index,
}) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
pattern: AstNodeRef::new(parsed.clone(), pattern),
identifier: AstNodeRef::new(parsed, identifier),
index,
}),
DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
handler,
is_star,
}) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
handler: AstNodeRef::new(parsed.clone(), handler),
is_star,
}),
}
}
@@ -193,15 +295,60 @@ impl DefinitionNodeRef<'_> {
}) => target.into(),
Self::AnnotatedAssignment(node) => node.into(),
Self::AugmentedAssignment(node) => node.into(),
Self::For(ForStmtDefinitionNodeRef {
iterable: _,
target,
is_async: _,
}) => target.into(),
Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
Self::Parameter(node) => match node {
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
},
Self::WithItem(WithItemDefinitionNodeRef { node: _, target }) => target.into(),
Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
identifier.into()
}
Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
}
}
}
#[derive(Clone, Copy, Debug)]
pub(crate) enum DefinitionCategory {
/// A Definition which binds a value to a name (e.g. `x = 1`).
Binding,
/// A Definition which declares the upper-bound of acceptable types for this name (`x: int`).
Declaration,
/// A Definition which both declares a type and binds a value (e.g. `x: int = 1`).
DeclarationAndBinding,
}
impl DefinitionCategory {
/// True if this definition establishes a "declared type" for the symbol.
///
/// If so, any assignments reached by this definition are in error if they assign a value of a
/// type not assignable to the declared type.
///
/// Annotations establish a declared type. So do function and class definitions, and imports.
pub(crate) fn is_declaration(self) -> bool {
matches!(
self,
DefinitionCategory::Declaration | DefinitionCategory::DeclarationAndBinding
)
}
/// True if this definition assigns a value to the symbol.
///
/// False only for annotated assignments without a RHS.
pub(crate) fn is_binding(self) -> bool {
matches!(
self,
DefinitionCategory::Binding | DefinitionCategory::DeclarationAndBinding
)
}
}
#[derive(Clone, Debug)]
pub enum DefinitionKind {
Import(AstNodeRef<ast::Alias>),
@@ -212,25 +359,102 @@ pub enum DefinitionKind {
Assignment(AssignmentDefinitionKind),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
For(ForStmtDefinitionKind),
Comprehension(ComprehensionDefinitionKind),
Parameter(AstNodeRef<ast::Parameter>),
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
WithItem(WithItemDefinitionKind),
MatchPattern(MatchPatternDefinitionKind),
ExceptHandler(ExceptHandlerDefinitionKind),
}
impl DefinitionKind {
pub(crate) fn category(&self) -> DefinitionCategory {
match self {
// functions, classes, and imports always bind, and we consider them declarations
DefinitionKind::Function(_)
| DefinitionKind::Class(_)
| DefinitionKind::Import(_)
| DefinitionKind::ImportFrom(_) => DefinitionCategory::DeclarationAndBinding,
// a parameter always binds a value, but is only a declaration if annotated
DefinitionKind::Parameter(parameter) => {
if parameter.annotation.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Binding
}
}
// presence of a default is irrelevant, same logic as for a no-default parameter
DefinitionKind::ParameterWithDefault(parameter_with_default) => {
if parameter_with_default.parameter.annotation.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Binding
}
}
// annotated assignment is always a declaration, only a binding if there is a RHS
DefinitionKind::AnnotatedAssignment(ann_assign) => {
if ann_assign.value.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Declaration
}
}
// all of these bind values without declaring a type
DefinitionKind::NamedExpression(_)
| DefinitionKind::Assignment(_)
| DefinitionKind::AugmentedAssignment(_)
| DefinitionKind::For(_)
| DefinitionKind::Comprehension(_)
| DefinitionKind::WithItem(_)
| DefinitionKind::MatchPattern(_)
| DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding,
}
}
}
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct MatchPatternDefinitionKind {
pattern: AstNodeRef<ast::Pattern>,
identifier: AstNodeRef<ast::Identifier>,
index: u32,
}
impl MatchPatternDefinitionKind {
pub(crate) fn pattern(&self) -> &ast::Pattern {
self.pattern.node()
}
pub(crate) fn index(&self) -> u32 {
self.index
}
}
#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
iterable: AstNodeRef<ast::Expr>,
target: AstNodeRef<ast::ExprName>,
first: bool,
is_async: bool,
}
impl ComprehensionDefinitionKind {
pub(crate) fn iterable(&self) -> &ast::Expr {
self.iterable.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
pub(crate) fn is_first(&self) -> bool {
self.first
}
pub(crate) fn is_async(&self) -> bool {
self.is_async
}
}
#[derive(Clone, Debug)]
@@ -250,7 +474,6 @@ impl ImportFromDefinitionKind {
}
#[derive(Clone, Debug)]
pub struct AssignmentDefinitionKind {
assignment: AstNodeRef<ast::StmtAssign>,
target: AstNodeRef<ast::ExprName>,
@@ -266,6 +489,63 @@ impl AssignmentDefinitionKind {
}
}
#[derive(Clone, Debug)]
pub struct WithItemDefinitionKind {
node: AstNodeRef<ast::WithItem>,
target: AstNodeRef<ast::ExprName>,
}
impl WithItemDefinitionKind {
pub(crate) fn node(&self) -> &ast::WithItem {
self.node.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
}
#[derive(Clone, Debug)]
pub struct ForStmtDefinitionKind {
iterable: AstNodeRef<ast::Expr>,
target: AstNodeRef<ast::ExprName>,
is_async: bool,
}
impl ForStmtDefinitionKind {
pub(crate) fn iterable(&self) -> &ast::Expr {
self.iterable.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
pub(crate) fn is_async(&self) -> bool {
self.is_async
}
}
#[derive(Clone, Debug)]
pub struct ExceptHandlerDefinitionKind {
handler: AstNodeRef<ast::ExceptHandlerExceptHandler>,
is_star: bool,
}
impl ExceptHandlerDefinitionKind {
pub(crate) fn node(&self) -> &ast::ExceptHandlerExceptHandler {
self.handler.node()
}
pub(crate) fn handled_exceptions(&self) -> Option<&ast::Expr> {
self.node().type_.as_deref()
}
pub(crate) fn is_star(&self) -> bool {
self.is_star
}
}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct DefinitionNodeKey(NodeKey);
@@ -311,9 +591,9 @@ impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
}
}
impl From<&ast::StmtFor> for DefinitionNodeKey {
fn from(value: &ast::StmtFor) -> Self {
Self(NodeKey::from_node(value))
}
}
@@ -328,3 +608,15 @@ impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::Identifier> for DefinitionNodeKey {
fn from(identifier: &ast::Identifier) -> Self {
Self(NodeKey::from_node(identifier))
}
}
impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
fn from(handler: &ast::ExceptHandlerExceptHandler) -> Self {
Self(NodeKey::from_node(handler))
}
}


@@ -44,16 +44,16 @@ impl Symbol {
}
/// Is the symbol bound in its containing scope?
pub fn is_bound(&self) -> bool {
self.flags.contains(SymbolFlags::IS_BOUND)
}
}
bitflags! {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct SymbolFlags: u8 {
const IS_USED = 1 << 0;
const IS_BOUND = 1 << 1;
/// TODO: This flag is not yet set by anything
const MARKED_GLOBAL = 1 << 2;
/// TODO: This flag is not yet set by anything
@@ -149,6 +149,10 @@ impl FileScopeId {
FileScopeId::from_u32(0)
}
pub fn is_global(self) -> bool {
self == FileScopeId::global()
}
pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
let index = semantic_index(db, file);
index.scope_ids_by_scope[self]
@@ -268,11 +272,7 @@ impl SymbolTableBuilder {
}
}
pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
let hash = SymbolTable::hash_name(&name);
let entry = self
.table
@@ -281,15 +281,9 @@ impl SymbolTableBuilder {
.from_hash(hash, |id| self.table.symbols[*id].name() == &name);
match entry {
RawEntryMut::Occupied(entry) => (*entry.key(), false),
RawEntryMut::Vacant(entry) => {
let symbol = Symbol::new(name);
let id = self.table.symbols.push(symbol);
entry.insert_with_hasher(hash, id, (), |id| {
@@ -300,6 +294,14 @@ impl SymbolTableBuilder {
}
}
pub(super) fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
self.table.symbols[id].insert_flags(SymbolFlags::IS_BOUND);
}
pub(super) fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
self.table.symbols[id].insert_flags(SymbolFlags::IS_USED);
}
pub(super) fn finish(mut self) -> SymbolTable {
self.table.shrink_to_fit();
self.table


@@ -1,5 +1,79 @@
//! First, some terminology:
//!
//! * A "binding" gives a new value to a variable. This includes many different Python statements
//! (assignment statements of course, but also imports, `def` and `class` statements, `as`
//! clauses in `with` and `except` statements, match patterns, and others) and even one
//! expression kind (named expressions). It notably does not include annotated assignment
//! statements without a right-hand side value; these do not assign any new value to the
//! variable. We consider function parameters to be bindings as well, since (from the perspective
//! of the function's internal scope), a function parameter begins the scope bound to a value.
//!
//! * A "declaration" establishes an upper bound type for the values that a variable may be
//! permitted to take on. Annotated assignment statements (with or without an RHS value) are
//! declarations; annotated function parameters are also declarations. We consider `def` and
//! `class` statements to also be declarations, so as to prohibit accidentally shadowing them.
//!
//! Annotated assignments with a right-hand side, and annotated function parameters, are both
//! bindings and declarations.
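//!
//! As a quick illustrative sketch of this terminology (names are arbitrary):
//!
//! ```python
//! x = 1          # a binding only
//! y: int         # a declaration only (no value is bound)
//! z: str = "a"   # both a declaration and a binding
//! ```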
//!
//! We use [`Definition`] as the universal term (and Salsa tracked struct) encompassing both
//! bindings and declarations. (This sacrifices a bit of type safety in exchange for improved
//! performance via fewer Salsa tracked structs and queries, since most declarations -- typed
//! parameters and annotated assignments with RHS -- are both bindings and declarations.)
//!
//! At any given use of a variable, we can ask about both its "declared type" and its "inferred
//! type". These may be different, but the inferred type must always be assignable to the declared
//! type; that is, the declared type is always wider, and the inferred type may be more precise. If
//! we see an invalid assignment, we emit a diagnostic and abandon our inferred type, deferring to
//! the declared type (this allows an explicit annotation to override bad inference, without a
//! cast), maintaining the invariant.
//!
//! The **inferred type** represents the most precise type we believe encompasses all possible
//! values for the variable at a given use. It is based on a union of the bindings which can reach
//! that use through some control flow path, and the narrowing constraints that control flow must
//! have passed through between the binding and the use. For example, in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     use(x)
//! ```
//!
//! For the use of `x` on the third line, the inferred type should be `Literal[1]`. This is based
//! on the binding on the first line, which assigns the type `Literal[1] | None`, and the narrowing
//! constraint on the second line, which rules out the type `None`, since control flow must pass
//! through this constraint to reach the use in question.
//!
//! The **declared type** represents the code author's declaration (usually through a type
//! annotation) that a given variable should not be assigned any type outside the declared type. In
//! our model, declared types are also control-flow-sensitive; we allow the code author to
//! explicitly re-declare the same variable with a different type. So for a given binding of a
//! variable, we will want to ask which declarations of that variable can reach that binding, in
//! order to determine whether the binding is permitted, or should be a type error. For example:
//!
//! ```python
//! from pathlib import Path
//! def f(path: str):
//!     path: Path = Path(path)
//! ```
//!
//! In this function, the initial declared type of `path` is `str`, meaning that the assignment
//! `path = Path(path)` would be a type error, since it assigns to `path` a value whose type is not
//! assignable to `str`. This is the purpose of declared types: they prevent accidental assignment
//! of the wrong type to a variable.
//!
//! But in some cases it is useful to "shadow" or "re-declare" a variable with a new type, and we
//! permit this, as long as it is done with an explicit re-annotation. So `path: Path =
//! Path(path)`, with the explicit `: Path` annotation, is permitted.
//!
//! The general rule is that whatever declaration(s) can reach a given binding determine the
//! validity of that binding. If there is a path in which the symbol is not declared, that is a
//! declaration of `Unknown`. If multiple declarations can reach a binding, we union them, but by
//! default we also issue a type error, since this implicit union of declared types may hide an
//! error.
//!
//! To support type inference, we build a map from each use of a symbol to the bindings live at
//! that use, and the type narrowing constraints that apply to each binding.
//!
//! Let's take this code sample:
//!
@@ -7,148 +81,157 @@
//! x = 1
//! x = 2
//! y = x
//! if flag:
//!     x = 3
//! else:
//!     x = 4
//! z = x
//! ```
//!
//! In this snippet, we have four bindings of `x` (the statements assigning `1`, `2`, `3`, and `4`
//! to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first binding of `x`
//! does not reach any use, because it's immediately replaced by the second binding, before any use
//! happens. (A linter could thus flag the statement `x = 1` as likely superfluous.)
//!
//! The first use of `x` has one live binding: the assignment `x = 2`.
//!
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
//! the `else` branch. Thus, the second use of `x` has two live bindings: `x = 3` and `x = 4`. The
//! `x = 2` assignment is no longer visible, because it must be replaced by either `x = 3` or `x =
//! 4`, no matter which branch was taken. We don't know which branch was taken, so we must consider
//! both bindings as live, which means eventually we would (in type inference) look at these two
//! bindings and infer a type of `Literal[3, 4]` -- the union of `Literal[3]` and `Literal[4]` --
//! for the second use of `x`.
//!
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
//! binding(s) can reach that use. In [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number
//! all uses (that means a `Name` node with `Load` context) so we have a `ScopedUseId` to
//! efficiently represent each use.
//!
//! We also need to know, for a given definition of a symbol, what type narrowing constraints apply
//! to it. For instance, in this code sample:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     use(x)
//! ```
//!
//! At the use of `x`, the live binding of `x` is `1 if flag else None`, which would infer as the
//! type `Literal[1] | None`. But the constraint `x is not None` dominates this use, which means we
//! can rule out the possibility that `x` is `None` here, which should give us the type
//! `Literal[1]` for this use.
//!
//! For declared types, we need to be able to answer the question "given a binding to a symbol,
//! which declarations of that symbol can reach the binding?" This allows us to emit a diagnostic
//! if the binding is attempting to bind a value of a type that is not assignable to the declared
//! type for that symbol, at that point in control flow.
//!
//! We also need to know, given a declaration of a symbol, what the inferred type of that symbol is
//! at that point. This allows us to emit a diagnostic in a case like `x = "foo"; x: int`. The
//! binding `x = "foo"` occurs before the declaration `x: int`, so according to our
//! control-flow-sensitive interpretation of declarations, the assignment is not an error. But the
//! declaration is an error, since it would violate the "inferred type must be assignable to
//! declared type" rule.
//!
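//! As an illustrative sketch of that case:
//!
//! ```python
//! x = "foo"  # fine as a binding: no declaration of `x` reaches it yet
//! x: int     # error: the inferred type of `x` is not assignable to `int`
//! ```
//!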
//! Another case we need to handle is when a symbol is referenced from a different scope (for
//! example, an import or a nonlocal reference). We call this "public" use of a symbol. For public
//! use of a symbol, we prefer the declared type, if there are any declarations of that symbol; if
//! not, we fall back to the inferred type. So we also need to know which declarations and bindings
//! can reach the end of the scope.
//!
//! Technically, public use of a symbol could occur from any point in control flow of the scope
//! where the symbol is defined (via inline imports and import cycles, in the case of an import, or
//! via a function call partway through the local scope that ends up using a symbol from the scope
//! via a global or nonlocal reference.) But modeling this fully accurately requires whole-program
//! analysis that isn't tractable for an efficient analysis, since it means a given symbol could
//! have a different type every place it's referenced throughout the program, depending on the
//! shape of arbitrarily-sized call/import graphs. So we follow other Python type checkers in
//! making the simplifying assumption that usually the scope will finish execution before its
//! symbols are made visible to other scopes; for instance, most imports will import from a
//! complete module, not a partially-executed module. (We may want to get a little smarter than
//! this in the future for some closures, but for now this is where we start.)
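//!
//! (As an illustrative sketch: `from m import x` sees whichever declarations and
//! bindings of `x` reach the end of module `m`'s body, as if every symbol had an
//! implicit use on the module's final line.)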
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `bindings_by_use` vector of [`SymbolBindings`] indexed by [`ScopedUseId`], a
//! `declarations_by_binding` vector of [`SymbolDeclarations`] indexed by [`ScopedDefinitionId`], a
//! `bindings_by_declaration` vector of [`SymbolBindings`] indexed by [`ScopedDefinitionId`], and
//! `public_bindings` and `public_definitions` vectors indexed by [`ScopedSymbolId`]. The values in
//! each of these vectors are (in principle) a list of live bindings at that use/definition, or at
//! the end of the scope for that symbol, with a list of the dominating constraints for each
//! binding.
//!
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, [`SymbolBindings`] and [`SymbolDeclarations`] are structs which use bit-sets to track
//! definitions (and constraints, in the case of bindings) in terms of [`ScopedDefinitionId`] and
//! [`ScopedConstraintId`], which are indices into the `all_definitions` and `all_constraints`
//! indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as a "binding" itself: the initial "binding" for
//! each symbol in a scope. But actually modeling it this way would unnecessarily increase the
//! number of [`Definition`]s that Salsa must track. Since "unbound" is special in that all symbols
//! share it, and it doesn't have any additional per-symbol state, and constraints are irrelevant
//! to it, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`SymbolBindings`] struct. If this flag is `true` for a use of a symbol, it means the symbol
//! has a path to the use in which it is never bound. If this flag is `false`, it means we've
//! eliminated the possibility of unbound: every control flow path to the use includes a binding
//! for this symbol.
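//!
//! For example, in this illustrative sketch the use of `x` may be unbound,
//! because there is a control flow path (when `flag` is falsy) on which `x` is
//! never bound:
//!
//! ```python
//! if flag:
//!     x = 1
//! use(x)
//! ```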
//!
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
//! constraint as they are encountered by the
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
//! each symbol, the builder tracks the `SymbolState` (`SymbolBindings` and `SymbolDeclarations`)
//! for that symbol. When we hit a use or definition of a symbol, we record the necessary parts of
//! the current state for that symbol that we need for that use or definition. When we reach the
//! end of the scope, it records the state for each symbol as the public definitions of that
//! symbol.
//!
//! Let's walk through the above example. Initially we record for `x` that it has no bindings, and
//! may be unbound. When we see `x = 1`, we record that as the sole live binding of `x`, and flip
//! `may_be_unbound` to `false`. Then we see `x = 2`, and we replace `x = 1` as the sole live
//! binding of `x`. When we get to `y = x`, we record that the live bindings for that use of `x`
//! are just the `x = 2` definition.
//!
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
//! happen regardless. Then we take a pre-branch snapshot of the current state for all symbols,
//! which we'll need later. Then we record `flag` as a possible constraint on the current binding
//! (`x = 2`), and go ahead and visit the `if` body. When we see `x = 3`, it replaces `x = 2`
//! (constrained by `flag`) as the sole live binding of `x`. At the end of the `if` body, we take
//! another snapshot of the current symbol state; we'll call this the post-if-body snapshot.
//!
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole binding for `x`
//! again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the sole live binding
//! of `x`.
//!
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
//! (reflecting the end-of-else state, with `x = 4` as the only live binding) with our post-if-body
//! snapshot (which has `x = 3` as the only live binding). The result of this merge is that we now
//! have two live bindings of `x`: `x = 3` and `x = 4`.
//!
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
//!
//! (In the future we may have some other questions we want to answer as well, such as "is this
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
use self::symbol_state::{
BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
ScopedConstraintId, ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
};
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use rustc_hash::FxHashMap;
use super::constraint::Constraint;
mod bitset;
mod symbol_state;
@@ -159,63 +242,135 @@ pub(crate) struct UseDefMap<'db> {
/// Array of [`Definition`] in this scope.
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
/// Array of [`Constraint`] in this scope.
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
/// [`SymbolBindings`] reaching a [`ScopedUseId`].
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
/// [`SymbolBindings`] or [`SymbolDeclarations`] reaching a given [`Definition`].
///
/// If the definition is a binding (only) -- `x = 1` for example -- then we need
/// [`SymbolDeclarations`] to know whether this binding is permitted by the live declarations.
///
/// If the definition is a declaration (only) -- `x: int` for example -- then we need
/// [`SymbolBindings`] to know whether this declaration is consistent with the previously
/// inferred type.
///
/// If the definition is both a declaration and a binding -- `x: int = 1` for example -- then
/// we don't actually need anything here, all we'll need to validate is that our own RHS is a
/// valid assignment to our own annotation.
definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,
/// [`SymbolState`] visible at end of scope for each symbol.
public_symbols: IndexVec<ScopedSymbolId, SymbolState>,
}
impl<'db> UseDefMap<'db> {
pub(crate) fn bindings_at_use(
&self,
use_id: ScopedUseId,
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(&self.bindings_by_use[use_id])
}
pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
self.bindings_by_use[use_id].may_be_unbound()
}
pub(crate) fn public_bindings(
&self,
symbol: ScopedSymbolId,
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(self.public_symbols[symbol].bindings())
}
pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
self.public_symbols[symbol].may_be_unbound()
}
pub(crate) fn bindings_at_declaration(
&self,
declaration: Definition<'db>,
) -> BindingWithConstraintsIterator<'_, 'db> {
if let SymbolDefinitions::Bindings(bindings) = &self.definitions_by_definition[&declaration]
{
self.bindings_iterator(bindings)
} else {
unreachable!("Declaration has non-Bindings in definitions_by_definition");
}
}
pub(crate) fn declarations_at_binding(
&self,
binding: Definition<'db>,
) -> DeclarationsIterator<'_, 'db> {
if let SymbolDefinitions::Declarations(declarations) =
&self.definitions_by_definition[&binding]
{
self.declarations_iterator(declarations)
} else {
unreachable!("Binding has non-Declarations in definitions_by_definition");
}
}
pub(crate) fn public_declarations(
&self,
symbol: ScopedSymbolId,
) -> DeclarationsIterator<'_, 'db> {
let declarations = self.public_symbols[symbol].declarations();
self.declarations_iterator(declarations)
}
pub(crate) fn has_public_declarations(&self, symbol: ScopedSymbolId) -> bool {
!self.public_symbols[symbol].declarations().is_empty()
}
fn bindings_iterator<'a>(
&'a self,
bindings: &'a SymbolBindings,
) -> BindingWithConstraintsIterator<'a, 'db> {
BindingWithConstraintsIterator {
all_definitions: &self.all_definitions,
all_constraints: &self.all_constraints,
inner: bindings.iter(),
}
}
fn declarations_iterator<'a>(
&'a self,
declarations: &'a SymbolDeclarations,
) -> DeclarationsIterator<'a, 'db> {
DeclarationsIterator {
all_definitions: &self.all_definitions,
inner: declarations.iter(),
may_be_undeclared: declarations.may_be_undeclared(),
}
}
}
/// Either live bindings or live declarations for a symbol.
#[derive(Debug, PartialEq, Eq)]
enum SymbolDefinitions {
Bindings(SymbolBindings),
Declarations(SymbolDeclarations),
}
#[derive(Debug)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
inner: BindingIdWithConstraintsIterator<'map>,
}
impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
type Item = BindingWithConstraints<'map, 'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner
.next()
.map(|def_id_with_constraints| BindingWithConstraints {
binding: self.all_definitions[def_id_with_constraints.definition],
constraints: ConstraintsIterator {
all_constraints: self.all_constraints,
constraint_ids: def_id_with_constraints.constraint_ids,
@@ -224,20 +379,20 @@ impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
}
}
impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}
pub(crate) struct BindingWithConstraints<'map, 'db> {
pub(crate) binding: Definition<'db>,
pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}
pub(crate) struct ConstraintsIterator<'map, 'db> {
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
constraint_ids: ConstraintIdIterator<'map>,
}
impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
type Item = Constraint<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.constraint_ids
@@ -248,25 +403,50 @@ impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}
pub(crate) struct DeclarationsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
inner: DeclarationIdIterator<'map>,
may_be_undeclared: bool,
}
impl DeclarationsIterator<'_, '_> {
pub(crate) fn may_be_undeclared(&self) -> bool {
self.may_be_undeclared
}
}
impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
type Item = Definition<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|def_id| self.all_definitions[def_id])
}
}
impl std::iter::FusedIterator for DeclarationsIterator<'_, '_> {}
/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}
#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
/// Append-only array of [`Definition`].
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
/// Append-only array of [`Constraint`].
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
/// Live bindings at each so-far-recorded use.
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
/// Live bindings or declarations for each so-far-recorded definition.
definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,
/// Currently live bindings and declarations for each symbol.
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}
impl<'db> UseDefMapBuilder<'db> {
@@ -275,86 +455,104 @@ impl<'db> UseDefMapBuilder<'db> {
}
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
let new_symbol = self.symbol_states.push(SymbolState::undefined());
debug_assert_eq!(symbol, new_symbol);
}
pub(super) fn record_binding(&mut self, symbol: ScopedSymbolId, binding: Definition<'db>) {
let def_id = self.all_definitions.push(binding);
let symbol_state = &mut self.symbol_states[symbol];
self.definitions_by_definition.insert(
binding,
SymbolDefinitions::Declarations(symbol_state.declarations().clone()),
);
symbol_state.record_binding(def_id);
}
pub(super) fn record_constraint(&mut self, constraint: Constraint<'db>) {
let constraint_id = self.all_constraints.push(constraint);
for state in &mut self.symbol_states {
state.record_constraint(constraint_id);
}
}
pub(super) fn record_declaration(
&mut self,
symbol: ScopedSymbolId,
declaration: Definition<'db>,
) {
let def_id = self.all_definitions.push(declaration);
let symbol_state = &mut self.symbol_states[symbol];
self.definitions_by_definition.insert(
declaration,
SymbolDefinitions::Bindings(symbol_state.bindings().clone()),
);
symbol_state.record_declaration(def_id);
}
pub(super) fn record_declaration_and_binding(
&mut self,
symbol: ScopedSymbolId,
definition: Definition<'db>,
) {
// We don't need to store anything in self.definitions_by_definition.
let def_id = self.all_definitions.push(definition);
let symbol_state = &mut self.symbol_states[symbol];
symbol_state.record_declaration(def_id);
symbol_state.record_binding(def_id);
}
pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
// We have a use of a symbol; clone the current bindings for that symbol, and record them
// as the live bindings for this use.
let new_use = self
.bindings_by_use
.push(self.symbol_states[symbol].bindings().clone());
debug_assert_eq!(use_id, new_use);
}
/// Take a snapshot of the current visible-symbols state.
pub(super) fn snapshot(&self) -> FlowSnapshot {
FlowSnapshot {
symbol_states: self.symbol_states.clone(),
}
}
/// Restore the current builder symbols state to the given snapshot.
pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
// IDs must line up), so the current number of known symbols must always be equal to or
// greater than the number of known symbols in a previously-taken snapshot.
let num_symbols = self.symbol_states.len();
debug_assert!(num_symbols >= snapshot.symbol_states.len());
// Restore the current visible-definitions state to the given snapshot.
self.symbol_states = snapshot.symbol_states;
// If the snapshot we are restoring is missing some symbols we've recorded since, we need
// to fill them in so the symbol IDs continue to line up. Since they don't exist in the
// snapshot, the correct state to fill them in with is "undefined".
self.symbol_states
.resize(num_symbols, SymbolState::undefined());
}
/// Merge the given snapshot into the current state, reflecting that we might have taken either
/// path to get here. The new state for each symbol should include definitions from both the
/// prior state and the snapshot.
pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
// The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
// two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
// one or the other of the ranges to the end of `all_definitions` so as to make them
// adjacent. We can't ever move things around in `all_definitions` because previously
// recorded uses may still have ranges pointing to any part of it; all we can do is append.
// It's possible we may end up with some old entries in `all_definitions` that nobody is
// pointing to, but that's OK.
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
// IDs must line up), so the current number of known symbols must always be equal to or
// greater than the number of known symbols in a previously-taken snapshot.
debug_assert!(self.symbol_states.len() >= snapshot.symbol_states.len());
let mut snapshot_definitions_iter = snapshot.symbol_states.into_iter();
for current in &mut self.symbol_states {
if let Some(snapshot) = snapshot_definitions_iter.next() {
current.merge(snapshot);
} else {
// Symbol not present in snapshot, so it's unbound/undeclared from that path.
current.set_may_be_unbound();
current.set_may_be_undeclared();
}
}
}
@@ -362,14 +560,16 @@ impl<'db> UseDefMapBuilder<'db> {
pub(super) fn finish(mut self) -> UseDefMap<'db> {
self.all_definitions.shrink_to_fit();
self.all_constraints.shrink_to_fit();
self.symbol_states.shrink_to_fit();
self.bindings_by_use.shrink_to_fit();
self.definitions_by_definition.shrink_to_fit();
UseDefMap {
all_definitions: self.all_definitions,
all_constraints: self.all_constraints,
bindings_by_use: self.bindings_by_use,
public_symbols: self.symbol_states,
definitions_by_definition: self.definitions_by_definition,
}
}
}


@@ -32,17 +32,25 @@ impl<const B: usize> BitSet<B> {
bitset
}
pub(super) fn is_empty(&self) -> bool {
self.blocks().iter().all(|&b| b == 0)
}
/// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
fn resize(&mut self, value: u32) {
let num_blocks_needed = (value / 64) + 1;
self.resize_blocks(num_blocks_needed as usize);
}
fn resize_blocks(&mut self, num_blocks_needed: usize) {
match self {
Self::Inline(blocks) => {
let mut vec = blocks.to_vec();
vec.resize(num_blocks_needed, 0);
*self = Self::Heap(vec);
}
Self::Heap(vec) => {
vec.resize(num_blocks_needed, 0);
}
}
}
@@ -89,6 +97,19 @@ impl<const B: usize> BitSet<B> {
}
}
/// Union in-place with another [`BitSet`].
pub(super) fn union(&mut self, other: &BitSet<B>) {
let mut max_len = self.blocks().len();
let other_len = other.blocks().len();
if other_len > max_len {
max_len = other_len;
self.resize_blocks(max_len);
}
for (my_block, other_block) in self.blocks_mut().iter_mut().zip(other.blocks()) {
*my_block |= other_block;
}
}
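Conceptually, `union` is a block-wise bitwise OR, growing the receiver first when the other set has more blocks. In Python terms (a sketch of the semantics only; Python integers are arbitrary-width, so no explicit resize is needed):

```python
a = 1 << 2                # the set {2}
b = (1 << 4) | (1 << 89)  # the set {4, 89}; bit 89 would trigger the heap fallback in Rust
a |= b                    # block-wise OR
assert [i for i in range(128) if (a >> i) & 1] == [2, 4, 89]
```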
/// Return an iterator over the values (in ascending order) in this [`BitSet`].
pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
let blocks = self.blocks();
@@ -218,6 +239,59 @@ mod tests {
assert_bitset(&b1, &[89]);
}
#[test]
fn union() {
let mut b1 = BitSet::<1>::with(2);
let b2 = BitSet::<1>::with(4);
b1.union(&b2);
assert_bitset(&b1, &[2, 4]);
}
#[test]
fn union_mixed_1() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(5);
b1.union(&b2);
assert_bitset(&b1, &[4, 5, 89]);
}
#[test]
fn union_mixed_2() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(89);
b1.union(&b2);
assert_bitset(&b1, &[4, 23, 89]);
}
#[test]
fn union_heap() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(90);
b1.union(&b2);
assert_bitset(&b1, &[4, 89, 90]);
}
#[test]
fn union_heap_2() {
let mut b1 = BitSet::<1>::with(89);
let mut b2 = BitSet::<1>::with(89);
b1.insert(91);
b2.insert(90);
b1.union(&b2);
assert_bitset(&b1, &[89, 90, 91]);
}
#[test]
fn multiple_blocks() {
let mut b = BitSet::<2>::with(120);
@@ -225,4 +299,11 @@ mod tests {
assert!(matches!(b, BitSet::Inline(_)));
assert_bitset(&b, &[45, 120]);
}
#[test]
fn empty() {
let b = BitSet::<1>::default();
assert!(b.is_empty());
}
}


@@ -1,13 +1,13 @@
//! Track live bindings per symbol, applicable constraints per binding, and live declarations.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between bindings and constraints, not just a single set
//! of currently dominating constraints (where "dominating" means "control flow must have passed
//! through it to reach this point"), because we can have dominating constraints that apply to some
//! bindings but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
@@ -18,11 +18,11 @@
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! binding of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each binding, an index into a list of dominating constraints,
//! either, because we can have bindings which are still visible, but subject to constraints that
//! are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
@@ -33,13 +33,16 @@
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` binding, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of live bindings and constraints
//! when needed.
//!
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
//! similar to tracking live bindings.
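//!
//! For example, a minimal sketch of declaration tracking:
//!
//! ```python
//! if flag:
//!     x: int
//! else:
//!     x: str
//! # live declarations of `x` at this point: `int` and `str`
//! ```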
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;
@@ -53,93 +56,200 @@ pub(super) struct ScopedDefinitionId;
pub(super) struct ScopedConstraintId;
/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_BINDING_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live bindings of a symbol in a scope.
type Bindings = BitSet<INLINE_BINDING_BLOCKS>;
type BindingsIterator<'a> = BitSetIterator<'a, INLINE_BINDING_BLOCKS>;
/// Can reference this * 64 total declarations inline; more will fall back to the heap.
const INLINE_DECLARATION_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live declarations of a symbol in a scope.
type Declarations = BitSet<INLINE_DECLARATION_BLOCKS>;
type DeclarationsIterator<'a> = BitSetIterator<'a, INLINE_DECLARATION_BLOCKS>;
/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;
/// Can keep inline this many live bindings per symbol at a given time; more will go to heap.
const INLINE_BINDINGS_PER_SYMBOL: usize = 4;
/// One [`BitSet`] of applicable [`ScopedConstraintId`] per live binding.
type InlineConstraintArray = [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_BINDINGS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;
/// Live declarations for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolDeclarations {
/// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
live_declarations: Declarations,
/// Could the symbol be undeclared at this point?
may_be_undeclared: bool,
}
impl SymbolDeclarations {
fn undeclared() -> Self {
Self {
live_declarations: Declarations::default(),
may_be_undeclared: true,
}
}
/// Record a newly-encountered declaration for this symbol.
fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.live_declarations = Declarations::with(declaration_id.into());
self.may_be_undeclared = false;
}
/// Add undeclared as a possibility for this symbol.
fn set_may_be_undeclared(&mut self) {
self.may_be_undeclared = true;
}
/// Return an iterator over live declarations for this symbol.
pub(super) fn iter(&self) -> DeclarationIdIterator {
DeclarationIdIterator {
inner: self.live_declarations.iter(),
}
}
pub(super) fn is_empty(&self) -> bool {
self.live_declarations.is_empty()
}
pub(super) fn may_be_undeclared(&self) -> bool {
self.may_be_undeclared
}
}
/// Live bindings and narrowing constraints for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolBindings {
/// [`BitSet`]: which bindings (as [`ScopedDefinitionId`]) can reach the current location?
live_bindings: Bindings,
/// For each live binding, which [`ScopedConstraintId`] apply?
///
/// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
/// binding in `live_bindings`.
constraints: Constraints,
/// Could the symbol be unbound at this point?
may_be_unbound: bool,
}
impl SymbolBindings {
fn unbound() -> Self {
Self {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
may_be_unbound: true,
}
}
/// Add Unbound as a possibility for this symbol.
fn set_may_be_unbound(&mut self) {
self.may_be_unbound = true;
}
/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
// The new binding replaces all previous live bindings in this path, and has no
// constraints.
self.live_bindings = Bindings::with(binding_id.into());
self.constraints = Constraints::with_capacity(1);
self.constraints.push(BitSet::default());
self.may_be_unbound = false;
}
/// Add given constraint to all live bindings.
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
for bitset in &mut self.constraints {
bitset.insert(constraint_id.into());
}
}
/// Iterate over currently live bindings for this symbol.
pub(super) fn iter(&self) -> BindingIdWithConstraintsIterator {
BindingIdWithConstraintsIterator {
definitions: self.live_bindings.iter(),
constraints: self.constraints.iter(),
}
}
pub(super) fn may_be_unbound(&self) -> bool {
self.may_be_unbound
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
declarations: SymbolDeclarations,
bindings: SymbolBindings,
}
impl SymbolState {
/// Return a new [`SymbolState`] representing an unbound, undeclared symbol.
pub(super) fn undefined() -> Self {
Self {
declarations: SymbolDeclarations::undeclared(),
bindings: SymbolBindings::unbound(),
}
}
/// Add Unbound as a possibility for this symbol.
pub(super) fn set_may_be_unbound(&mut self) {
self.bindings.set_may_be_unbound();
}
/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
self.bindings.record_binding(binding_id);
}
/// Add given constraint to all live bindings.
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
self.bindings.record_constraint(constraint_id);
}
/// Add undeclared as a possibility for this symbol.
pub(super) fn set_may_be_undeclared(&mut self) {
self.declarations.set_may_be_undeclared();
}
/// Record a newly-encountered declaration of this symbol.
pub(super) fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.declarations.record_declaration(declaration_id);
}
/// Merge another [`SymbolState`] into this one.
pub(super) fn merge(&mut self, b: SymbolState) {
let mut a = Self {
bindings: SymbolBindings {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
may_be_unbound: self.bindings.may_be_unbound || b.bindings.may_be_unbound,
},
declarations: SymbolDeclarations {
live_declarations: self.declarations.live_declarations.clone(),
may_be_undeclared: self.declarations.may_be_undeclared
|| b.declarations.may_be_undeclared,
},
};
std::mem::swap(&mut a, self);
self.declarations
.live_declarations
.union(&b.declarations.live_declarations);
let mut a_defs_iter = a.bindings.live_bindings.iter();
let mut b_defs_iter = b.bindings.live_bindings.iter();
let mut a_constraints_iter = a.bindings.constraints.into_iter();
let mut b_constraints_iter = b.bindings.constraints.into_iter();
let mut opt_a_def: Option<u32> = a_defs_iter.next();
let mut opt_b_def: Option<u32> = b_defs_iter.next();
@@ -152,7 +262,7 @@ impl SymbolState {
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
merged.bindings.live_bindings.insert(def);
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
// bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
// `::merge` always pushes one definition and one constraint bitset together (just
@@ -161,7 +271,7 @@ impl SymbolState {
let constraints = constraints_iter
.next()
.expect("definitions and constraints length mismatch");
merged.bindings.constraints.push(constraints);
};
loop {
@@ -191,7 +301,8 @@ impl SymbolState {
// If the same definition is visible through both paths, any constraint
// that applies on only one path is irrelevant to the resulting type from
// unioning the two paths, so we intersect the constraints.
self.bindings
.constraints
.last_mut()
.unwrap()
.intersect(&a_constraints);
@@ -214,40 +325,49 @@ impl SymbolState {
}
}
pub(super) fn bindings(&self) -> &SymbolBindings {
&self.bindings
}
pub(super) fn declarations(&self) -> &SymbolDeclarations {
&self.declarations
}
/// Could the symbol be unbound?
pub(super) fn may_be_unbound(&self) -> bool {
self.bindings.may_be_unbound()
}
}
/// The default state of a symbol, if we've seen no definitions of it, is undefined (that is,
/// both unbound and undeclared).
impl Default for SymbolState {
fn default() -> Self {
SymbolState::undefined()
}
}
/// A single binding (as [`ScopedDefinitionId`]) with an iterator of its applicable
/// [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct BindingIdWithConstraints<'a> {
pub(super) definition: ScopedDefinitionId,
pub(super) constraint_ids: ConstraintIdIterator<'a>,
}
#[derive(Debug)]
pub(super) struct BindingIdWithConstraintsIterator<'a> {
definitions: BindingsIterator<'a>,
constraints: ConstraintsIterator<'a>,
}
impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
type Item = BindingIdWithConstraints<'a>;
fn next(&mut self) -> Option<Self::Item> {
match (self.definitions.next(), self.constraints.next()) {
(None, None) => None,
(Some(def), Some(constraints)) => Some(BindingIdWithConstraints {
definition: ScopedDefinitionId::from_u32(def),
constraint_ids: ConstraintIdIterator {
wrapped: constraints.iter(),
@@ -259,7 +379,7 @@ impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
}
}
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}
#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
@@ -276,99 +396,193 @@ impl Iterator for ConstraintIdIterator<'_> {
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
#[derive(Debug)]
pub(super) struct DeclarationIdIterator<'a> {
inner: DeclarationsIterator<'a>,
}
impl<'a> Iterator for DeclarationIdIterator<'a> {
type Item = ScopedDefinitionId;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(ScopedDefinitionId::from_u32)
}
}
impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}
#[cfg(test)]
mod tests {
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
fn assert_bindings(symbol: &SymbolState, may_be_unbound: bool, expected: &[&str]) {
assert_eq!(symbol.may_be_unbound(), may_be_unbound);
let actual = symbol
.bindings()
.iter()
.map(|def_id_with_constraints| {
format!(
"{}<{}>",
def_id_with_constraints.definition.as_u32(),
def_id_with_constraints
.constraint_ids
.map(ScopedConstraintId::as_u32)
.map(|idx| idx.to_string())
.collect::<Vec<_>>()
.join(", ")
)
})
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
pub(crate) fn assert_declarations(
symbol: &SymbolState,
may_be_undeclared: bool,
expected: &[u32],
) {
assert_eq!(symbol.declarations.may_be_undeclared(), may_be_undeclared);
let actual = symbol
.declarations()
.iter()
.map(ScopedDefinitionId::as_u32)
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
#[test]
fn unbound() {
let sym = SymbolState::undefined();
assert_bindings(&sym, true, &[]);
}
#[test]
fn with() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
assert_bindings(&sym, false, &["0<>"]);
}
#[test]
fn set_may_be_unbound() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
sym.set_may_be_unbound();
assert_bindings(&sym, true, &["0<>"]);
}
#[test]
fn record_constraint() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
sym.record_constraint(ScopedConstraintId::from_u32(0));
assert_bindings(&sym, false, &["0<0>"]);
}
#[test]
fn merge() {
// merging the same definition with the same constraint keeps the constraint
let mut sym0a = SymbolState::undefined();
sym0a.record_binding(ScopedDefinitionId::from_u32(0));
sym0a.record_constraint(ScopedConstraintId::from_u32(0));
let mut sym0b = SymbolState::undefined();
sym0b.record_binding(ScopedDefinitionId::from_u32(0));
sym0b.record_constraint(ScopedConstraintId::from_u32(0));
sym0a.merge(sym0b);
let mut sym0 = sym0a;
assert_bindings(&sym0, false, &["0<0>"]);
// merging the same definition with differing constraints drops all constraints
let mut sym1a = SymbolState::undefined();
sym1a.record_binding(ScopedDefinitionId::from_u32(1));
sym1a.record_constraint(ScopedConstraintId::from_u32(1));
let mut sym1b = SymbolState::undefined();
sym1b.record_binding(ScopedDefinitionId::from_u32(1));
sym1b.record_constraint(ScopedConstraintId::from_u32(2));
sym1a.merge(sym1b);
let sym1 = sym1a;
assert_bindings(&sym1, false, &["1<>"]);
// merging a constrained definition with unbound keeps both
let mut sym2a = SymbolState::undefined();
sym2a.record_binding(ScopedDefinitionId::from_u32(2));
sym2a.record_constraint(ScopedConstraintId::from_u32(3));
let sym2b = SymbolState::undefined();
sym2a.merge(sym2b);
let sym2 = sym2a;
assert_bindings(&sym2, true, &["2<3>"]);
// merging different definitions keeps them each with their existing constraints
sym0.merge(sym2);
let sym = sym0;
assert_bindings(&sym, true, &["0<0>", "2<3>"]);
}
#[test]
fn no_declaration() {
let sym = SymbolState::undefined();
assert_declarations(&sym, true, &[]);
}
#[test]
fn record_declaration() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
assert_declarations(&sym, false, &[1]);
}
#[test]
fn record_declaration_override() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
sym.record_declaration(ScopedDefinitionId::from_u32(2));
assert_declarations(&sym, false, &[2]);
}
#[test]
fn record_declaration_merge() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
let mut sym2 = SymbolState::undefined();
sym2.record_declaration(ScopedDefinitionId::from_u32(2));
sym.merge(sym2);
assert_declarations(&sym, false, &[1, 2]);
}
#[test]
fn record_declaration_merge_partial_undeclared() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
let sym2 = SymbolState::undefined();
sym.merge(sym2);
assert_declarations(&sym, true, &[1]);
}
#[test]
fn set_may_be_undeclared() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(0));
sym.set_may_be_undeclared();
assert_declarations(&sym, true, &[0]);
}
}


@@ -8,7 +8,7 @@ use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::semantic_index;
use crate::types::{binding_ty, global_symbol_ty, infer_scope_types, Type};
use crate::Db;
pub struct SemanticModel<'db> {
@@ -40,7 +40,7 @@ impl<'db> SemanticModel<'db> {
}
pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
global_symbol_ty(self.db, module.file(), symbol_name)
}
}
@@ -147,24 +147,24 @@ impl HasTy for ast::Expr {
}
}
macro_rules! impl_binding_has_ty {
($ty: ty) => {
impl HasTy for $ty {
#[inline]
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let binding = index.definition(self);
binding_ty(model.db, binding)
}
}
};
}
impl_binding_has_ty!(ast::StmtFunctionDef);
impl_binding_has_ty!(ast::StmtClassDef);
impl_binding_has_ty!(ast::Alias);
impl_binding_has_ty!(ast::Parameter);
impl_binding_has_ty!(ast::ParameterWithDefault);
#[cfg(test)]
mod tests {
@@ -184,14 +184,9 @@ mod tests {
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
},
)?;


@@ -13,9 +13,10 @@ use std::io;
use std::num::NonZeroUsize;
use std::ops::Deref;
use red_knot_python_semantic::PythonVersion;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
type SitePackagesDiscoveryResult<T> = Result<T, SitePackagesDiscoveryError>;
/// Abstraction for a Python virtual environment.
@@ -24,7 +25,7 @@ type SitePackagesDiscoveryResult<T> = Result<T, SitePackagesDiscoveryError>;
/// The format of this file is not defined anywhere, and exactly which keys are present
/// depends on the tool that was used to create the virtual environment.
#[derive(Debug)]
pub(crate) struct VirtualEnvironment {
venv_path: SysPrefixPath,
base_executable_home_path: PythonHomePath,
include_system_site_packages: bool,
@@ -41,7 +42,7 @@ pub struct VirtualEnvironment {
}
impl VirtualEnvironment {
pub(crate) fn new(
path: impl AsRef<SystemPath>,
system: &dyn System,
) -> SitePackagesDiscoveryResult<Self> {
@@ -157,7 +158,7 @@ impl VirtualEnvironment {
/// Return a list of `site-packages` directories that are available from this virtual environment
///
/// See the documentation for `site_packages_dir_from_sys_prefix` for more details.
pub(crate) fn site_packages_directories(
&self,
system: &dyn System,
) -> SitePackagesDiscoveryResult<Vec<SystemPathBuf>> {
@@ -204,7 +205,7 @@ System site-packages will not be used for module resolution.",
}
#[derive(Debug, thiserror::Error)]
pub(crate) enum SitePackagesDiscoveryError {
#[error("Invalid --venv-path argument: {0} could not be canonicalized")]
VenvDirCanonicalizationError(SystemPathBuf, #[source] io::Error),
#[error("Invalid --venv-path argument: {0} does not point to a directory on disk")]
@@ -221,7 +222,7 @@ pub enum SitePackagesDiscoveryError {
/// The various ways in which parsing a `pyvenv.cfg` file could fail
#[derive(Debug)]
pub(crate) enum PyvenvCfgParseErrorKind {
TooManyEquals { line_number: NonZeroUsize },
MalformedKeyValuePair { line_number: NonZeroUsize },
NoHomeKey,
@@ -370,7 +371,7 @@ fn site_packages_directory_from_sys_prefix(
///
/// [`sys.prefix`]: https://docs.python.org/3/library/sys.html#sys.prefix
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct SysPrefixPath(SystemPathBuf);
impl SysPrefixPath {
fn new(


@@ -0,0 +1,87 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::types::{global_symbol_ty, Type};
use crate::Db;
/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CoreStdlibModule {
Builtins,
Types,
Typeshed,
TypingExtensions,
}
impl CoreStdlibModule {
fn name(self) -> ModuleName {
let module_name = match self {
Self::Builtins => "builtins",
Self::Types => "types",
Self::Typeshed => "_typeshed",
Self::TypingExtensions => "typing_extensions",
};
ModuleName::new_static(module_name)
.unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
}
}
/// Look up the type of `symbol` in a given core module
///
/// Returns `Unbound` if the given core module cannot be resolved for some reason
fn core_module_symbol_ty<'db>(
db: &'db dyn Db,
core_module: CoreStdlibModule,
symbol: &str,
) -> Type<'db> {
resolve_module(db, core_module.name())
.map(|module| global_symbol_ty(db, module.file(), symbol))
.unwrap_or(Type::Unbound)
}
/// Look up the type of `symbol` in the builtins namespace.
///
/// Returns `Unbound` if the `builtins` module isn't available for some reason.
#[inline]
pub(crate) fn builtins_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Builtins, symbol)
}
/// Look up the type of `symbol` in the `types` module namespace.
///
/// Returns `Unbound` if the `types` module isn't available for some reason.
#[inline]
pub(crate) fn types_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Types, symbol)
}
/// Look up the type of `symbol` in the `_typeshed` module namespace.
///
/// Returns `Unbound` if the `_typeshed` module isn't available for some reason.
#[inline]
pub(crate) fn typeshed_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Typeshed, symbol)
}
/// Look up the type of `symbol` in the `typing_extensions` module namespace.
///
/// Returns `Unbound` if the `typing_extensions` module isn't available for some reason.
#[inline]
pub(crate) fn typing_extensions_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::TypingExtensions, symbol)
}
/// Get the scope of a core stdlib module.
///
/// Can return `None` if a custom typeshed is used that is missing the core module in question.
fn core_module_scope(db: &dyn Db, core_module: CoreStdlibModule) -> Option<ScopeId<'_>> {
resolve_module(db, core_module.name()).map(|module| global_scope(db, module.file()))
}
/// Get the `builtins` module scope.
///
/// Can return `None` if a custom typeshed is used that is missing `builtins.pyi`.
pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
core_module_scope(db, CoreStdlibModule::Builtins)
}

File diff suppressed because it is too large


@@ -25,11 +25,12 @@
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//! eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
use crate::types::{builtins_symbol_ty, IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};
use smallvec::SmallVec;
pub(crate) struct UnionBuilder<'db> {
elements: Vec<Type<'db>>,
db: &'db dyn Db,
}
@@ -37,7 +38,7 @@ impl<'db> UnionBuilder<'db> {
pub(crate) fn new(db: &'db dyn Db) -> Self {
Self {
db,
elements: vec![],
}
}
@@ -45,11 +46,59 @@ impl<'db> UnionBuilder<'db> {
pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
match ty {
Type::Union(union) => {
let new_elements = union.elements(self.db);
self.elements.reserve(new_elements.len());
for element in new_elements {
self = self.add(*element);
}
}
Type::Never => {}
_ => {
let bool_pair = if let Type::BooleanLiteral(b) = ty {
Some(Type::BooleanLiteral(!b))
} else {
None
};
let mut to_add = ty;
let mut to_remove = SmallVec::<[usize; 2]>::new();
for (index, element) in self.elements.iter().enumerate() {
if Some(*element) == bool_pair {
to_add = builtins_symbol_ty(self.db, "bool");
to_remove.push(index);
// The type we are adding is a BooleanLiteral, which doesn't have any
// subtypes. And we just found that the union already contained our
// mirror-image BooleanLiteral, so it can't also contain bool or any
// supertype of bool. Therefore, we are done.
break;
}
if ty.is_subtype_of(self.db, *element) {
return self;
} else if element.is_subtype_of(self.db, ty) {
to_remove.push(index);
}
}
match to_remove[..] {
[] => self.elements.push(to_add),
[index] => self.elements[index] = to_add,
_ => {
let mut current_index = 0;
let mut to_remove = to_remove.into_iter();
let mut next_to_remove_index = to_remove.next();
self.elements.retain(|_| {
let retain = if Some(current_index) == next_to_remove_index {
next_to_remove_index = to_remove.next();
false
} else {
true
};
current_index += 1;
retain
});
self.elements.push(to_add);
}
}
}
}
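In Python terms, the two simplifications implemented in `add` above look like this (a sketch; the type aliases are illustrative, not part of the codebase):

```python
from typing import Literal, Union

# The boolean-pair rule: adding Literal[True] and Literal[False] to the
# same union collapses the pair to `bool`.
BothBools = Union[Literal[True], Literal[False]]  # builder result: bool

# Subtype absorption: a new element that is a subtype of an existing
# element is dropped, and existing subtypes of the new element are removed.
StrLike = Union[str, Literal["a"]]  # builder result: str
```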
@@ -60,7 +109,7 @@ impl<'db> UnionBuilder<'db> {
match self.elements.len() {
0 => Type::Never,
1 => self.elements[0],
_ => Type::Union(UnionType::new(self.db, self.elements.into())),
}
}
}
@@ -143,11 +192,12 @@ impl<'db> IntersectionBuilder<'db> {
if self.intersections.len() == 1 {
self.intersections.pop().unwrap().build(self.db)
} else {
UnionType::from_elements(
self.db,
self.intersections
.into_iter()
.map(|inner| inner.build(self.db)),
)
}
}
}
@@ -245,17 +295,32 @@ impl<'db> InnerIntersectionBuilder<'db> {
#[cfg(test)]
mod tests {
use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::types::{builtins_symbol_ty, UnionBuilder};
use crate::ProgramSettings;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid search path settings");
db
}
#[test]
@@ -263,19 +328,16 @@ mod tests {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let union = UnionType::from_elements(&db, [t0, t1]).expect_union();
assert_eq!(union.elements(&db), &[t0, t1]);
}
#[test]
fn build_union_single() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionType::from_elements(&db, [t0]);
assert_eq!(ty, t0);
}
@@ -283,7 +345,6 @@ mod tests {
fn build_union_empty() {
let db = setup_db();
let ty = UnionBuilder::new(&db).build();
assert_eq!(ty, Type::Never);
}
@@ -291,32 +352,83 @@ mod tests {
fn build_union_never() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionType::from_elements(&db, [t0, Type::Never]);
assert_eq!(ty, t0);
}
#[test]
fn build_union_bool() {
let db = setup_db();
let bool_ty = builtins_symbol_ty(&db, "bool");
let t0 = Type::BooleanLiteral(true);
let t1 = Type::BooleanLiteral(true);
let t2 = Type::BooleanLiteral(false);
let t3 = Type::IntLiteral(17);
let union = UnionType::from_elements(&db, [t0, t1, t3]).expect_union();
assert_eq!(union.elements(&db), &[t0, t3]);
let union = UnionType::from_elements(&db, [t0, t1, t2, t3]).expect_union();
assert_eq!(union.elements(&db), &[bool_ty, t3]);
}
#[test]
fn build_union_flatten() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let u1 = UnionType::from_elements(&db, [t0, t1]);
let union = UnionType::from_elements(&db, [u1, t2]).expect_union();
assert_eq!(union.elements(&db), &[t0, t1, t2]);
}
#[test]
fn build_union_simplify_subtype() {
let db = setup_db();
let t0 = builtins_symbol_ty(&db, "str").to_instance(&db);
let t1 = Type::LiteralString;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let u1 = UnionType::from_elements(&db, [t1, t0]);
assert_eq!(u0, t0);
assert_eq!(u1, t0);
}
#[test]
fn build_union_no_simplify_unknown() {
let db = setup_db();
let t0 = builtins_symbol_ty(&db, "str").to_instance(&db);
let t1 = Type::Unknown;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let u1 = UnionType::from_elements(&db, [t1, t0]);
assert_eq!(u0.expect_union().elements(&db), &[t0, t1]);
assert_eq!(u1.expect_union().elements(&db), &[t1, t0]);
}
#[test]
fn build_union_subsume_multiple() {
let db = setup_db();
let str_ty = builtins_symbol_ty(&db, "str").to_instance(&db);
let int_ty = builtins_symbol_ty(&db, "int").to_instance(&db);
let object_ty = builtins_symbol_ty(&db, "object").to_instance(&db);
let unknown_ty = Type::Unknown;
let u0 = UnionType::from_elements(&db, [str_ty, unknown_ty, int_ty, object_ty]);
assert_eq!(u0.expect_union().elements(&db), &[unknown_ty, object_ty]);
}
impl<'db> IntersectionType<'db> {
fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.positive(db).into_iter().copied().collect()
}
fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.negative(db).into_iter().copied().collect()
}
}
@@ -325,16 +437,14 @@ mod tests {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ta = Type::Any;
let intersection = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t0)
.build()
.expect_intersection();
assert_eq!(intersection.pos_vec(&db), &[ta]);
assert_eq!(intersection.neg_vec(&db), &[t0]);
}
#[test]
@@ -347,16 +457,14 @@ mod tests {
.add_positive(ta)
.add_negative(t1)
.build();
let intersection = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_positive(i0)
.build()
.expect_intersection();
assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
assert_eq!(intersection.neg_vec(&db), &[t1]);
}
#[test]
@@ -369,16 +477,14 @@ mod tests {
.add_positive(ta)
.add_negative(t1)
.build();
let intersection = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_negative(i0)
.build()
.expect_intersection();
assert_eq!(intersection.pos_vec(&db), &[t2, t1]);
assert_eq!(intersection.neg_vec(&db), &[ta]);
}
#[test]
@@ -387,16 +493,14 @@ mod tests {
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let ta = Type::Any;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let union = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_positive(u0)
.build()
.expect_union();
let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements(&db)[..] else {
panic!("expected a union of two intersections");
};
assert_eq!(i0.pos_vec(&db), &[ta, t0]);


@@ -1,14 +1,22 @@
//! Display implementations for types.
use std::fmt::{self, Display, Formatter};
use ruff_db::display::FormatterJoinExtension;
use ruff_python_ast::str::Quote;
use ruff_python_literal::escape::AsciiEscape;
use crate::types::{IntersectionType, Type, UnionType};
use crate::Db;
use rustc_hash::FxHashMap;
impl<'db> Type<'db> {
pub fn display(&self, db: &'db dyn Db) -> DisplayType {
DisplayType { ty: self, db }
}
fn representation(self, db: &'db dyn Db) -> DisplayRepresentation<'db> {
DisplayRepresentation { db, ty: self }
}
}
#[derive(Copy, Clone)]
@@ -18,7 +26,41 @@ pub struct DisplayType<'db> {
}
impl Display for DisplayType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let representation = self.ty.representation(self.db);
if matches!(
self.ty,
Type::IntLiteral(_)
| Type::BooleanLiteral(_)
| Type::StringLiteral(_)
| Type::BytesLiteral(_)
| Type::Class(_)
| Type::Function(_)
| Type::RevealTypeFunction(_)
) {
write!(f, "Literal[{representation}]",)
} else {
representation.fmt(f)
}
}
}
impl fmt::Debug for DisplayType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
/// Writes the string representation of a type, which is the value displayed either as
/// `Literal[<repr>]` or `Literal[<repr1>, <repr2>]` for literal types or as `<repr>` for
/// non-literals.
struct DisplayRepresentation<'db> {
ty: Type<'db>,
db: &'db dyn Db,
}
impl Display for DisplayRepresentation<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.ty {
Type::Any => f.write_str("Any"),
Type::Never => f.write_str("Never"),
@@ -29,25 +71,39 @@ impl Display for DisplayType<'_> {
write!(f, "<module '{:?}'>", file.path(self.db))
}
// TODO functions and classes should display using a fully qualified name
Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
Type::Instance(class) => f.write_str(&class.name(self.db)),
Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
Type::Class(class) => f.write_str(class.name(self.db)),
Type::Instance(class) => f.write_str(class.name(self.db)),
Type::Function(function) | Type::RevealTypeFunction(function) => {
f.write_str(function.name(self.db))
}
Type::Union(union) => union.display(self.db).fmt(f),
Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
Type::BooleanLiteral(boolean) => {
write!(f, "Literal[{}]", if *boolean { "True" } else { "False" })
Type::IntLiteral(n) => n.fmt(f),
Type::BooleanLiteral(boolean) => f.write_str(if boolean { "True" } else { "False" }),
Type::StringLiteral(string) => {
write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
}
Type::LiteralString => f.write_str("LiteralString"),
Type::BytesLiteral(bytes) => {
let escape =
AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);
escape.bytes_repr().write(f)
}
Type::Tuple(tuple) => {
f.write_str("tuple[")?;
let elements = tuple.elements(self.db);
if elements.is_empty() {
f.write_str("()")?;
} else {
elements.display(self.db).fmt(f)?;
}
f.write_str("]")
}
}
}
}
impl std::fmt::Debug for DisplayType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
}
}
impl<'db> UnionType<'db> {
fn display(&'db self, db: &'db dyn Db) -> DisplayUnionType<'db> {
DisplayUnionType { db, ty: self }
@@ -60,54 +116,90 @@ struct DisplayUnionType<'db> {
}
impl Display for DisplayUnionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let union = self.ty;
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let elements = self.ty.elements(self.db);
let (int_literals, other_types): (Vec<Type>, Vec<Type>) = union
.elements(self.db)
.iter()
.copied()
.partition(|ty| matches!(ty, Type::IntLiteral(_)));
// Group literal types by kind.
let mut grouped_literals = FxHashMap::default();
let mut first = true;
if !int_literals.is_empty() {
f.write_str("Literal[")?;
let mut nums: Vec<_> = int_literals
.into_iter()
.filter_map(|ty| {
if let Type::IntLiteral(n) = ty {
Some(n)
} else {
None
}
})
.collect();
nums.sort_unstable();
for num in nums {
if !first {
f.write_str(", ")?;
}
write!(f, "{num}")?;
first = false;
for element in elements {
if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
grouped_literals
.entry(literal_kind)
.or_insert_with(Vec::new)
.push(*element);
}
f.write_str("]")?;
}
for ty in other_types {
if !first {
f.write_str(" | ")?;
};
first = false;
write!(f, "{}", ty.display(self.db))?;
let mut join = f.join(" | ");
for element in elements {
if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
let Some(mut literals) = grouped_literals.remove(&literal_kind) else {
continue;
};
if literal_kind == LiteralTypeKind::IntLiteral {
literals.sort_unstable_by_key(|ty| ty.expect_int_literal());
}
join.entry(&DisplayLiteralGroup {
literals,
db: self.db,
});
} else {
join.entry(&element.display(self.db));
}
}
join.finish()?;
debug_assert!(grouped_literals.is_empty());
Ok(())
}
}
impl std::fmt::Debug for DisplayUnionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
impl fmt::Debug for DisplayUnionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
struct DisplayLiteralGroup<'db> {
literals: Vec<Type<'db>>,
db: &'db dyn Db,
}
impl Display for DisplayLiteralGroup<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.write_str("Literal[")?;
f.join(", ")
.entries(self.literals.iter().map(|ty| ty.representation(self.db)))
.finish()?;
f.write_str("]")
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
enum LiteralTypeKind {
Class,
Function,
IntLiteral,
StringLiteral,
BytesLiteral,
}
impl TryFrom<Type<'_>> for LiteralTypeKind {
type Error = ();
fn try_from(value: Type<'_>) -> Result<Self, Self::Error> {
match value {
Type::Class(_) => Ok(Self::Class),
Type::Function(_) | Type::RevealTypeFunction(_) => Ok(Self::Function),
Type::IntLiteral(_) => Ok(Self::IntLiteral),
Type::StringLiteral(_) => Ok(Self::StringLiteral),
Type::BytesLiteral(_) => Ok(Self::BytesLiteral),
_ => Err(()),
}
}
}
@@ -123,30 +215,159 @@ struct DisplayIntersectionType<'db> {
}
impl Display for DisplayIntersectionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let mut first = true;
for (neg, ty) in self
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let tys = self
.ty
.positive(self.db)
.iter()
.map(|ty| (false, ty))
.chain(self.ty.negative(self.db).iter().map(|ty| (true, ty)))
{
if !first {
f.write_str(" & ")?;
};
first = false;
if neg {
f.write_str("~")?;
};
write!(f, "{}", ty.display(self.db))?;
}
Ok(())
.map(|&ty| DisplayMaybeNegatedType {
ty,
db: self.db,
negated: false,
})
.chain(
self.ty
.negative(self.db)
.iter()
.map(|&ty| DisplayMaybeNegatedType {
ty,
db: self.db,
negated: true,
}),
);
f.join(" & ").entries(tys).finish()
}
}
impl std::fmt::Debug for DisplayIntersectionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
impl fmt::Debug for DisplayIntersectionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
struct DisplayMaybeNegatedType<'db> {
ty: Type<'db>,
db: &'db dyn Db,
negated: bool,
}
impl<'db> Display for DisplayMaybeNegatedType<'db> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
if self.negated {
f.write_str("~")?;
}
self.ty.display(self.db).fmt(f)
}
}
pub(crate) trait TypeArrayDisplay<'db> {
fn display(&self, db: &'db dyn Db) -> DisplayTypeArray;
}
impl<'db> TypeArrayDisplay<'db> for Box<[Type<'db>]> {
fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
DisplayTypeArray { types: self, db }
}
}
impl<'db> TypeArrayDisplay<'db> for Vec<Type<'db>> {
fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
DisplayTypeArray { types: self, db }
}
}
pub(crate) struct DisplayTypeArray<'b, 'db> {
types: &'b [Type<'db>],
db: &'db dyn Db,
}
impl<'db> Display for DisplayTypeArray<'_, 'db> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.join(", ")
.entries(self.types.iter().map(|ty| ty.display(self.db)))
.finish()
}
}
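
All of these `Display` impls lean on `ruff_db`'s `FormatterJoinExtension`, whose definition is not part of this diff. Judging by the call sites (`f.join(sep).entry(..)`, `.entries(..)`, `.finish()`), it behaves roughly like this standalone sketch (an assumption, not the actual `ruff_db` source):

```
// Standalone sketch of the join-writer pattern used above.
use std::fmt::{Display, Formatter, Result};

struct Join<'a, 'b> {
    fmt: &'a mut Formatter<'b>,
    separator: &'static str,
    result: Result,
    seen_first: bool,
}

trait FormatterJoinExtension<'b> {
    fn join<'a>(&'a mut self, separator: &'static str) -> Join<'a, 'b>;
}

impl<'b> FormatterJoinExtension<'b> for Formatter<'b> {
    fn join<'a>(&'a mut self, separator: &'static str) -> Join<'a, 'b> {
        Join { fmt: self, separator, result: Ok(()), seen_first: false }
    }
}

impl Join<'_, '_> {
    // Writes the separator before every entry except the first.
    fn entry(&mut self, item: &dyn Display) -> &mut Self {
        if self.result.is_ok() {
            if self.seen_first {
                self.result = self.fmt.write_str(self.separator);
            }
            self.seen_first = true;
            if self.result.is_ok() {
                self.result = item.fmt(self.fmt);
            }
        }
        self
    }

    fn entries<I>(&mut self, items: I) -> &mut Self
    where
        I: IntoIterator,
        I::Item: Display,
    {
        for item in items {
            self.entry(&item);
        }
        self
    }

    fn finish(&mut self) -> Result {
        self.result
    }
}
```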
#[cfg(test)]
mod tests {
use ruff_db::files::system_path_to_file;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use crate::db::tests::TestDb;
use crate::types::{global_symbol_ty, BytesLiteralType, StringLiteralType, Type, UnionType};
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid search path settings");
db
}
#[test]
fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
let mut db = setup_db();
db.write_dedented(
"src/main.py",
"
def foo(x: int) -> int:
return x + 1
def bar(s: str) -> str:
return s
class A: ...
class B: ...
",
)?;
let mod_file = system_path_to_file(&db, "src/main.py").expect("Expected file to exist.");
let union_elements = &[
Type::Unknown,
Type::IntLiteral(-1),
global_symbol_ty(&db, mod_file, "A"),
Type::StringLiteral(StringLiteralType::new(&db, Box::from("A"))),
Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([0]))),
Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([7]))),
Type::IntLiteral(0),
Type::IntLiteral(1),
Type::StringLiteral(StringLiteralType::new(&db, Box::from("B"))),
global_symbol_ty(&db, mod_file, "foo"),
global_symbol_ty(&db, mod_file, "bar"),
global_symbol_ty(&db, mod_file, "B"),
Type::BooleanLiteral(true),
Type::None,
];
let union = UnionType::from_elements(&db, union_elements).expect_union();
let display = format!("{}", union.display(&db));
assert_eq!(
display,
concat!(
"Unknown | ",
"Literal[-1, 0, 1] | ",
"Literal[A, B] | ",
"Literal[\"A\", \"B\"] | ",
"Literal[b\"\\x00\", b\"\\x07\"] | ",
"Literal[foo, bar] | ",
"Literal[True] | ",
"None"
)
);
Ok(())
}
}

File diff suppressed because it is too large

View File

@@ -1,9 +1,10 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::constraint::{Constraint, PatternConstraint};
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type, TypeInference};
use crate::types::{infer_expression_types, IntersectionBuilder, Type};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
@@ -27,62 +28,114 @@ use std::sync::Arc;
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
db: &'db dyn Db,
test: Expression<'db>,
constraint: Constraint<'db>,
definition: Definition<'db>,
) -> Option<Type<'db>> {
all_narrowing_constraints(db, test)
.get(&definition.symbol(db))
.copied()
match constraint {
Constraint::Expression(expression) => {
all_narrowing_constraints_for_expression(db, expression)
.get(&definition.symbol(db))
.copied()
}
Constraint::Pattern(pattern) => all_narrowing_constraints_for_pattern(db, pattern)
.get(&definition.symbol(db))
.copied(),
}
}
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints<'db>(
fn all_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db,
test: Expression<'db>,
pattern: PatternConstraint<'db>,
) -> NarrowingConstraints<'db> {
NarrowingConstraintsBuilder::new(db, test).finish()
NarrowingConstraintsBuilder::new(db, Constraint::Pattern(pattern)).finish()
}
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_expression<'db>(
db: &'db dyn Db,
expression: Expression<'db>,
) -> NarrowingConstraints<'db> {
NarrowingConstraintsBuilder::new(db, Constraint::Expression(expression)).finish()
}
type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;
struct NarrowingConstraintsBuilder<'db> {
db: &'db dyn Db,
expression: Expression<'db>,
constraint: Constraint<'db>,
constraints: NarrowingConstraints<'db>,
}
impl<'db> NarrowingConstraintsBuilder<'db> {
fn new(db: &'db dyn Db, expression: Expression<'db>) -> Self {
fn new(db: &'db dyn Db, constraint: Constraint<'db>) -> Self {
Self {
db,
expression,
constraint,
constraints: NarrowingConstraints::default(),
}
}
fn finish(mut self) -> NarrowingConstraints<'db> {
if let ast::Expr::Compare(expr_compare) = self.expression.node_ref(self.db).node() {
self.add_expr_compare(expr_compare);
match self.constraint {
Constraint::Expression(expression) => self.evaluate_expression_constraint(expression),
Constraint::Pattern(pattern) => self.evaluate_pattern_constraint(pattern),
}
// TODO other test expression kinds
self.constraints.shrink_to_fit();
self.constraints
}
fn evaluate_expression_constraint(&mut self, expression: Expression<'db>) {
if let ast::Expr::Compare(expr_compare) = expression.node_ref(self.db).node() {
self.add_expr_compare(expr_compare, expression);
}
// TODO other test expression kinds
}
fn evaluate_pattern_constraint(&mut self, pattern: PatternConstraint<'db>) {
let subject = pattern.subject(self.db);
match pattern.pattern(self.db).node() {
ast::Pattern::MatchValue(_) => {
// TODO
}
ast::Pattern::MatchSingleton(singleton_pattern) => {
self.add_match_pattern_singleton(subject, singleton_pattern);
}
ast::Pattern::MatchSequence(_) => {
// TODO
}
ast::Pattern::MatchMapping(_) => {
// TODO
}
ast::Pattern::MatchClass(_) => {
// TODO
}
ast::Pattern::MatchStar(_) => {
// TODO
}
ast::Pattern::MatchAs(_) => {
// TODO
}
ast::Pattern::MatchOr(_) => {
// TODO
}
}
}
fn symbols(&self) -> Arc<SymbolTable> {
symbol_table(self.db, self.scope())
}
fn scope(&self) -> ScopeId<'db> {
self.expression.scope(self.db)
match self.constraint {
Constraint::Expression(expression) => expression.scope(self.db),
Constraint::Pattern(pattern) => pattern.scope(self.db),
}
}
fn inference(&self) -> &'db TypeInference<'db> {
infer_expression_types(self.db, self.expression)
}
fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare) {
fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare, expression: Expression<'db>) {
let ast::ExprCompare {
range: _,
left,
@@ -99,7 +152,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
// SAFETY: we should always have a symbol for every Name node.
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
let scope = self.scope();
let inference = self.inference();
let inference = infer_expression_types(self.db, expression);
for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
if matches!(op, ast::CmpOp::IsNot) {
@@ -112,4 +165,22 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
}
}
}
fn add_match_pattern_singleton(
&mut self,
subject: &ast::Expr,
pattern: &ast::PatternMatchSingleton,
) {
if let Some(ast::ExprName { id, .. }) = subject.as_name_expr() {
// SAFETY: we should always have a symbol for every Name node.
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
let ty = match pattern.value {
ast::Singleton::None => Type::None,
ast::Singleton::True => Type::BooleanLiteral(true),
ast::Singleton::False => Type::BooleanLiteral(false),
};
self.constraints.insert(symbol, ty);
}
}
}
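
The `Constraint` and `PatternConstraint` types imported at the top of this file are defined elsewhere (in `semantic_index::constraint`) and are not shown in this diff. From their usage above, they presumably have roughly this shape:

```
// Assumed shape, inferred from the call sites above; the actual definitions
// live in semantic_index/constraint.rs.
#[derive(Clone, Copy, Debug)]
pub(crate) enum Constraint<'db> {
    /// A test expression, e.g. the condition of an `if` statement.
    Expression(Expression<'db>),
    /// One `case` pattern of a `match` statement, plus its subject.
    Pattern(PatternConstraint<'db>),
}

// `PatternConstraint` is presumably a salsa ingredient exposing at least:
//   subject(db) -> &ast::Expr
//   pattern(db) -> &ast::Pattern
//   scope(db)   -> ScopeId
```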

View File

@@ -1 +0,0 @@
1ace5718deaf3041f8e3d1dc9c9e8a8e830e517f

View File

@@ -1,100 +0,0 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping
LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int
def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...
if sys.version_info >= (3, 11):
def getencoding() -> str: ...
def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...
# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
LC_MESSAGES: int
ABDAY_1: int
ABDAY_2: int
ABDAY_3: int
ABDAY_4: int
ABDAY_5: int
ABDAY_6: int
ABDAY_7: int
ABMON_1: int
ABMON_2: int
ABMON_3: int
ABMON_4: int
ABMON_5: int
ABMON_6: int
ABMON_7: int
ABMON_8: int
ABMON_9: int
ABMON_10: int
ABMON_11: int
ABMON_12: int
DAY_1: int
DAY_2: int
DAY_3: int
DAY_4: int
DAY_5: int
DAY_6: int
DAY_7: int
ERA: int
ERA_D_T_FMT: int
ERA_D_FMT: int
ERA_T_FMT: int
MON_1: int
MON_2: int
MON_3: int
MON_4: int
MON_5: int
MON_6: int
MON_7: int
MON_8: int
MON_9: int
MON_10: int
MON_11: int
MON_12: int
CODESET: int
D_T_FMT: int
D_FMT: int
T_FMT: int
T_FMT_AMPM: int
AM_STR: int
PM_STR: int
RADIXCHAR: int
THOUSEP: int
YESEXPR: int
NOEXPR: int
CRNCYSTR: int
ALT_DIGITS: int
def nl_langinfo(key: int, /) -> str: ...
# This is dependent on `libintl.h` which is a part of `gettext`
# system dependency. These functions might be missing.
# But, we always say that they are present.
def gettext(msg: str, /) -> str: ...
def dgettext(domain: str | None, msg: str, /) -> str: ...
def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
def textdomain(domain: str | None, /) -> str: ...
def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...

View File

@@ -1 +0,0 @@
DEBUG: bool | None

View File

@@ -1,49 +0,0 @@
import sys
codes: dict[str, int]
messages: dict[int, str]
XML_ERROR_ABORTED: str
XML_ERROR_ASYNC_ENTITY: str
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str
XML_ERROR_BAD_CHAR_REF: str
XML_ERROR_BINARY_ENTITY_REF: str
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str
XML_ERROR_DUPLICATE_ATTRIBUTE: str
XML_ERROR_ENTITY_DECLARED_IN_PE: str
XML_ERROR_EXTERNAL_ENTITY_HANDLING: str
XML_ERROR_FEATURE_REQUIRES_XML_DTD: str
XML_ERROR_FINISHED: str
XML_ERROR_INCOMPLETE_PE: str
XML_ERROR_INCORRECT_ENCODING: str
XML_ERROR_INVALID_TOKEN: str
XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str
XML_ERROR_MISPLACED_XML_PI: str
XML_ERROR_NOT_STANDALONE: str
XML_ERROR_NOT_SUSPENDED: str
XML_ERROR_NO_ELEMENTS: str
XML_ERROR_NO_MEMORY: str
XML_ERROR_PARAM_ENTITY_REF: str
XML_ERROR_PARTIAL_CHAR: str
XML_ERROR_PUBLICID: str
XML_ERROR_RECURSIVE_ENTITY_REF: str
XML_ERROR_SUSPENDED: str
XML_ERROR_SUSPEND_PE: str
XML_ERROR_SYNTAX: str
XML_ERROR_TAG_MISMATCH: str
XML_ERROR_TEXT_DECL: str
XML_ERROR_UNBOUND_PREFIX: str
XML_ERROR_UNCLOSED_CDATA_SECTION: str
XML_ERROR_UNCLOSED_TOKEN: str
XML_ERROR_UNDECLARING_PREFIX: str
XML_ERROR_UNDEFINED_ENTITY: str
XML_ERROR_UNEXPECTED_STATE: str
XML_ERROR_UNKNOWN_ENCODING: str
XML_ERROR_XML_DECL: str
if sys.version_info >= (3, 11):
XML_ERROR_RESERVED_PREFIX_XML: str
XML_ERROR_RESERVED_PREFIX_XMLNS: str
XML_ERROR_RESERVED_NAMESPACE_URI: str
XML_ERROR_INVALID_ARGUMENT: str
XML_ERROR_NO_BUFFER: str
XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str

View File

@@ -1,11 +0,0 @@
XML_CTYPE_ANY: int
XML_CTYPE_CHOICE: int
XML_CTYPE_EMPTY: int
XML_CTYPE_MIXED: int
XML_CTYPE_NAME: int
XML_CTYPE_SEQ: int
XML_CQUANT_NONE: int
XML_CQUANT_OPT: int
XML_CQUANT_PLUS: int
XML_CQUANT_REP: int

View File

@@ -11,7 +11,6 @@ repository = { workspace = true }
license = { workspace = true }
[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
ruff_db = { workspace = true }
ruff_notebook = { workspace = true }

View File

@@ -3,11 +3,11 @@
use std::num::NonZeroUsize;
use std::panic::PanicInfo;
use lsp_server as lsp;
use lsp_types as types;
use lsp_server::Message;
use lsp_types::{
ClientCapabilities, DiagnosticOptions, NotebookCellSelector, NotebookDocumentSyncOptions,
NotebookSelector, TextDocumentSyncCapability, TextDocumentSyncOptions,
ClientCapabilities, DiagnosticOptions, DiagnosticServerCapabilities, MessageType,
ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
Url,
};
use self::connection::{Connection, ConnectionInitializer};
@@ -74,7 +74,7 @@ impl Server {
init_params.client_info.as_ref(),
);
let mut workspace_for_url = |url: lsp_types::Url| {
let mut workspace_for_url = |url: Url| {
let Some(workspace_settings) = workspace_settings.as_mut() else {
return (url, ClientSettings::default());
};
@@ -93,13 +93,18 @@ impl Server {
}).collect())
.or_else(|| {
tracing::warn!("No workspace(s) were provided during initialization. Using the current working directory as a default workspace...");
let uri = types::Url::from_file_path(std::env::current_dir().ok()?).ok()?;
let uri = Url::from_file_path(std::env::current_dir().ok()?).ok()?;
Some(vec![workspace_for_url(uri)])
})
.ok_or_else(|| {
anyhow::anyhow!("Failed to get the current working directory while creating a default workspace.")
})?;
if workspaces.len() > 1 {
// TODO(dhruvmanila): Support multi-root workspaces
anyhow::bail!("Multi-root workspaces are not supported yet");
}
Ok(Self {
connection,
worker_threads,
@@ -149,7 +154,7 @@ impl Server {
try_show_message(
"The Ruff language server exited with a panic. See the logs for more details."
.to_string(),
lsp_types::MessageType::ERROR,
MessageType::ERROR,
)
.ok();
}));
@@ -182,9 +187,9 @@ impl Server {
break;
}
let task = match msg {
lsp::Message::Request(req) => api::request(req),
lsp::Message::Notification(notification) => api::notification(notification),
lsp::Message::Response(response) => scheduler.response(response),
Message::Request(req) => api::request(req),
Message::Notification(notification) => api::notification(notification),
Message::Response(response) => scheduler.response(response),
};
scheduler.dispatch(task);
}
@@ -206,28 +211,17 @@ impl Server {
.unwrap_or_default()
}
fn server_capabilities(position_encoding: PositionEncoding) -> types::ServerCapabilities {
types::ServerCapabilities {
fn server_capabilities(position_encoding: PositionEncoding) -> ServerCapabilities {
ServerCapabilities {
position_encoding: Some(position_encoding.into()),
diagnostic_provider: Some(types::DiagnosticServerCapabilities::Options(
DiagnosticOptions {
identifier: Some(crate::DIAGNOSTIC_NAME.into()),
..Default::default()
},
)),
notebook_document_sync: Some(types::OneOf::Left(NotebookDocumentSyncOptions {
save: Some(false),
notebook_selector: [NotebookSelector::ByCells {
notebook: None,
cells: vec![NotebookCellSelector {
language: "python".to_string(),
}],
}]
.to_vec(),
diagnostic_provider: Some(DiagnosticServerCapabilities::Options(DiagnosticOptions {
identifier: Some(crate::DIAGNOSTIC_NAME.into()),
..Default::default()
})),
text_document_sync: Some(TextDocumentSyncCapability::Options(
TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
..Default::default()
},
)),

View File

@@ -1,13 +1,15 @@
use crate::{server::schedule::Task, session::Session, system::url_to_system_path};
use lsp_server as server;
use crate::server::schedule::Task;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};
mod diagnostics;
mod notifications;
mod requests;
mod traits;
use notifications as notification;
use red_knot_workspace::db::RootDatabase;
use requests as request;
use self::traits::{NotificationHandler, RequestHandler};
@@ -43,6 +45,7 @@ pub(super) fn notification<'a>(notif: server::Notification) -> Task<'a> {
match notif.method.as_str() {
notification::DidCloseTextDocumentHandler::METHOD => local_notification_task::<notification::DidCloseTextDocumentHandler>(notif),
notification::DidOpenTextDocumentHandler::METHOD => local_notification_task::<notification::DidOpenTextDocumentHandler>(notif),
notification::DidChangeTextDocumentHandler::METHOD => local_notification_task::<notification::DidChangeTextDocumentHandler>(notif),
notification::DidOpenNotebookHandler::METHOD => {
local_notification_task::<notification::DidOpenNotebookHandler>(notif)
}
@@ -82,12 +85,18 @@ fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
Ok(Task::background(schedule, move |session: &Session| {
let url = R::document_url(&params).into_owned();
let Ok(path) = url_to_system_path(&url) else {
let Ok(path) = url_to_any_system_path(&url) else {
return Box::new(|_, _| {});
};
let db = session
.workspace_db_for_path(path.as_std_path())
.map(RootDatabase::snapshot);
let db = match path {
AnySystemPath::System(path) => {
match session.workspace_db_for_path(path.as_std_path()) {
Some(db) => db.snapshot(),
None => session.default_workspace_db().snapshot(),
}
}
AnySystemPath::SystemVirtual(_) => session.default_workspace_db().snapshot(),
};
let Some(snapshot) = session.take_snapshot(url) else {
return Box::new(|_, _| {});

View File

@@ -1,9 +1,11 @@
mod did_change;
mod did_close;
mod did_close_notebook;
mod did_open;
mod did_open_notebook;
mod set_trace;
pub(super) use did_change::DidChangeTextDocumentHandler;
pub(super) use did_close::DidCloseTextDocumentHandler;
pub(super) use did_close_notebook::DidCloseNotebookHandler;
pub(super) use did_open::DidOpenTextDocumentHandler;

View File

@@ -0,0 +1,55 @@
use lsp_server::ErrorCode;
use lsp_types::notification::DidChangeTextDocument;
use lsp_types::DidChangeTextDocumentParams;
use red_knot_workspace::watch::ChangeEvent;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};
pub(crate) struct DidChangeTextDocumentHandler;
impl NotificationHandler for DidChangeTextDocumentHandler {
type NotificationType = DidChangeTextDocument;
}
impl SyncNotificationHandler for DidChangeTextDocumentHandler {
fn run(
session: &mut Session,
_notifier: Notifier,
_requester: &mut Requester,
params: DidChangeTextDocumentParams,
) -> Result<()> {
let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
return Ok(());
};
let key = session.key_from_url(params.text_document.uri);
session
.update_text_document(&key, params.content_changes, params.text_document.version)
.with_failure_code(ErrorCode::InternalError)?;
match path {
AnySystemPath::System(path) => {
let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
Some(db) => db,
None => session.default_workspace_db_mut(),
};
db.apply_changes(vec![ChangeEvent::file_content_changed(path)], None);
}
AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.default_workspace_db_mut();
db.apply_changes(vec![ChangeEvent::ChangedVirtual(virtual_path)], None);
}
}
// TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics
Ok(())
}
}

View File

@@ -1,8 +1,7 @@
use lsp_server::ErrorCode;
use lsp_types::notification::DidCloseTextDocument;
use lsp_types::DidCloseTextDocumentParams;
use ruff_db::files::File;
use red_knot_workspace::watch::ChangeEvent;
use crate::server::api::diagnostics::clear_diagnostics;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
@@ -10,7 +9,7 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};
pub(crate) struct DidCloseTextDocumentHandler;
@@ -25,7 +24,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
_requester: &mut Requester,
params: DidCloseTextDocumentParams,
) -> Result<()> {
let Ok(path) = url_to_system_path(&params.text_document.uri) else {
let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
return Ok(());
};
@@ -34,8 +33,9 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
.close_document(&key)
.with_failure_code(ErrorCode::InternalError)?;
if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
File::sync_path(db, &path);
if let AnySystemPath::SystemVirtual(virtual_path) = path {
let db = session.default_workspace_db_mut();
db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
}
clear_diagnostics(key.url(), &notifier)?;

View File

@@ -1,14 +1,14 @@
use lsp_types::notification::DidCloseNotebookDocument;
use lsp_types::DidCloseNotebookDocumentParams;
use ruff_db::files::File;
use red_knot_workspace::watch::ChangeEvent;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};
pub(crate) struct DidCloseNotebookHandler;
@@ -23,7 +23,7 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
_requester: &mut Requester,
params: DidCloseNotebookDocumentParams,
) -> Result<()> {
let Ok(path) = url_to_system_path(&params.notebook_document.uri) else {
let Ok(path) = url_to_any_system_path(&params.notebook_document.uri) else {
return Ok(());
};
@@ -32,8 +32,9 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
.close_document(&key)
.with_failure_code(lsp_server::ErrorCode::InternalError)?;
if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
File::sync_path(db, &path);
if let AnySystemPath::SystemVirtual(virtual_path) = path {
let db = session.default_workspace_db_mut();
db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
}
Ok(())

View File

@@ -1,13 +1,14 @@
use lsp_types::notification::DidOpenTextDocument;
use lsp_types::DidOpenTextDocumentParams;
use ruff_db::files::system_path_to_file;
use red_knot_workspace::watch::ChangeEvent;
use ruff_db::Db;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};
use crate::TextDocument;
pub(crate) struct DidOpenTextDocumentHandler;
@@ -23,17 +24,25 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler {
_requester: &mut Requester,
params: DidOpenTextDocumentParams,
) -> Result<()> {
let Ok(path) = url_to_system_path(&params.text_document.uri) else {
let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
return Ok(());
};
let document = TextDocument::new(params.text_document.text, params.text_document.version);
session.open_text_document(params.text_document.uri, document);
if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
// TODO(dhruvmanila): Store the `file` in `DocumentController`
let file = system_path_to_file(db, &path).unwrap();
file.sync(db);
match path {
AnySystemPath::System(path) => {
let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
Some(db) => db,
None => session.default_workspace_db_mut(),
};
db.apply_changes(vec![ChangeEvent::Opened(path)], None);
}
AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.default_workspace_db_mut();
db.files().virtual_file(db, &virtual_path);
}
}
// TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

View File

@@ -2,7 +2,8 @@ use lsp_server::ErrorCode;
use lsp_types::notification::DidOpenNotebookDocument;
use lsp_types::DidOpenNotebookDocumentParams;
use ruff_db::files::system_path_to_file;
use red_knot_workspace::watch::ChangeEvent;
use ruff_db::Db;
use crate::edit::NotebookDocument;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
@@ -10,7 +11,7 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};
pub(crate) struct DidOpenNotebookHandler;
@@ -25,7 +26,7 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
_requester: &mut Requester,
params: DidOpenNotebookDocumentParams,
) -> Result<()> {
let Ok(path) = url_to_system_path(&params.notebook_document.uri) else {
let Ok(path) = url_to_any_system_path(&params.notebook_document.uri) else {
return Ok(());
};
@@ -38,10 +39,18 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
.with_failure_code(ErrorCode::InternalError)?;
session.open_notebook_document(params.notebook_document.uri.clone(), notebook);
if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
// TODO(dhruvmanila): Store the `file` in `DocumentController`
let file = system_path_to_file(db, &path).unwrap();
file.sync(db);
match path {
AnySystemPath::System(path) => {
let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
Some(db) => db,
None => session.default_workspace_db_mut(),
};
db.apply_changes(vec![ChangeEvent::Opened(path)], None);
}
AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.default_workspace_db_mut();
db.files().virtual_file(db, &virtual_path);
}
}
// TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

View File

@@ -26,13 +26,11 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {
fn run_with_snapshot(
snapshot: DocumentSnapshot,
db: Option<RootDatabase>,
db: RootDatabase,
_notifier: Notifier,
_params: DocumentDiagnosticParams,
) -> Result<DocumentDiagnosticReportResult> {
let diagnostics = db
.map(|db| compute_diagnostics(&snapshot, &db))
.unwrap_or_default();
let diagnostics = compute_diagnostics(&snapshot, &db);
Ok(DocumentDiagnosticReportResult::Report(
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
@@ -48,10 +46,19 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {
fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Diagnostic> {
let Some(file) = snapshot.file(db) else {
tracing::info!(
"No file found for snapshot for '{}'",
snapshot.query().file_url()
);
return vec![];
};
let Ok(diagnostics) = db.check_file(file) else {
return vec![];
let diagnostics = match db.check_file(file) {
Ok(diagnostics) => diagnostics,
Err(cancelled) => {
tracing::info!("Diagnostics computation {cancelled}");
return vec![];
}
};
diagnostics
@@ -65,12 +72,12 @@ fn to_lsp_diagnostic(message: &str) -> Diagnostic {
let words = message.split(':').collect::<Vec<_>>();
let (range, message) = match words.as_slice() {
[_filename, line, column, message] => {
let line = line.parse::<u32>().unwrap_or_default();
[_, _, line, column, message] | [_, line, column, message] => {
let line = line.parse::<u32>().unwrap_or_default().saturating_sub(1);
let column = column.parse::<u32>().unwrap_or_default();
(
Range::new(
Position::new(line.saturating_sub(1), column.saturating_sub(1)),
Position::new(line, column.saturating_sub(1)),
Position::new(line, column),
),
message.trim(),
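
For reference, the widened match arm above also copes with messages whose path segment itself contains a colon; with an ordinary path the split yields four fields (illustrative message string, format taken from the wasm test further down):

```
// Illustrative only; the message format is assumed from the wasm test below.
let message = "/src/main.py:1:8: Cannot resolve import 'random22'";
let words: Vec<&str> = message.split(':').collect();
assert_eq!(
    words,
    ["/src/main.py", "1", "8", " Cannot resolve import 'random22'"]
);
// Four fields hit the `[_, line, column, message]` arm: `line` becomes the
// zero-based 0, and the diagnostic spans columns 7..8 after trimming.
```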

View File

@@ -34,7 +34,7 @@ pub(super) trait BackgroundDocumentRequestHandler: RequestHandler {
fn run_with_snapshot(
snapshot: DocumentSnapshot,
db: Option<RootDatabase>,
db: RootDatabase,
notifier: Notifier,
params: <<Self as RequestHandler>::RequestType as Request>::Params,
) -> super::Result<<<Self as RequestHandler>::RequestType as Request>::Result>;

View File

@@ -6,16 +6,16 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::anyhow;
use lsp_types::{ClientCapabilities, Url};
use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url};
use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::SystemPath;
use ruff_db::Db;
use crate::edit::{DocumentKey, NotebookDocument};
use crate::system::{url_to_system_path, LSPSystem};
use crate::edit::{DocumentKey, DocumentVersion, NotebookDocument};
use crate::system::{url_to_any_system_path, AnySystemPath, LSPSystem};
use crate::{PositionEncoding, TextDocument};
pub(crate) use self::capabilities::ResolvedClientCapabilities;
@@ -67,19 +67,10 @@ impl Session {
.ok_or_else(|| anyhow!("Workspace path is not a valid UTF-8 path: {:?}", path))?;
let system = LSPSystem::new(index.clone());
let metadata = WorkspaceMetadata::from_path(system_path, &system)?;
// TODO(dhruvmanila): Get the values from the client settings
let program_settings = ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: system_path.to_path_buf(),
site_packages: vec![],
custom_typeshed: None,
},
};
let metadata = WorkspaceMetadata::from_path(system_path, &system, None)?;
// TODO(micha): Handle the case where the program settings are incorrect more gracefully.
workspaces.insert(path, RootDatabase::new(metadata, program_settings, system)?);
workspaces.insert(path, RootDatabase::new(metadata, system)?);
}
Ok(Self {
@@ -92,6 +83,12 @@ impl Session {
})
}
// TODO(dhruvmanila): Ideally, we should have a single method for `workspace_db_for_path_mut`
// and `default_workspace_db_mut` but the borrow checker doesn't allow that.
// https://github.com/astral-sh/ruff/pull/13041#discussion_r1726725437
/// Returns a reference to the workspace [`RootDatabase`] corresponding to the given path, if
/// any.
pub(crate) fn workspace_db_for_path(&self, path: impl AsRef<Path>) -> Option<&RootDatabase> {
self.workspaces
.range(..=path.as_ref().to_path_buf())
@@ -99,6 +96,8 @@ impl Session {
.map(|(_, db)| db)
}
/// Returns a mutable reference to the workspace [`RootDatabase`] corresponding to the given
/// path, if any.
pub(crate) fn workspace_db_for_path_mut(
&mut self,
path: impl AsRef<Path>,
@@ -109,6 +108,19 @@ impl Session {
.map(|(_, db)| db)
}
/// Returns a reference to the default workspace [`RootDatabase`]. The default workspace is the
/// minimum root path in the workspace map.
pub(crate) fn default_workspace_db(&self) -> &RootDatabase {
// SAFETY: Currently, red knot only supports a single workspace.
self.workspaces.values().next().unwrap()
}
/// Returns a mutable reference to the default workspace [`RootDatabase`].
pub(crate) fn default_workspace_db_mut(&mut self) -> &mut RootDatabase {
// SAFETY: Currently, red knot only supports a single workspace.
self.workspaces.values_mut().next().unwrap()
}
pub fn key_from_url(&self, url: Url) -> DocumentKey {
self.index().key_from_url(url)
}
@@ -135,6 +147,20 @@ impl Session {
self.index_mut().open_text_document(url, document);
}
/// Updates a text document at the associated `key`.
///
/// The document key must point to a text document, or this will return an error.
pub(crate) fn update_text_document(
&mut self,
key: &DocumentKey,
content_changes: Vec<TextDocumentContentChangeEvent>,
new_version: DocumentVersion,
) -> crate::Result<()> {
let position_encoding = self.position_encoding;
self.index_mut()
.update_text_document(key, content_changes, new_version, position_encoding)
}
/// De-registers a document, specified by its key.
/// Calling this multiple times for the same document is a logic error.
pub(crate) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> {
@@ -221,6 +247,7 @@ impl Drop for MutIndexGuard<'_> {
/// An immutable snapshot of `Session` that references
/// a specific document.
#[derive(Debug)]
pub struct DocumentSnapshot {
resolved_client_capabilities: Arc<ResolvedClientCapabilities>,
document_ref: index::DocumentQuery,
@@ -241,7 +268,12 @@ impl DocumentSnapshot {
}
pub(crate) fn file(&self, db: &RootDatabase) -> Option<File> {
let path = url_to_system_path(self.document_ref.file_url()).ok()?;
system_path_to_file(db, path).ok()
match url_to_any_system_path(self.document_ref.file_url()).ok()? {
AnySystemPath::System(path) => system_path_to_file(db, path).ok(),
AnySystemPath::SystemVirtual(virtual_path) => db
.files()
.try_virtual_file(&virtual_path)
.map(|virtual_file| virtual_file.file()),
}
}
}

View File

@@ -8,27 +8,40 @@ use ruff_db::file_revision::FileRevision;
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
use ruff_db::system::{
DirectoryEntry, FileType, Metadata, OsSystem, Result, System, SystemPath, SystemPathBuf,
SystemVirtualPath,
SystemVirtualPath, SystemVirtualPathBuf,
};
use ruff_notebook::{Notebook, NotebookError};
use crate::session::index::Index;
use crate::DocumentQuery;
/// Converts the given [`Url`] to a [`SystemPathBuf`].
/// Converts the given [`Url`] to an [`AnySystemPath`].
///
/// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`]. Otherwise, the
/// URL is converted to a [`SystemVirtualPathBuf`].
///
/// This fails in the following cases:
/// * The URL scheme is not `file`.
/// * The URL cannot be converted to a file path (refer to [`Url::to_file_path`]).
/// * If the URL is not a valid UTF-8 string.
pub(crate) fn url_to_system_path(url: &Url) -> std::result::Result<SystemPathBuf, ()> {
pub(crate) fn url_to_any_system_path(url: &Url) -> std::result::Result<AnySystemPath, ()> {
if url.scheme() == "file" {
Ok(SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?)
Ok(AnySystemPath::System(
SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?,
))
} else {
Err(())
Ok(AnySystemPath::SystemVirtual(
SystemVirtualPath::new(url.as_str()).to_path_buf(),
))
}
}
/// Represents either a [`SystemPath`] or a [`SystemVirtualPath`].
#[derive(Debug)]
pub(crate) enum AnySystemPath {
System(SystemPathBuf),
SystemVirtual(SystemVirtualPathBuf),
}
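
A quick illustration of the mapping (hypothetical URLs; `untitled:` is what editors typically use for unsaved buffers):

```
// Hypothetical URLs illustrating the two variants returned above.
let on_disk = Url::parse("file:///src/main.py").unwrap();
assert!(matches!(
    url_to_any_system_path(&on_disk),
    Ok(AnySystemPath::System(_))
));

// Any non-`file` scheme, e.g. an unsaved editor buffer, becomes virtual.
let unsaved = Url::parse("untitled:Untitled-1").unwrap();
assert!(matches!(
    url_to_any_system_path(&unsaved),
    Ok(AnySystemPath::SystemVirtual(_))
));
```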
#[derive(Debug)]
pub(crate) struct LSPSystem {
/// A read-only copy of the index where the server stores all the open documents and settings.
@@ -144,19 +157,6 @@ impl System for LSPSystem {
}
}
fn virtual_path_metadata(&self, path: &SystemVirtualPath) -> Result<Metadata> {
// Virtual paths only exist in the LSP system, so we don't need to check the OS system.
let document = self
.system_virtual_path_to_document_ref(path)?
.ok_or_else(|| virtual_path_not_found(path))?;
Ok(Metadata::new(
document_revision(&document),
None,
FileType::File,
))
}
fn read_virtual_path_to_string(&self, path: &SystemVirtualPath) -> Result<String> {
let document = self
.system_virtual_path_to_document_ref(path)?

View File

@@ -20,9 +20,9 @@ default = ["console_error_panic_hook"]
[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
red_knot_workspace = { workspace = true, default-features = false, features = ["deflate"] }
ruff_db = { workspace = true }
ruff_db = { workspace = true, features = [] }
ruff_notebook = { workspace = true }
console_error_panic_hook = { workspace = true, optional = true }

View File

@@ -3,8 +3,8 @@ use std::any::Any;
use js_sys::Error;
use wasm_bindgen::prelude::*;
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
@@ -41,16 +41,17 @@ impl Workspace {
#[wasm_bindgen(constructor)]
pub fn new(root: &str, settings: &Settings) -> Result<Workspace, Error> {
let system = WasmSystem::new(SystemPath::new(root));
let workspace =
WorkspaceMetadata::from_path(SystemPath::new(root), &system).map_err(into_error)?;
let workspace = WorkspaceMetadata::from_path(
SystemPath::new(root),
&system,
Some(Configuration {
target_version: Some(settings.target_version.into()),
..Configuration::default()
}),
)
.map_err(into_error)?;
let program_settings = ProgramSettings {
target_version: settings.target_version.into(),
search_paths: SearchPathSettings::default(),
};
let db =
RootDatabase::new(workspace, program_settings, system.clone()).map_err(into_error)?;
let db = RootDatabase::new(workspace, system.clone()).map_err(into_error)?;
Ok(Self { db, system })
}
@@ -233,13 +234,6 @@ impl System for WasmSystem {
Notebook::from_source_code(&content)
}
fn virtual_path_metadata(
&self,
_path: &SystemVirtualPath,
) -> ruff_db::system::Result<Metadata> {
Err(not_found())
}
fn read_virtual_path_to_string(
&self,
_path: &SystemVirtualPath,

View File

@@ -11,14 +11,14 @@ fn check() {
};
let mut workspace = Workspace::new("/", &settings).expect("Workspace to be created");
let test = workspace
workspace
.open_file("test.py", "import random22\n")
.expect("File to be opened");
let result = workspace.check_file(&test).expect("Check to succeed");
let result = workspace.check().expect("Check to succeed");
assert_eq!(
result,
vec!["/test.py:1:8: Import 'random22' could not be resolved.",]
vec!["/test.py:1:8: Cannot resolve import 'random22'."]
);
}

View File

@@ -18,17 +18,24 @@ ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }
ruff_text_size = { workspace = true }
ruff_vendored = { workspace = true }
anyhow = { workspace = true }
crossbeam = { workspace = true }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"]}
ruff_db = { workspace = true, features = ["testing"] }
tempfile = { workspace = true }
[features]
default = ["zstd"]
zstd = ["ruff_vendored/zstd"]
deflate = ["ruff_vendored/deflate"]
[lints]
workspace = true

View File

@@ -0,0 +1,2 @@
x = 0
(x := x + 1)

View File

@@ -0,0 +1,3 @@
x = 0
if x := x + 1:
pass

View File

@@ -0,0 +1,11 @@
def bool(x) -> bool:
return True
class MyClass: ...
def MyClass() -> MyClass: ...
def x(self) -> x: ...

View File

@@ -0,0 +1,2 @@
def bool(x=bool):
return x

View File

@@ -0,0 +1,2 @@
with foo() as self.bar:
pass

View File

@@ -0,0 +1,3 @@
match x:
case [1, 0] if x := x[:0]:
y = 1

View File

@@ -1,15 +1,14 @@
use std::panic::RefUnwindSafe;
use std::sync::Arc;
use red_knot_python_semantic::{
vendored_typeshed_stubs, Db as SemanticDb, Program, ProgramSettings,
};
use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};
use red_knot_python_semantic::{Db as SemanticDb, Program};
use ruff_db::files::{File, Files};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};
use crate::workspace::{check_file, Workspace, WorkspaceMetadata};
@@ -27,11 +26,7 @@ pub struct RootDatabase {
}
impl RootDatabase {
pub fn new<S>(
workspace: WorkspaceMetadata,
settings: ProgramSettings,
system: S,
) -> anyhow::Result<Self>
pub fn new<S>(workspace: WorkspaceMetadata, system: S) -> anyhow::Result<Self>
where
S: System + 'static + Send + Sync + RefUnwindSafe,
{
@@ -42,11 +37,11 @@ impl RootDatabase {
system: Arc::new(system),
};
let workspace = Workspace::from_metadata(&db, workspace);
// Initialize the `Program` singleton
Program::from_settings(&db, settings)?;
Program::from_settings(&db, workspace.settings().program())?;
db.workspace = Some(Workspace::from_metadata(&db, workspace));
db.workspace = Some(workspace);
Ok(db)
}
@@ -61,6 +56,8 @@ impl RootDatabase {
}
pub fn check_file(&self, file: File) -> Result<Vec<String>, Cancelled> {
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
self.with_db(|db| check_file(db, file))
}
@@ -127,7 +124,7 @@ impl SemanticDb for RootDatabase {
#[salsa::db]
impl SourceDb for RootDatabase {
fn vendored(&self) -> &VendoredFileSystem {
vendored_typeshed_stubs()
ruff_vendored::file_system()
}
fn system(&self) -> &dyn System {
@@ -160,10 +157,11 @@ impl Db for RootDatabase {}
#[cfg(test)]
pub(crate) mod tests {
use salsa::Event;
use std::sync::Arc;
use red_knot_python_semantic::{vendored_typeshed_stubs, Db as SemanticDb};
use salsa::Event;
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
@@ -185,7 +183,7 @@ pub(crate) mod tests {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: vendored_typeshed_stubs().clone(),
vendored: ruff_vendored::file_system().clone(),
files: Files::default(),
events: Arc::default(),
}

View File

@@ -1,22 +1,33 @@
use rustc_hash::FxHashSet;
use red_knot_python_semantic::Program;
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::SystemPath;
use ruff_db::Db;
use rustc_hash::FxHashSet;
use crate::db::RootDatabase;
use crate::watch;
use crate::watch::{CreatedKind, DeletedKind};
use crate::workspace::settings::Configuration;
use crate::workspace::WorkspaceMetadata;
impl RootDatabase {
#[tracing::instrument(level = "debug", skip(self, changes))]
pub fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
#[tracing::instrument(level = "debug", skip(self, changes, base_configuration))]
pub fn apply_changes(
&mut self,
changes: Vec<watch::ChangeEvent>,
base_configuration: Option<&Configuration>,
) {
let workspace = self.workspace();
let workspace_path = workspace.root(self).to_path_buf();
let program = Program::get(self);
let custom_stdlib_versions_path = program
.custom_stdlib_search_path(self)
.map(|path| path.join("VERSIONS"));
let mut workspace_change = false;
// Changes to a custom stdlib path's VERSIONS
let mut custom_stdlib_change = false;
// Packages that need reloading
let mut changed_packages = FxHashSet::default();
// Paths that were added
@@ -39,7 +50,7 @@ impl RootDatabase {
};
for change in changes {
if let Some(path) = change.path() {
if let Some(path) = change.system_path() {
if matches!(
path.file_name(),
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
@@ -54,10 +65,15 @@ impl RootDatabase {
continue;
}
if Some(path) == custom_stdlib_versions_path.as_deref() {
custom_stdlib_change = true;
}
}
match change {
watch::ChangeEvent::Changed { path, kind: _ } => sync_path(self, &path),
watch::ChangeEvent::Changed { path, kind: _ }
| watch::ChangeEvent::Opened(path) => sync_path(self, &path),
watch::ChangeEvent::Created { kind, path } => {
match kind {
@@ -100,7 +116,13 @@ impl RootDatabase {
} else {
sync_recursively(self, &path);
// TODO: Remove after converting `package.files()` to a salsa query.
if custom_stdlib_versions_path
.as_ref()
.is_some_and(|versions_path| versions_path.starts_with(&path))
{
custom_stdlib_change = true;
}
if let Some(package) = workspace.package(self, &path) {
changed_packages.insert(package);
} else {
@@ -109,6 +131,17 @@ impl RootDatabase {
}
}
watch::ChangeEvent::CreatedVirtual(path)
| watch::ChangeEvent::ChangedVirtual(path) => {
File::sync_virtual_path(self, &path);
}
watch::ChangeEvent::DeletedVirtual(path) => {
if let Some(virtual_file) = self.files().try_virtual_file(&path) {
virtual_file.close(self);
}
}
watch::ChangeEvent::Rescan => {
workspace_change = true;
Files::sync_all(self);
@@ -118,7 +151,11 @@ impl RootDatabase {
}
if workspace_change {
match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
match WorkspaceMetadata::from_path(
&workspace_path,
self.system(),
base_configuration.cloned(),
) {
Ok(metadata) => {
tracing::debug!("Reloading workspace after structural change.");
// TODO: Handle changes in the program settings.
@@ -130,6 +167,11 @@ impl RootDatabase {
}
return;
} else if custom_stdlib_change {
let search_paths = workspace.search_path_settings(self).clone();
if let Err(error) = program.update_search_paths(self, &search_paths) {
tracing::error!("Failed to set the new search paths: {error}");
}
}
let mut added_paths = added_paths.into_iter().filter(|path| {

View File

@@ -1,5 +1,4 @@
pub mod db;
pub mod lint;
pub mod site_packages;
pub mod watch;
pub mod workspace;

View File

@@ -7,7 +7,7 @@ use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasTy, ModuleName, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_db::source::{line_index, source_text, SourceText};
use ruff_db::source::{source_text, SourceText};
use ruff_python_ast as ast;
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
use ruff_text_size::{Ranged, TextSize};
@@ -48,19 +48,6 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Vec<String> {
};
visitor.visit_body(&ast.body);
diagnostics = visitor.diagnostics;
} else {
let path = file_id.path(db);
let line_index = line_index(db.upcast(), file_id);
diagnostics.extend(parsed.errors().iter().map(|err| {
let source_location = line_index.source_location(err.location.start(), source.as_str());
format!(
"{}:{}:{}: {}",
path.as_str(),
source_location.row,
source_location.column,
err,
)
}));
}
diagnostics
@@ -127,22 +114,19 @@ fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
return;
}
let semantic = &context.semantic;
match name.ty(semantic) {
Type::Unbound => {
context.push_diagnostic(format_diagnostic(
context,
&format!("Name '{}' used when not defined.", &name.id),
name.start(),
));
}
Type::Union(union) if union.contains(semantic.db(), Type::Unbound) => {
context.push_diagnostic(format_diagnostic(
context,
&format!("Name '{}' used when possibly not defined.", &name.id),
name.start(),
));
}
_ => {}
let ty = name.ty(semantic);
if ty.is_unbound() {
context.push_diagnostic(format_diagnostic(
context,
&format!("Name '{}' used when not defined.", &name.id),
name.start(),
));
} else if ty.may_be_unbound(semantic.db()) {
context.push_diagnostic(format_diagnostic(
context,
&format!("Name '{}' used when possibly not defined.", &name.id),
name.start(),
));
}
}
@@ -177,7 +161,7 @@ fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
if ty.has_decorator(db, override_ty) {
let method_name = ty.name(db);
if class_ty
.inherited_class_member(db, &method_name)
.inherited_class_member(db, method_name)
.is_unbound()
{
// TODO should have a qualname() method to support nested classes
@@ -286,14 +270,9 @@ mod tests {
Program::from_settings(
&db,
ProgramSettings {
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings {
extra_paths: Vec::new(),
src_root,
site_packages: vec![],
custom_typeshed: None,
},
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid program settings");

View File

@@ -1,4 +1,4 @@
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_db::system::{SystemPath, SystemPathBuf, SystemVirtualPathBuf};
pub use watcher::{directory_watcher, EventHandler, Watcher};
pub use workspace_watcher::WorkspaceWatcher;
@@ -20,6 +20,9 @@ mod workspace_watcher;
/// event instead of emitting an event for each file or subdirectory in that path.
#[derive(Debug, PartialEq, Eq)]
pub enum ChangeEvent {
/// The file corresponding to the given path was opened in an editor.
Opened(SystemPathBuf),
/// A new path was created
Created {
path: SystemPathBuf,
@@ -38,6 +41,15 @@ pub enum ChangeEvent {
kind: DeletedKind,
},
/// A new virtual path was created.
CreatedVirtual(SystemVirtualPathBuf),
/// The content of a virtual path was changed.
ChangedVirtual(SystemVirtualPathBuf),
/// A virtual path was deleted.
DeletedVirtual(SystemVirtualPathBuf),
/// The file watcher failed to observe some changes and now is out of sync with the file system.
///
/// This can happen if many files are changed at once. The consumer should rescan all files to catch up
@@ -46,16 +58,27 @@ pub enum ChangeEvent {
}
impl ChangeEvent {
pub fn file_name(&self) -> Option<&str> {
self.path().and_then(|path| path.file_name())
/// Creates a new [`Changed`] event for the file content at the given path.
///
/// [`Changed`]: ChangeEvent::Changed
pub fn file_content_changed(path: SystemPathBuf) -> ChangeEvent {
ChangeEvent::Changed {
path,
kind: ChangedKind::FileContent,
}
}
pub fn path(&self) -> Option<&SystemPath> {
pub fn file_name(&self) -> Option<&str> {
self.system_path().and_then(|path| path.file_name())
}
pub fn system_path(&self) -> Option<&SystemPath> {
match self {
ChangeEvent::Created { path, .. }
ChangeEvent::Opened(path)
| ChangeEvent::Created { path, .. }
| ChangeEvent::Changed { path, .. }
| ChangeEvent::Deleted { path, .. } => Some(path),
ChangeEvent::Rescan => None,
_ => None,
}
}
}

View File

@@ -5,6 +5,8 @@ use salsa::{Durability, Setter as _};
pub use metadata::{PackageMetadata, WorkspaceMetadata};
use red_knot_python_semantic::types::check_types;
use red_knot_python_semantic::SearchPathSettings;
use ruff_db::parsed::parsed_module;
use ruff_db::source::{line_index, source_text, SourceDiagnostic};
use ruff_db::{
files::{system_path_to_file, File},
@@ -13,7 +15,8 @@ use ruff_db::{
use ruff_python_ast::{name::Name, PySourceType};
use ruff_text_size::Ranged;
use crate::workspace::files::{Index, Indexed, PackageFiles};
use crate::db::RootDatabase;
use crate::workspace::files::{Index, Indexed, IndexedIter, PackageFiles};
use crate::{
db::Db,
lint::{lint_semantic, lint_syntax},
@@ -21,6 +24,7 @@ use crate::{
mod files;
mod metadata;
pub mod settings;
/// The project workspace as a Salsa ingredient.
///
@@ -81,6 +85,10 @@ pub struct Workspace {
/// The (first-party) packages in this workspace.
#[return_ref]
package_tree: BTreeMap<SystemPathBuf, Package>,
/// The unresolved search path configuration.
#[return_ref]
pub search_path_settings: SearchPathSettings,
}
/// A first-party package in a workspace.
@@ -109,10 +117,14 @@ impl Workspace {
packages.insert(package.root.clone(), Package::from_metadata(db, package));
}
Workspace::builder(metadata.root, packages)
.durability(Durability::MEDIUM)
.open_fileset_durability(Durability::LOW)
.new(db)
Workspace::builder(
metadata.root,
packages,
metadata.settings.program.search_paths,
)
.durability(Durability::MEDIUM)
.open_fileset_durability(Durability::LOW)
.new(db)
}
pub fn root(self, db: &dyn Db) -> &SystemPath {
@@ -143,6 +155,11 @@ impl Workspace {
new_packages.insert(path, package);
}
if &metadata.settings.program.search_paths != self.search_path_settings(db) {
self.set_search_path_settings(db)
.to(metadata.settings.program.search_paths);
}
self.set_package_tree(db).to(new_packages);
}
@@ -174,23 +191,35 @@ impl Workspace {
}
/// Checks all open files in the workspace and its dependencies.
#[tracing::instrument(level = "debug", skip_all)]
- pub fn check(self, db: &dyn Db) -> Vec<String> {
+ pub fn check(self, db: &RootDatabase) -> Vec<String> {
let workspace_span = tracing::debug_span!("check_workspace");
let _span = workspace_span.enter();
tracing::debug!("Checking workspace");
- let mut result = Vec::new();
- if let Some(open_files) = self.open_files(db) {
- for file in open_files {
- result.extend_from_slice(&check_file(db, *file));
- }
- } else {
- for package in self.packages(db) {
- result.extend(package.check(db));
- }
- }
- result
+ let files = WorkspaceFiles::new(db, self);
+ let result = Arc::new(std::sync::Mutex::new(Vec::new()));
+ let inner_result = Arc::clone(&result);
+ let db = db.snapshot();
+ let workspace_span = workspace_span.clone();
+ rayon::scope(move |scope| {
+ for file in &files {
+ let result = inner_result.clone();
+ let db = db.snapshot();
+ let workspace_span = workspace_span.clone();
+ scope.spawn(move |_| {
+ let check_file_span = tracing::debug_span!(parent: &workspace_span, "check_file", file=%file.path(&db));
+ let _entered = check_file_span.entered();
+ let file_diagnostics = check_file(&db, file);
+ result.lock().unwrap().extend(file_diagnostics);
+ });
+ }
+ });
+ Arc::into_inner(result).unwrap().into_inner().unwrap()
}
/// Opens a file in the workspace.
@@ -268,7 +297,6 @@ impl Workspace {
}
}
#[salsa::tracked]
impl Package {
pub fn root(self, db: &dyn Db) -> &SystemPath {
self.root_buf(db)
@@ -308,19 +336,6 @@ impl Package {
index.insert(file);
}
- #[tracing::instrument(level = "debug", skip(db))]
- pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
- tracing::debug!("Checking package '{}'", self.root(db));
- let mut result = Vec::new();
- for file in &self.files(db) {
- let diagnostics = check_file(db, file);
- result.extend_from_slice(&diagnostics);
- }
- result
- }
/// Returns the files belonging to this package.
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
let files = self.file_set(db);
@@ -331,11 +346,7 @@ impl Package {
tracing::debug_span!("index_package_files", package = %self.name(db)).entered();
let files = discover_package_files(db, self.root(db));
- tracing::info!(
- "Indexed {} files for package '{}'",
- files.len(),
- self.name(db)
- );
+ tracing::info!("Found {} files in package '{}'", files.len(), self.name(db));
vacant.set(files)
}
Index::Indexed(indexed) => indexed,
@@ -372,9 +383,7 @@ impl Package {
#[salsa::tracked]
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
- let path = file.path(db);
- let _span = tracing::debug_span!("check_file", file=%path).entered();
- tracing::debug!("Checking file '{path}'");
+ tracing::debug!("Checking file '{path}'", path = file.path(db));
let mut diagnostics = Vec::new();
@@ -393,6 +402,17 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
return diagnostics;
}
let parsed = parsed_module(db.upcast(), file);
if !parsed.errors().is_empty() {
let path = file.path(db);
let line_index = line_index(db.upcast(), file);
diagnostics.extend(parsed.errors().iter().map(|err| {
let source_location = line_index.source_location(err.location.start(), source.as_str());
format!("{path}:{source_location}: {message}", message = err.error)
}));
}
for diagnostic in check_types(db.upcast(), file) {
let index = line_index(db.upcast(), diagnostic.file());
let location = index.source_location(diagnostic.start(), source.as_str());
@@ -451,6 +471,73 @@ fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
files
}
#[derive(Debug)]
enum WorkspaceFiles<'a> {
OpenFiles(&'a FxHashSet<File>),
PackageFiles(Vec<Indexed<'a>>),
}
impl<'a> WorkspaceFiles<'a> {
fn new(db: &'a dyn Db, workspace: Workspace) -> Self {
if let Some(open_files) = workspace.open_files(db) {
WorkspaceFiles::OpenFiles(open_files)
} else {
WorkspaceFiles::PackageFiles(
workspace
.packages(db)
.map(|package| package.files(db))
.collect(),
)
}
}
}
impl<'a> IntoIterator for &'a WorkspaceFiles<'a> {
type Item = File;
type IntoIter = WorkspaceFilesIter<'a>;
fn into_iter(self) -> Self::IntoIter {
match self {
WorkspaceFiles::OpenFiles(files) => WorkspaceFilesIter::OpenFiles(files.iter()),
WorkspaceFiles::PackageFiles(package_files) => {
let mut package_files = package_files.iter();
WorkspaceFilesIter::PackageFiles {
current: package_files.next().map(IntoIterator::into_iter),
package_files,
}
}
}
}
}
enum WorkspaceFilesIter<'db> {
OpenFiles(std::collections::hash_set::Iter<'db, File>),
PackageFiles {
package_files: std::slice::Iter<'db, Indexed<'db>>,
current: Option<IndexedIter<'db>>,
},
}
impl Iterator for WorkspaceFilesIter<'_> {
type Item = File;
fn next(&mut self) -> Option<Self::Item> {
match self {
WorkspaceFilesIter::OpenFiles(files) => files.next().copied(),
WorkspaceFilesIter::PackageFiles {
package_files,
current,
} => loop {
if let Some(file) = current.as_mut().and_then(Iterator::next) {
return Some(file);
}
*current = Some(package_files.next()?.into_iter());
},
}
}
}
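The parallel `check` above follows a common rayon fan-out shape: snapshot the database per task, funnel per-file diagnostics through an `Arc<Mutex<Vec<_>>>`, and unwrap the sole remaining handle once the scope has joined. A minimal, self-contained sketch of that shape (assuming the `rayon` crate; the closure body stands in for `check_file`):

```
use std::sync::{Arc, Mutex};

fn main() {
    let result = Arc::new(Mutex::new(Vec::new()));
    let inner_result = Arc::clone(&result);

    rayon::scope(move |scope| {
        for file in 0..8 {
            // Each task gets its own handle (standing in for `db.snapshot()`).
            let result = inner_result.clone();
            scope.spawn(move |_| {
                result.lock().unwrap().push(format!("checked file {file}"));
            });
        }
    });

    // The scope has joined all tasks, so `result` is the last `Arc` handle
    // and `Arc::into_inner` recovers the Vec without cloning it.
    let diagnostics = Arc::into_inner(result).unwrap().into_inner().unwrap();
    assert_eq!(diagnostics.len(), 8);
}
```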
#[cfg(test)]
mod tests {
use ruff_db::files::system_path_to_file;

View File

@@ -158,9 +158,11 @@ impl Deref for Indexed<'_> {
}
}
pub(super) type IndexedIter<'a> = std::iter::Copied<std::collections::hash_set::Iter<'a, File>>;
impl<'a> IntoIterator for &'a Indexed<'_> {
type Item = File;
- type IntoIter = std::iter::Copied<std::collections::hash_set::Iter<'a, File>>;
+ type IntoIter = IndexedIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.files.iter().copied()

View File

@@ -1,3 +1,4 @@
use crate::workspace::settings::{Configuration, WorkspaceSettings};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
@@ -7,6 +8,8 @@ pub struct WorkspaceMetadata {
/// The (first-party) packages in this workspace.
pub(super) packages: Vec<PackageMetadata>,
pub(super) settings: WorkspaceSettings,
}
/// A first-party package in a workspace.
@@ -21,7 +24,11 @@ pub struct PackageMetadata {
impl WorkspaceMetadata {
/// Discovers the closest workspace at `path` and returns its metadata.
- pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
+ pub fn from_path(
+ path: &SystemPath,
+ system: &dyn System,
+ base_configuration: Option<Configuration>,
+ ) -> anyhow::Result<WorkspaceMetadata> {
assert!(
system.is_directory(path),
"Workspace root path must be a directory"
@@ -38,9 +45,20 @@ impl WorkspaceMetadata {
root: root.clone(),
};
// TODO: Load the configuration from disk.
let mut configuration = Configuration::default();
if let Some(base_configuration) = base_configuration {
configuration.extend(base_configuration);
}
// TODO: Respect the package configurations when resolving settings (e.g. for the target version).
let settings = configuration.into_workspace_settings(&root);
let workspace = WorkspaceMetadata {
root,
packages: vec![package],
settings,
};
Ok(workspace)
@@ -53,6 +71,10 @@ impl WorkspaceMetadata {
pub fn packages(&self) -> &[PackageMetadata] {
&self.packages
}
pub fn settings(&self) -> &WorkspaceSettings {
&self.settings
}
}
impl PackageMetadata {

View File

@@ -0,0 +1,89 @@
use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings, SitePackages};
use ruff_db::system::{SystemPath, SystemPathBuf};
/// The resolved configurations.
///
/// The main difference to [`Configuration`] is that default values are filled in.
#[derive(Debug, Clone)]
pub struct WorkspaceSettings {
pub(super) program: ProgramSettings,
}
impl WorkspaceSettings {
pub fn program(&self) -> &ProgramSettings {
&self.program
}
}
/// The configuration for the workspace or a package.
#[derive(Debug, Default, Clone)]
pub struct Configuration {
pub target_version: Option<PythonVersion>,
pub search_paths: SearchPathConfiguration,
}
impl Configuration {
/// Extends this configuration by using the values from `with` for all values that are absent in `self`.
pub fn extend(&mut self, with: Configuration) {
self.target_version = self.target_version.or(with.target_version);
self.search_paths.extend(with.search_paths);
}
pub fn into_workspace_settings(self, workspace_root: &SystemPath) -> WorkspaceSettings {
WorkspaceSettings {
program: ProgramSettings {
target_version: self.target_version.unwrap_or_default(),
search_paths: self.search_paths.into_settings(workspace_root),
},
}
}
}
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct SearchPathConfiguration {
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
pub extra_paths: Option<Vec<SystemPathBuf>>,
/// The root of the workspace, used for finding first-party modules.
pub src_root: Option<SystemPathBuf>,
/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary.
pub custom_typeshed: Option<SystemPathBuf>,
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: Option<SitePackages>,
}
impl SearchPathConfiguration {
pub fn into_settings(self, workspace_root: &SystemPath) -> SearchPathSettings {
let site_packages = self.site_packages.unwrap_or(SitePackages::Known(vec![]));
SearchPathSettings {
extra_paths: self.extra_paths.unwrap_or_default(),
src_root: self
.src_root
.unwrap_or_else(|| workspace_root.to_path_buf()),
custom_typeshed: self.custom_typeshed,
site_packages,
}
}
pub fn extend(&mut self, with: SearchPathConfiguration) {
if let Some(extra_paths) = with.extra_paths {
self.extra_paths.get_or_insert(extra_paths);
}
if let Some(src_root) = with.src_root {
self.src_root.get_or_insert(src_root);
}
if let Some(custom_typeshed) = with.custom_typeshed {
self.custom_typeshed.get_or_insert(custom_typeshed);
}
if let Some(site_packages) = with.site_packages {
self.site_packages.get_or_insert(site_packages);
}
}
}
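`Configuration::extend` and `SearchPathConfiguration::extend` both implement "first writer wins" layering: values already set in `self` keep priority, and the other configuration only fills the gaps. A minimal sketch of that semantics with stand-in fields:

```
#[derive(Debug, Default, Clone)]
struct Config {
    target_version: Option<u8>,
    src_root: Option<String>,
}

impl Config {
    // Values already present in `self` win; `with` only fills the gaps.
    fn extend(&mut self, with: Config) {
        self.target_version = self.target_version.or(with.target_version);
        if let Some(src_root) = with.src_root {
            self.src_root.get_or_insert(src_root);
        }
    }
}

fn main() {
    let mut cli = Config { target_version: Some(12), src_root: None };
    let base = Config { target_version: Some(8), src_root: Some("src".into()) };
    cli.extend(base);
    // The CLI value survives; the base only supplied the missing field.
    assert_eq!(cli.target_version, Some(12));
    assert_eq!(cli.src_root.as_deref(), Some("src"));
}
```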

View File

@@ -1,6 +1,7 @@
- use red_knot_python_semantic::{
- HasTy, ProgramSettings, PythonVersion, SearchPathSettings, SemanticModel,
- };
- use std::fs;
- use std::path::PathBuf;
+ use red_knot_python_semantic::{HasTy, SemanticModel};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
@@ -9,41 +10,38 @@ use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::{Alias, Expr, Parameter, ParameterWithDefault, Stmt};
+ use std::fs;
+ use std::path::PathBuf;
- fn setup_db(workspace_root: SystemPathBuf) -> anyhow::Result<RootDatabase> {
- let system = OsSystem::new(&workspace_root);
- let workspace = WorkspaceMetadata::from_path(&workspace_root, &system)?;
- let search_paths = SearchPathSettings {
- extra_paths: vec![],
- src_root: workspace_root,
- custom_typeshed: None,
- site_packages: vec![],
- };
- let settings = ProgramSettings {
- target_version: PythonVersion::default(),
- search_paths,
- };
- RootDatabase::new(workspace, settings, system)
+ fn setup_db(workspace_root: &SystemPath) -> anyhow::Result<RootDatabase> {
+ let system = OsSystem::new(workspace_root);
+ let workspace = WorkspaceMetadata::from_path(workspace_root, &system, None)?;
+ RootDatabase::new(workspace, system)
}
/// Test that all snippets in testcorpus can be checked without panic
#[test]
#[allow(clippy::print_stdout)]
fn corpus_no_panic() -> anyhow::Result<()> {
- let corpus = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("resources/test/corpus");
- let system_corpus = SystemPath::from_std_path(&corpus).expect("corpus path to be UTF8");
- let db = setup_db(system_corpus.to_path_buf())?;
+ let root = SystemPathBuf::from_path_buf(tempfile::TempDir::new()?.into_path()).unwrap();
+ let db = setup_db(&root)?;
- for path in fs::read_dir(&corpus).expect("corpus to be a directory") {
- let path = path.expect("path to not be an error").path();
- println!("checking {path:?}");
- let path = SystemPathBuf::from_path_buf(path.clone()).expect("path to be UTF-8");
+ let corpus = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("resources/test/corpus");
+ for path in fs::read_dir(&corpus)? {
+ let source = path?.path();
+ println!("checking {source:?}");
+ let source_fn = source.file_name().unwrap().to_str().unwrap();
+ let py_dest = root.join(source_fn);
+ fs::copy(&source, py_dest.as_std_path())?;
// this test is only asserting that we can pull every expression type without a panic
// (and some non-expressions that clearly define a single type)
- let file = system_path_to_file(&db, path).expect("file to exist");
+ let file = system_path_to_file(&db, py_dest).unwrap();
pull_types(&db, file);
+ // try the file as a stub also
+ println!("re-checking as .pyi");
+ let pyi_dest = root.join(format!("{source_fn}i"));
+ std::fs::copy(source, pyi_dest.as_std_path())?;
+ let file = system_path_to_file(&db, pyi_dest).unwrap();
+ pull_types(&db, file);
}
Ok(())

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.6.1"
version = "0.6.8"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -14,7 +14,9 @@ default-run = "ruff"
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, default-features = false, features = ["os"] }
ruff_diagnostics = { workspace = true }
ruff_graph = { workspace = true, features = ["serde", "clap"] }
ruff_linter = { workspace = true, features = ["clap"] }
ruff_macros = { workspace = true }
ruff_notebook = { workspace = true }
@@ -36,6 +38,7 @@ clap_complete_command = { workspace = true }
clearscreen = { workspace = true }
colored = { workspace = true }
filetime = { workspace = true }
globwalk = { workspace = true }
ignore = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
@@ -59,8 +62,11 @@ wild = { workspace = true }
[dev-dependencies]
# Enable test rules during development
ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
assert_fs = { workspace = true }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
indoc = { workspace = true }
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }

View File

@@ -7,13 +7,11 @@ use std::sync::Arc;
use anyhow::{anyhow, bail};
use clap::builder::{TypedValueParser, ValueParserFactory};
- use clap::{command, Parser};
+ use clap::{command, Parser, Subcommand};
use colored::Colorize;
use path_absolutize::path_dedot;
use regex::Regex;
- use rustc_hash::FxHashMap;
- use toml;
+ use ruff_graph::Direction;
use ruff_linter::line_width::LineLength;
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
@@ -27,6 +25,8 @@ use ruff_text_size::TextRange;
use ruff_workspace::configuration::{Configuration, RuleSelection};
use ruff_workspace::options::{Options, PycodestyleOptions};
use ruff_workspace::resolver::ConfigurationTransformer;
+ use rustc_hash::FxHashMap;
+ use toml;
/// All configuration options that can be passed "globally",
/// i.e., can be passed to all subcommands
@@ -132,6 +132,9 @@ pub enum Command {
Format(FormatCommand),
/// Run the language server.
Server(ServerCommand),
/// Run analysis over Python source code.
#[clap(subcommand)]
Analyze(AnalyzeCommand),
/// Display Ruff's version
Version {
#[arg(long, value_enum, default_value = "text")]
@@ -139,6 +142,35 @@ pub enum Command {
},
}
#[derive(Debug, Subcommand)]
pub enum AnalyzeCommand {
/// Generate a map of Python file dependencies or dependents.
Graph(AnalyzeGraphCommand),
}
#[derive(Clone, Debug, clap::Parser)]
pub struct AnalyzeGraphCommand {
/// List of files or directories to include.
#[clap(help = "List of files or directories to include [default: .]")]
files: Vec<PathBuf>,
/// The direction of the import map. By default, generates a dependency map, i.e., a map from
/// file to files that it depends on. Use `--direction dependents` to generate a map from file
/// to files that depend on it.
#[clap(long, value_enum, default_value_t)]
direction: Direction,
/// Attempt to detect imports from string literals.
#[clap(long)]
detect_string_imports: bool,
/// Enable preview mode. Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
#[clap(long, overrides_with("preview"), hide = true)]
no_preview: bool,
/// The minimum Python version that should be supported.
#[arg(long, value_enum)]
target_version: Option<PythonVersion>,
}
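The `--preview`/`--no-preview` pair above uses clap's `overrides_with` so the two flags stay independent booleans that can later be collapsed into an `Option<bool>`. A sketch of that pairing (assuming clap 4 with the derive feature; `resolve_bool_arg` here mirrors the helper referenced elsewhere in this diff, not ruff's exact code):

```
use clap::Parser;

#[derive(Debug, Parser)]
struct Cli {
    /// Enable preview mode. Use `--no-preview` to disable.
    #[arg(long, overrides_with("no_preview"))]
    preview: bool,
    #[arg(long, overrides_with("preview"), hide = true)]
    no_preview: bool,
}

// Collapse the pair into `Some(true)`, `Some(false)`, or `None` (flag unset).
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        (true, true) => unreachable!("clap's overrides_with keeps only the last flag"),
    }
}

fn main() {
    let cli = Cli::parse_from(["ruff", "--preview"]);
    assert_eq!(resolve_bool_arg(cli.preview, cli.no_preview), Some(true));
}
```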
// The `Parser` derive is for ruff_dev, for ruff `Args` would be sufficient
#[derive(Clone, Debug, clap::Parser)]
#[allow(clippy::struct_excessive_bools)]
@@ -700,6 +732,7 @@ impl CheckCommand {
output_format: resolve_output_format(self.output_format)?,
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
extension: self.extension,
..ExplicitConfigOverrides::default()
};
let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
@@ -732,8 +765,34 @@ impl FormatCommand {
target_version: self.target_version,
cache_dir: self.cache_dir,
extension: self.extension,
..ExplicitConfigOverrides::default()
};
// Unsupported on the formatter CLI, but required on `Overrides`.
let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
Ok((format_arguments, config_args))
}
}
impl AnalyzeGraphCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(
self,
global_options: GlobalConfigArgs,
) -> anyhow::Result<(AnalyzeGraphArgs, ConfigArguments)> {
let format_arguments = AnalyzeGraphArgs {
files: self.files,
direction: self.direction,
};
let cli_overrides = ExplicitConfigOverrides {
detect_string_imports: if self.detect_string_imports {
Some(true)
} else {
None
},
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version,
..ExplicitConfigOverrides::default()
};
@@ -896,7 +955,7 @@ A `--config` flag must either be a path to a `.toml` configuration file
// the user was trying to pass in a path to a configuration file
// or some inline TOML.
// We want to display the most helpful error to the user as possible.
- if std::path::Path::new(value)
+ if Path::new(value)
.extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("toml"))
{
@@ -1156,6 +1215,13 @@ impl LineColumnParseError {
}
}
/// CLI settings that are distinct from configuration (commands, lists of files, etc.).
#[derive(Clone, Debug)]
pub struct AnalyzeGraphArgs {
pub files: Vec<PathBuf>,
pub direction: Direction,
}
/// Configuration overrides provided via dedicated CLI flags:
/// `--line-length`, `--respect-gitignore`, etc.
#[derive(Clone, Default)]
@@ -1187,6 +1253,7 @@ struct ExplicitConfigOverrides {
output_format: Option<OutputFormat>,
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
}
impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1271,6 +1338,9 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(extension) = &self.extension {
config.extension = Some(extension.iter().cloned().collect());
}
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}
config
}
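`ExplicitConfigOverrides` acts as a transformer: every CLI flag is held as an `Option`, and only explicitly-passed flags are stamped over the configuration that the resolver loaded from `pyproject.toml`/`ruff.toml`. A minimal sketch of the pattern with stand-in names (not ruff's actual types):

```
#[derive(Debug, Default)]
struct Config {
    detect_string_imports: Option<bool>,
}

trait ConfigTransformer {
    fn transform(&self, config: Config) -> Config;
}

struct CliOverrides {
    detect_string_imports: Option<bool>,
}

impl ConfigTransformer for CliOverrides {
    fn transform(&self, mut config: Config) -> Config {
        // Only an explicitly-passed flag overrides the file-based value.
        if let Some(detect) = self.detect_string_imports {
            config.detect_string_imports = Some(detect);
        }
        config
    }
}

fn main() {
    let overrides = CliOverrides { detect_string_imports: Some(true) };
    let config = overrides.transform(Config::default());
    assert_eq!(config.detect_string_imports, Some(true));
}
```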

View File

@@ -10,7 +10,9 @@ use ruff_linter::linter::add_noqa_to_path;
use ruff_linter::source_kind::SourceKind;
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
use ruff_workspace::resolver::{
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile,
};
use crate::args::ConfigArguments;
@@ -57,6 +59,15 @@ pub(crate) fn add_noqa(
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
let settings = resolver.resolve(path);
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
resolved_file.path(),
resolved_file.file_name(),
&settings.linter.exclude,
)
{
return None;
}
let source_kind = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
Ok(None) => return None,
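The exclusion guard above encodes one rule (repeated in `analyze_graph` below): explicitly-passed files bypass `exclude` patterns unless `force_exclude` is set, while discovered files always honor them. A small sketch of that predicate with illustrative names:

```
// `is_root` stands in for `resolved_file.is_root()`: the file was passed
// directly on the command line rather than found by directory traversal.
fn should_skip(force_exclude: bool, is_root: bool, matches_exclude: bool) -> bool {
    (force_exclude || !is_root) && matches_exclude
}

fn main() {
    // A file passed directly on the command line is still processed...
    assert!(!should_skip(false, true, true));
    // ...unless `force_exclude` is enabled.
    assert!(should_skip(true, true, true));
    // Files found by traversal respect `exclude` regardless.
    assert!(should_skip(false, false, true));
}
```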

View File

@@ -0,0 +1,251 @@
use crate::args::{AnalyzeGraphArgs, ConfigArguments};
use crate::resolve::resolve;
use crate::{resolve_default_files, ExitStatus};
use anyhow::Result;
use log::{debug, warn};
use path_absolutize::CWD;
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports};
use ruff_linter::{warn_user, warn_user_once};
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{match_exclusion, python_files_in_path, ResolvedFile};
use rustc_hash::FxHashMap;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
/// Generate an import map.
pub(crate) fn analyze_graph(
args: AnalyzeGraphArgs,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
// Construct the "default" settings. These are used when no `pyproject.toml`
// files are present, or files are injected from outside the hierarchy.
let pyproject_config = resolve(config_arguments, None)?;
if pyproject_config.settings.analyze.preview.is_disabled() {
warn_user!("`ruff analyze graph` is experimental and may change without warning");
}
// Write all paths relative to the current working directory.
let root =
SystemPathBuf::from_path_buf(CWD.clone()).expect("Expected a UTF-8 working directory");
// Find all Python files.
let files = resolve_default_files(args.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
return Ok(ExitStatus::Success);
}
// Resolve all package roots.
let package_roots = resolver
.package_roots(
&paths
.iter()
.flatten()
.map(ResolvedFile::path)
.collect::<Vec<_>>(),
)
.into_iter()
.map(|(path, package)| (path.to_path_buf(), package.map(Path::to_path_buf)))
.collect::<FxHashMap<_, _>>();
// Create a database from the source roots.
let db = ModuleDb::from_src_roots(
package_roots
.values()
.filter_map(|package| package.as_deref())
.filter_map(|package| package.parent())
.map(Path::to_path_buf)
.filter_map(|path| SystemPathBuf::from_path_buf(path).ok()),
pyproject_config
.settings
.analyze
.target_version
.as_tuple()
.into(),
)?;
let imports = {
// Create a cache for resolved globs.
let glob_resolver = Arc::new(Mutex::new(GlobResolver::default()));
// Collect and resolve the imports for each file.
let result = Arc::new(Mutex::new(Vec::new()));
let inner_result = Arc::clone(&result);
let db = db.snapshot();
rayon::scope(move |scope| {
for resolved_file in paths {
let Ok(resolved_file) = resolved_file else {
continue;
};
let path = resolved_file.path();
let package = path
.parent()
.and_then(|parent| package_roots.get(parent))
.and_then(Clone::clone);
// Resolve the per-file settings.
let settings = resolver.resolve(path);
let string_imports = settings.analyze.detect_string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
// Skip excluded files.
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
resolved_file.path(),
resolved_file.file_name(),
&settings.analyze.exclude,
)
{
continue;
}
// Ignore non-Python files.
let source_type = match settings.analyze.extension.get(path) {
None => match SourceType::from(&path) {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
debug!("Ignoring TOML file: {}", path.display());
continue;
}
},
Some(language) => PySourceType::from(language),
};
if matches!(source_type, PySourceType::Ipynb) {
debug!("Ignoring Jupyter notebook: {}", path.display());
continue;
}
// Convert to system paths.
let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else {
warn!("Failed to convert package to system path");
continue;
};
let Ok(path) = SystemPathBuf::from_path_buf(resolved_file.into_path()) else {
warn!("Failed to convert path to system path");
continue;
};
let db = db.snapshot();
let glob_resolver = glob_resolver.clone();
let root = root.clone();
let result = inner_result.clone();
scope.spawn(move |_| {
// Identify any imports via static analysis.
let mut imports =
ModuleImports::detect(&db, &path, package.as_deref(), string_imports)
.unwrap_or_else(|err| {
warn!("Failed to generate import map for {path}: {err}");
ModuleImports::default()
});
debug!("Discovered {} imports for {}", imports.len(), path);
// Append any imports that were statically defined in the configuration.
if let Some((root, globs)) = include_dependencies {
let mut glob_resolver = glob_resolver.lock().unwrap();
imports.extend(glob_resolver.resolve(root, globs));
}
// Convert the path (and imports) to be relative to the working directory.
let path = path
.strip_prefix(&root)
.map(SystemPath::to_path_buf)
.unwrap_or(path);
let imports = imports.relative_to(&root);
result.lock().unwrap().push((path, imports));
});
}
});
// Collect the results.
Arc::into_inner(result).unwrap().into_inner()?
};
// Generate the import map.
let import_map = match args.direction {
Direction::Dependencies => ImportMap::dependencies(imports),
Direction::Dependents => ImportMap::dependents(imports),
};
// Print to JSON.
writeln!(
std::io::stdout(),
"{}",
serde_json::to_string_pretty(&import_map)?
)?;
std::mem::forget(db);
Ok(ExitStatus::Success)
}
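The command ends by serializing the accumulated map as pretty-printed JSON on stdout. A minimal sketch of that final step (assuming the `serde_json` crate; a `BTreeMap` stands in for `ImportMap` here to keep keys sorted like the snapshots below):

```
use std::collections::BTreeMap;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The import map is conceptually a path -> [paths] mapping.
    let mut imports: BTreeMap<&str, Vec<&str>> = BTreeMap::new();
    imports.insert("ruff/a.py", vec!["ruff/b.py"]);
    imports.insert("ruff/b.py", vec![]);
    println!("{}", serde_json::to_string_pretty(&imports)?);
    Ok(())
}
```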
/// A resolver for glob sets.
#[derive(Default, Debug)]
struct GlobResolver {
cache: GlobCache,
}
impl GlobResolver {
/// Resolve a set of globs, anchored at a given root.
fn resolve(&mut self, root: PathBuf, globs: Vec<String>) -> Vec<SystemPathBuf> {
if let Some(cached) = self.cache.get(&root, &globs) {
return cached.clone();
}
let walker = match globwalk::GlobWalkerBuilder::from_patterns(&root, &globs)
.file_type(globwalk::FileType::FILE)
.build()
{
Ok(walker) => walker,
Err(err) => {
warn!("Failed to read glob walker: {err}");
return Vec::new();
}
};
let mut paths = Vec::new();
for entry in walker {
let entry = match entry {
Ok(entry) => entry,
Err(err) => {
warn!("Failed to read glob entry: {err}");
continue;
}
};
let path = match SystemPathBuf::from_path_buf(entry.into_path()) {
Ok(path) => path,
Err(err) => {
warn!("Failed to convert path to system path: {}", err.display());
continue;
}
};
paths.push(path);
}
self.cache.insert(root, globs, paths.clone());
paths
}
}
/// A cache for resolved globs.
#[derive(Default, Debug)]
struct GlobCache(FxHashMap<PathBuf, FxHashMap<Vec<String>, Vec<SystemPathBuf>>>);
impl GlobCache {
/// Insert a resolved glob.
fn insert(&mut self, root: PathBuf, globs: Vec<String>, paths: Vec<SystemPathBuf>) {
self.0.entry(root).or_default().insert(globs, paths);
}
/// Get a resolved glob.
fn get(&self, root: &Path, globs: &[String]) -> Option<&Vec<SystemPathBuf>> {
self.0.get(root).and_then(|map| map.get(globs))
}
}
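`GlobResolver` memoizes on the `(root, globs)` pair so repeated `include-dependencies` entries don't re-walk the file system. A self-contained sketch of the same memoization idea (the fake walk stands in for the `globwalk` traversal above):

```
use std::collections::HashMap;

#[derive(Default)]
struct Memo {
    cache: HashMap<(String, Vec<String>), Vec<String>>,
    walks: usize,
}

impl Memo {
    fn resolve(&mut self, root: &str, globs: &[String]) -> Vec<String> {
        let key = (root.to_string(), globs.to_vec());
        if let Some(cached) = self.cache.get(&key) {
            return cached.clone();
        }
        self.walks += 1; // stand-in for the real file-system walk
        let paths = vec![format!("{root}/example.py")];
        self.cache.insert(key, paths.clone());
        paths
    }
}

fn main() {
    let mut memo = Memo::default();
    let globs = vec!["*.py".to_string()];
    memo.resolve("ruff", &globs);
    memo.resolve("ruff", &globs); // cache hit: no second walk
    assert_eq!(memo.walks, 1);
}
```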

View File

@@ -810,7 +810,11 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
.map(|rule| format!("`{}`", rule.noqa_code()))
.collect();
rule_names.sort();
warn_user_once!("The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.", rule_names.join(", "));
if let [rule] = rule_names.as_slice() {
warn_user_once!("The following rule may cause conflicts when used with the formatter: {rule}. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration.");
} else {
warn_user_once!("The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.", rule_names.join(", "));
}
}
// Next, validate settings-specific incompatibilities.
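The singular/plural split above relies on a slice pattern over the collected rule names. A minimal sketch of the same technique:

```
fn conflict_warning(rule_names: &[&str]) -> String {
    // A one-element slice binds the single rule; everything else is plural.
    if let [rule] = rule_names {
        format!("The following rule may cause conflicts: {rule}.")
    } else {
        format!(
            "The following rules may cause conflicts: {}.",
            rule_names.join(", ")
        )
    }
}

fn main() {
    assert!(conflict_warning(&["`COM812`"]).starts_with("The following rule "));
    assert!(conflict_warning(&["`COM812`", "`ISC001`"]).starts_with("The following rules"));
}
```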

View File

@@ -1,4 +1,5 @@
pub(crate) mod add_noqa;
pub(crate) mod analyze_graph;
pub(crate) mod check;
pub(crate) mod check_stdin;
pub(crate) mod clean;

View File

@@ -20,7 +20,9 @@ use ruff_linter::settings::types::OutputFormat;
use ruff_linter::{fs, warn_user, warn_user_once};
use ruff_workspace::Settings;
- use crate::args::{Args, CheckCommand, Command, FormatCommand};
+ use crate::args::{
+ AnalyzeCommand, AnalyzeGraphCommand, Args, CheckCommand, Command, FormatCommand,
+ };
use crate::printer::{Flags as PrinterFlags, Printer};
pub mod args;
@@ -186,6 +188,7 @@ pub fn run(
Command::Check(args) => check(args, global_options),
Command::Format(args) => format(args, global_options),
Command::Server(args) => server(args),
Command::Analyze(AnalyzeCommand::Graph(args)) => analyze_graph(args, global_options),
}
}
@@ -199,6 +202,15 @@ fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitS
}
}
fn analyze_graph(
args: AnalyzeGraphCommand,
global_options: GlobalConfigArgs,
) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
commands::analyze_graph::analyze_graph(cli, &config_arguments)
}
fn server(args: ServerCommand) -> Result<ExitStatus> {
let four = NonZeroUsize::new(4).unwrap();

View File

@@ -3,6 +3,7 @@ use std::process::ExitCode;
use clap::{Parser, Subcommand};
use colored::Colorize;
use log::error;
+ use std::io::Write;
use ruff::args::{Args, Command};
use ruff::{run, ExitStatus};
@@ -86,7 +87,16 @@ pub fn main() -> ExitCode {
Ok(code) => code.into(),
Err(err) => {
{
- use std::io::Write;
// Exit "gracefully" on broken pipe errors.
//
// See: https://github.com/BurntSushi/ripgrep/blob/bf63fe8f258afc09bae6caa48f0ae35eaf115005/crates/core/main.rs#L47C1-L61C14
for cause in err.chain() {
if let Some(ioerr) = cause.downcast_ref::<std::io::Error>() {
if ioerr.kind() == std::io::ErrorKind::BrokenPipe {
return ExitCode::from(0);
}
}
}
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
let mut stderr = std::io::stderr().lock();
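The broken-pipe handling walks `anyhow`'s cause chain and downcasts each cause to `std::io::Error`. A sketch of that check as a standalone helper (assuming the `anyhow` crate, as used by ruff's error type):

```
fn is_broken_pipe(err: &anyhow::Error) -> bool {
    err.chain().any(|cause| {
        cause
            .downcast_ref::<std::io::Error>()
            .is_some_and(|io_err| io_err.kind() == std::io::ErrorKind::BrokenPipe)
    })
}

fn main() {
    let err = anyhow::Error::new(std::io::Error::new(
        std::io::ErrorKind::BrokenPipe,
        "pipe closed",
    ));
    // A broken pipe anywhere in the chain is treated as a clean exit.
    assert!(is_broken_pipe(&err));
}
```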

View File

@@ -151,15 +151,15 @@ impl Printer {
let fix_prefix = format!("[{}]", "*".cyan());
if self.unsafe_fixes.is_hint() {
- if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
- let es = if fixables.unapplicable_unsafe == 1 {
+ if fixables.applicable > 0 && fixables.inapplicable_unsafe > 0 {
+ let es = if fixables.inapplicable_unsafe == 1 {
""
} else {
"es"
};
writeln!(writer,
"{fix_prefix} {} fixable with the `--fix` option ({} hidden fix{es} can be enabled with the `--unsafe-fixes` option).",
- fixables.applicable, fixables.unapplicable_unsafe
+ fixables.applicable, fixables.inapplicable_unsafe
)?;
} else if fixables.applicable > 0 {
// Only applicable fixes
@@ -169,15 +169,15 @@ impl Printer {
fixables.applicable,
)?;
} else {
- // Only unapplicable fixes
- let es = if fixables.unapplicable_unsafe == 1 {
+ // Only inapplicable fixes
+ let es = if fixables.inapplicable_unsafe == 1 {
""
} else {
"es"
};
writeln!(writer,
"No fixes available ({} hidden fix{es} can be enabled with the `--unsafe-fixes` option).",
- fixables.unapplicable_unsafe
+ fixables.inapplicable_unsafe
)?;
}
} else {
@@ -194,7 +194,7 @@ impl Printer {
// Check if there are unapplied fixes
let unapplied = {
if let Some(fixables) = fixables {
- fixables.unapplicable_unsafe
+ fixables.inapplicable_unsafe
} else {
0
}
@@ -545,33 +545,33 @@ fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
#[derive(Debug)]
struct FixableStatistics {
applicable: u32,
- unapplicable_unsafe: u32,
+ inapplicable_unsafe: u32,
}
impl FixableStatistics {
fn try_from(diagnostics: &Diagnostics, unsafe_fixes: UnsafeFixes) -> Option<Self> {
let mut applicable = 0;
- let mut unapplicable_unsafe = 0;
+ let mut inapplicable_unsafe = 0;
for message in &diagnostics.messages {
if let Some(fix) = message.fix() {
if fix.applies(unsafe_fixes.required_applicability()) {
applicable += 1;
} else {
- // Do not include unapplicable fixes at other levels that do not provide an opt-in
+ // Do not include inapplicable fixes at other levels that do not provide an opt-in
if fix.applicability().is_unsafe() {
- unapplicable_unsafe += 1;
+ inapplicable_unsafe += 1;
}
}
}
}
- if applicable == 0 && unapplicable_unsafe == 0 {
+ if applicable == 0 && inapplicable_unsafe == 0 {
None
} else {
Some(Self {
applicable,
- unapplicable_unsafe,
+ inapplicable_unsafe,
})
}
}

View File

@@ -0,0 +1,424 @@
//! Tests the interaction of the `analyze graph` command.
#![cfg(not(target_arch = "wasm32"))]
#![cfg(not(windows))]
use assert_fs::prelude::*;
use std::process::Command;
use std::str;
use anyhow::Result;
use assert_fs::fixture::ChildPath;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;
fn command() -> Command {
let mut command = Command::new(get_cargo_bin("ruff"));
command.arg("analyze");
command.arg("graph");
command.arg("--preview");
command
}
const INSTA_FILTERS: &[(&str, &str)] = &[
// Rewrite Windows output to Unix output
(r"\\", "/"),
];
#[test]
fn dependencies() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
from ruff import c
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"
from . import d
"#})?;
root.child("ruff")
.child("d.py")
.write_str(indoc::indoc! {r#"
from .e import f
"#})?;
root.child("ruff")
.child("e.py")
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py"
],
"ruff/e.py": []
}
----- stderr -----
"###);
});
Ok(())
}
#[test]
fn dependents() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
from ruff import c
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"
from . import d
"#})?;
root.child("ruff")
.child("d.py")
.write_str(indoc::indoc! {r#"
from .e import f
"#})?;
root.child("ruff")
.child("e.py")
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--direction").arg("dependents").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [],
"ruff/b.py": [
"ruff/a.py"
],
"ruff/c.py": [
"ruff/b.py"
],
"ruff/d.py": [
"ruff/c.py"
],
"ruff/e.py": [
"ruff/d.py"
]
}
----- stderr -----
"###);
});
Ok(())
}
#[test]
fn string_detection() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
import importlib
importlib.import_module("ruff.c")
"#})?;
root.child("ruff").child("c.py").write_str("")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}
----- stderr -----
"###);
});
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
----- stderr -----
"###);
});
Ok(())
}
#[test]
fn globs() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff.toml").write_str(indoc::indoc! {r#"
[analyze]
include-dependencies = { "ruff/a.py" = ["ruff/b.py"], "ruff/b.py" = ["ruff/*.py"], "ruff/c.py" = ["*.json"] }
"#})?;
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff").child("a.py").write_str("")?;
root.child("ruff").child("b.py").write_str("")?;
root.child("ruff").child("c.py").write_str("")?;
root.child("ruff").child("d.json").write_str("")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/__init__.py",
"ruff/a.py",
"ruff/b.py",
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.json"
]
}
----- stderr -----
"###);
});
Ok(())
}
#[test]
fn exclude() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff.toml").write_str(indoc::indoc! {r#"
[analyze]
exclude = ["ruff/c.py"]
"#})?;
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff").child("b.py").write_str("")?;
root.child("ruff").child("c.py").write_str("")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": []
}
----- stderr -----
"###);
});
Ok(())
}
#[test]
fn wildcard() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
from ruff.b import *
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
from ruff import c
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"
from ruff.utils import *
"#})?;
root.child("ruff")
.child("utils")
.child("__init__.py")
.write_str("from .helpers import *")?;
root.child("ruff")
.child("utils")
.child("helpers.py")
.write_str("")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/utils/__init__.py"
],
"ruff/utils/__init__.py": [
"ruff/utils/helpers.py"
],
"ruff/utils/helpers.py": []
}
----- stderr -----
"###);
});
Ok(())
}
#[test]
fn nested_imports() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
match x:
case 1:
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
try:
import ruff.c
except ImportError as e:
import ruff.d
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"def c(): ..."#})?;
root.child("ruff")
.child("d.py")
.write_str(indoc::indoc! {r#"def d(): ..."#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py",
"ruff/d.py"
],
"ruff/c.py": [],
"ruff/d.py": []
}
----- stderr -----
"#);
});
Ok(())
}

View File

@@ -326,18 +326,18 @@ fn docstring_options() -> Result<()> {
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
r"
[format]
docstring-code-format = true
docstring-code-line-length = 20
"#,
",
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r#"
.pass_stdin(r"
def f(x):
'''
Something about `f`. And an example:
@@ -357,7 +357,7 @@ def f(x):
>>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
'''
pass
"#), @r###"
"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -509,9 +509,9 @@ fn syntax_error() -> Result<()> {
fs::write(
tempdir.path().join("main.py"),
r#"
r"
from module import =
"#,
",
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
@@ -785,7 +785,7 @@ if condition:
print('Should change quotes')
----- stderr -----
- warning: The following rules may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.
+ warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration.
"###);
Ok(())
}
@@ -1945,11 +1945,10 @@ fn range_end_only() {
def foo(arg1, arg2,):
print("Should format this" )
"#), @r###"
"#), @r#"
success: true
exit_code: 0
----- stdout -----
def foo(
arg1,
arg2,
@@ -1958,7 +1957,7 @@ def foo(arg1, arg2,):
----- stderr -----
"###);
"#);
}
#[test]

View File

@@ -158,15 +158,15 @@ fn check_default_files() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("foo.py"),
r#"
r"
import foo # unused import
"#,
",
)?;
fs::write(
tempdir.path().join("bar.py"),
r#"
r"
import bar # unused import
"#,
",
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
@@ -906,10 +906,10 @@ fn full_output_preview_config() -> Result<()> {
let pyproject_toml = tempdir.path().join("pyproject.toml");
fs::write(
&pyproject_toml,
r#"
r"
[tool.ruff]
preview = true
"#,
",
)?;
let mut cmd = RuffCheck::default().config(&pyproject_toml).build();
assert_cmd_snapshot!(cmd.pass_stdin("l = 1"), @r###"

View File

@@ -1619,6 +1619,58 @@ print(
Ok(())
}
#[test]
fn add_noqa_exclude() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint]
exclude = ["excluded.py"]
select = ["RUF015"]
"#,
)?;
let test_path = tempdir.path().join("noqa.py");
fs::write(
&test_path,
r#"
def first_square():
return [x * x for x in range(20)][0]
"#,
)?;
let exclude_path = tempdir.path().join("excluded.py");
fs::write(
&exclude_path,
r#"
def first_square():
return [x * x for x in range(20)][0]
"#,
)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(STDIN_BASE_OPTIONS)
.args(["--add-noqa"]), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Added 1 noqa directive.
"###);
});
Ok(())
}
/// Infer `3.11` from `requires-python` in `pyproject.toml`.
#[test]
fn requires_python() -> Result<()> {

View File

@@ -200,7 +200,7 @@ linter.safety_table.forced_unsafe = []
linter.target_version = Py37
linter.preview = disabled
linter.explicit_preview_rules = false
- linter.extension.mapping = {}
+ linter.extension = ExtensionMapping({})
linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
@@ -388,4 +388,12 @@ formatter.magic_trailing_comma = respect
formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic
# Analyze Settings
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = Py37
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
----- stderr -----

View File

@@ -37,13 +37,14 @@ name = "red_knot"
harness = false
[dependencies]
+ codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
+ criterion = { workspace = true, default-features = false }
once_cell = { workspace = true }
rayon = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
url = { workspace = true }
ureq = { workspace = true }
- criterion = { workspace = true, default-features = false }
- codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
[dev-dependencies]
ruff_db = { workspace = true }

Some files were not shown because too many files have changed in this diff.