Compare commits

..

126 Commits

Author SHA1 Message Date
Alex Waygood
288ade8381 [ty] Early return in Type::has_relation_to_impl for Type variants that are always mutually disjoint 2026-01-05 18:44:34 +00:00
Jack O'Connor
4712503c6d [ty] cargo insta test --force-update-snapshots (#22313)
Snapshot tests recently started reporting this warning:

> Snapshot test passes but the existing value is in a legacy format.
> Please run cargo insta test --force-update-snapshots to update to a
> newer format.

This PR is the result of that forced update.

One file (crates/ruff_db/src/diagnostic/render/full.rs) seems to get
corrupted, because it contains strings with unprintable characters that
trigger some bug in cargo-insta. I've manually reverted that file, and
also manually reverted the `input_file:` lines, which we like.
2026-01-05 07:55:47 -08:00
Alex Waygood
6b3de1517a [ty] Improve tracebacks when installing dependencies fails in ty_benchmark (#22399) 2026-01-05 14:55:08 +00:00
Alex Waygood
f3dea6e5c9 [ty] Optimize IntersectionType for the common case of a single negated element (#22344)
Co-authored-by: Micha Reiser <micha@reiser.io>
2026-01-05 13:41:50 +00:00
Micha Reiser
24dd149e03 [ty] Extract relation module from types.rs (#22232) 2026-01-05 13:16:49 +00:00
Alex Waygood
b8d527ff46 [ty] Optimize and simplify UnionElement::try_reduce (#22339) 2026-01-05 12:54:44 +00:00
Aria Desires
e63cf978ae [ty] Implement support for explicit markdown code fences in docstring rendering (#22373)
* Fixes https://github.com/astral-sh/ty/issues/2291
2026-01-05 07:13:24 -05:00
Rob Hand
3dab4ff8ad [ty] (docs) - Note insta is required for working with ty tests in ty CONTRIBUTING.md (#22332) 2026-01-05 11:05:13 +01:00
Jason K Hall
24580e2ee8 flake8-simplify: avoid unnecessary builtins import for SIM105 (#22358) 2026-01-05 10:58:46 +01:00
renovate[bot]
3d3af6f7c8 Update pre-commit dependencies (#22393)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2026-01-05 10:24:40 +01:00
renovate[bot]
7cc34c081a Update CodSpeedHQ/action action to v4.5.1 (#22389)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 09:10:06 +01:00
renovate[bot]
7a95013f56 Update Rust crate serde_json to v1.0.148 (#22387)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 09:09:23 +01:00
renovate[bot]
2395954d9a Update Rust crate schemars to v1.2.0 (#22391)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2026-01-05 09:08:58 +01:00
renovate[bot]
670bd01fb5 Update Rust crate arc-swap to v1.8.0 (#22390)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 09:08:32 +01:00
renovate[bot]
eae5c685f8 Update Rust crate proc-macro2 to v1.0.104 (#22386)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 09:05:21 +01:00
renovate[bot]
994f05f3ca Update Rust crate tempfile to v3.24.0 (#22392)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 09:04:59 +01:00
renovate[bot]
8dcecf323b Update taiki-e/install-action action to v2.65.6 (#22388)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 08:03:31 +00:00
renovate[bot]
b12c94e411 Update Rust crate jiff to v0.2.17 (#22384)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 07:57:34 +00:00
renovate[bot]
a9c3ea9674 Update Rust crate matchit to v0.9.1 (#22385)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 07:56:54 +00:00
renovate[bot]
704c57f491 Update Rust crate insta to v1.45.1 (#22383)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 07:56:36 +00:00
renovate[bot]
7a27662eca Update cargo-bins/cargo-binstall action to v1.16.6 (#22380)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 08:30:34 +01:00
renovate[bot]
ce2490ee93 Update dependency @cloudflare/workers-types to v4.20251229.0 (#22381)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-05 08:30:16 +01:00
Charlie Marsh
92a2f2c992 [ty] Apply class decorators via try_call() (#22375)
## Summary

Decorators are now called with the class as an argument, and the return
type becomes the class's type. This mirrors how function decorators
already work.

Closes https://github.com/astral-sh/ty/issues/2313.
2026-01-04 17:11:00 -05:00
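A minimal Python sketch of the behavior described in the commit above (the `Widget` class and `register` decorator are hypothetical, invented purely for illustration):

```python
from typing import reveal_type

class Widget: ...

def register(cls: type) -> type[Widget]:
    # A class decorator: it receives the decorated class and returns another type.
    return Widget

@register
class Button: ...

# The decorator is called with `Button` as its argument, so its return type
# (`type[Widget]`) becomes the type of the `Button` symbol, mirroring how
# function decorators are handled.
reveal_type(Button)  # revealed: type[Widget]
```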
Charlie Marsh
11b551c2be Add a CLAUDE.md (#22370)
## Summary

This is a starting point based on my own experiments. Feedback and
changes welcome -- I think we should iterate on this a lot as we go.
2026-01-04 15:00:31 -05:00
Micha Reiser
b85c0190c5 [ty] Use upstream GetSize implementation for OrderMap and OrderSet (#22374) 2026-01-04 19:54:03 +00:00
Micha Reiser
46a4bfc478 [ty] Use default HashSet for TypeCollector (#22368) 2026-01-04 18:58:30 +00:00
Alex Waygood
0c53395917 [ty] Add a second benchmark for enums with many members (#22364) 2026-01-04 17:58:20 +00:00
Alex Waygood
8464aca795 Bump docstring-adder pin (#22361) 2026-01-03 20:21:45 +00:00
Alex Waygood
e1439beab2 [ty] Use UnionType helper methods more consistently (#22357) 2026-01-03 14:19:06 +00:00
Felix Scherz
fd86e699b5 [ty] narrow TypedDict unions with not in (#22349)
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
2026-01-03 13:12:57 +00:00
Alex Waygood
d0f841bff2 Add help: subdiagnostics for several Ruff rules that can sometimes appear to disagree with ty (#22331) 2026-01-02 22:10:39 +00:00
Alex Waygood
74978cfff2 Error on unused ty: ignore comments when dogfooding ty on our own scripts (#22347) 2026-01-02 20:27:09 +00:00
Alex Waygood
10a417aaf6 [ty] Specify heap_size for SynthesizedTypedDictType (#22345) 2026-01-02 20:09:35 +00:00
Matthew Mckee
a2e0ff57c3 Run cargo sort (#22310) 2026-01-02 19:58:15 +00:00
Nikolas Hearp
0804030ee9 [pylint] Ignore identical members (PLR1714) (#22220)
## Summary

This PR closes #21692. `PLR1714` will no longer flag if all members are
identical. I iterate through the equality comparisons, and if they are
all equal, the rule does not flag.

## Test Plan

Additional tests were added with identical members.
2026-01-02 12:56:17 -05:00
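A hypothetical illustration of the change above (the variables are invented): comparisons whose members are all identical are no longer reported, while comparisons against distinct values are still merged into a membership test.

```python
x = 1
y = 1

# No longer flagged: every compared member is identical, so rewriting this as
# a membership test such as `x in (y, y)` would gain nothing.
if x == y or x == y:
    print("equal")

# Still flagged by PLR1714: the distinct members can be merged into `x in (1, 2)`.
if x == 1 or x == 2:
    print("one or two")
```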
Alex Waygood
26230b1ed3 [ty] Use IntersectionType::from_elements more (#22329) 2026-01-01 15:01:00 +00:00
github-actions[bot]
295ae836fd [ty] Sync vendored typeshed stubs (#22324)
Co-authored-by: typeshedbot <>
2026-01-01 02:59:55 +00:00
Alex Waygood
9677364847 Bump docstring-adder pin (#22323) 2026-01-01 02:44:24 +00:00
github-actions[bot]
8e45bac3c1 [ty] Sync vendored typeshed stubs (#22321)
Co-authored-by: typeshedbot <>
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2026-01-01 01:29:12 +00:00
Alex Waygood
7366a9e951 Bump docstring-adder pin (#22319) 2025-12-31 22:37:53 +00:00
Brent Westbrook
15aa74206e [pylint] Improve diagnostic range for PLC0206 (#22312)
Summary
--

This PR fixes #14900 by:

- Restricting the diagnostic range from the whole `for` loop to only the
`target in iter` part
- Adding secondary annotations to each use of the `dict[key]` accesses
- Adding a `fix_title` suggesting to use `for key in dict.items()`

I thought this approach sounded slightly nicer than the alternative of
renaming the rule to focus on each indexing operation mentioned in
https://github.com/astral-sh/ruff/issues/14900#issuecomment-2543923625,
but I don't feel too strongly. This was easy to implement with our new
diagnostic infrastructure too.

This produces an example annotation like this:

```
PLC0206 Extracting value from dictionary without calling `.items()`
  --> dict_index_missing_items.py:59:5
   |
58 | # A case with multiple uses of the value to show off the secondary annotations
59 | for instrument in ORCHESTRA:
   |     ^^^^^^^^^^^^^^^^^^^^^^^
60 |     data = json.dumps(
61 |         {
62 |             "instrument": instrument,
63 |             "section": ORCHESTRA[instrument],
   |                        ---------------------
64 |         }
65 |     )
66 |
67 |     print(f"saving data for {instrument} in {ORCHESTRA[instrument]}")
   |                                              ---------------------
68 |
69 |     with open(f"{instrument}/{ORCHESTRA[instrument]}.txt", "w") as f:
   |                               ---------------------
70 |         f.write(data)
   |
help: Use `for instrument, value in ORCHESTRA.items()` instead
```

which I think is a big improvement over:

```
PLC0206 Extracting value from dictionary without calling `.items()`
  --> dict_index_missing_items.py:59:1
   |
58 |   # A case with multiple uses of the value to show off the secondary annotations
59 | / for instrument in ORCHESTRA:
60 | |     data = json.dumps(
61 | |         {
62 | |             "instrument": instrument,
63 | |             "section": ORCHESTRA[instrument],
64 | |         }
65 | |     )
66 | |
67 | |     print(f"saving data for {instrument} in {ORCHESTRA[instrument]}")
68 | |
69 | |     with open(f"{instrument}/{ORCHESTRA[instrument]}.txt", "w") as f:
70 | |         f.write(data)
   | |_____________________^
   |
```

The secondary annotation feels a bit bare without a message, but I thought it might be too busy to include one. Something like `value extracted here` or `indexed here` might work if we do want to include a brief message.

To avoid collecting a `Vec` of annotation ranges, I added a `&Checker` to the rule's visitor to emit diagnostics as we go instead of at the end.

Test Plan
--

Existing tests, plus a new case showing off multiple secondary
annotations
2025-12-31 13:54:58 -05:00
ValdonVitijaa
77c2f4c6cb [flake8-unused-arguments] Mark **kwargs in TypeVar as used (ARG001) (#22214)
## Summary

Fixes false positive in ARG001 when `**kwargs` is passed to
`typing.TypeVar`

Closes #22178

When `**kwargs` is used in a `typing.TypeVar` call, the checker was not
recognizing it as a usage, leading to false positive "unused function
argument" warnings.

### Root Cause

In the AST, keyword arguments are represented by the `Keyword` struct
with an `arg` field of type `Option<Identifier>`:
- Named keywords like `bound=int` have `arg = Some("bound")`
- Dictionary unpacking like `**kwargs` has `arg = None`

The existing code only handled the `Some(id)` case, never visiting the
expression when `arg` was `None`, so `**kwargs` was never marked as
used.

### Changes

Added an `else` branch to handle `**kwargs` unpacking by calling
`visit_non_type_definition(value)` when `arg` is `None`. This ensures
the `kwargs` variable is properly visited and marked as used by the
semantic model.

## Test Plan

Tested with the following code:

```python
import typing

def f(
    *args: object,
    default: object = None,
    **kwargs: object,
) -> None:
    typing.TypeVar(*args, **kwargs)
```

Before:

`ARG001 Unused function argument: kwargs`

After:

`All checks passed!`

Run the example with the following command (from the root of the ruff repository; change the path to point at the module containing the code example):

`cargo run -p ruff -- check /path/to/file.py --isolated --select=ARG --no-cache`
2025-12-31 11:07:56 -05:00
Rob Hand
758926eecd [ty] Add blurb for newer crates to `ty/CONTRIBUTING.md` (#22309)
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-31 07:58:33 +00:00
Matthew Mckee
6433b88ffa Clean up pre-commit config (#22311) 2025-12-31 08:36:20 +01:00
Charlie Marsh
f619783066 [ty] Treat __setattr__ as fallback-only (#22014)
## Summary

Closes https://github.com/astral-sh/ty/issues/1460.
2025-12-30 19:01:10 -05:00
Ibraheem Ahmed
ff05428ce6 [ty] Subtyping for bidirectional inference (#21930)
## Summary

Supersedes https://github.com/astral-sh/ruff/pull/21747. This version
uses the constraint solver directly, which means we should benefit from
constraint solver improvements for free.

Resolves https://github.com/astral-sh/ty/issues/1576.
2025-12-30 17:03:20 -05:00
Matthew Mckee
4f2529f353 [ty] Fix typo in cli docs for respect_ignore_files arg (#22308) 2025-12-30 21:38:50 +01:00
Rob Hand
6f9ea73ac9 [ty] Remove TY_MAX_PARALLELISM as conformance runs no longer panic (#22307) 2025-12-30 20:42:59 +01:00
Charlie Marsh
12dd27da52 [ty] Support narrowing for tuple matches with literal elements (#22303)
## Summary

See:
https://github.com/astral-sh/ruff/pull/22299#issuecomment-3699913849.
2025-12-30 13:45:07 -05:00
Alex Waygood
e0e1e9535e [ty] Convert several comments in ty_extensions.pyi to docstrings (#22305) 2025-12-30 17:41:43 +00:00
github-actions[bot]
7173c7ea3f [ty] Sync vendored typeshed stubs (#22302)
Co-authored-by: typeshedbot <>
Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
2025-12-30 17:24:13 +00:00
Alex Waygood
5013752c6c Bump docstring-adder pin (#22301) 2025-12-30 17:03:54 +00:00
Kevin Yang
b2b9d91859 [airflow] Passing positional argument into airflow.lineage.hook.HookLineageCollector.create_asset is not allowed (AIR303) (#22046)
## Summary

This is a follow-up PR to https://github.com/astral-sh/ruff/pull/21096.

The new AIR303 code covers function-signature changes in Airflow 3.0. The rule added here checks whether a positional argument is passed to `airflow.lineage.hook.HookLineageCollector.create_asset`. Since this method was updated to accept keyword arguments only, passing a positional argument to it is no longer allowed and will raise an error. The check looks for a positional argument at index 0.

## Test Plan

A new test file is added to the fixtures for AIR303, and the snapshot test is updated accordingly.

<img width="1444" height="513" alt="Screenshot from 2025-12-17 20-54-48"
src="https://github.com/user-attachments/assets/bc235195-e986-4743-9bf7-bba65805fb87"
/>

<img width="981" height="433" alt="Screenshot from 2025-12-17 21-34-29"
src="https://github.com/user-attachments/assets/492db71f-58f2-40ba-ad2f-f74852fa5a6b"
/>
2025-12-30 11:39:08 -05:00
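A hedged sketch of the pattern the new AIR303 check targets; the `uri` keyword name is assumed here for illustration, as only the positional-versus-keyword distinction matters:

```python
from airflow.lineage.hook import HookLineageCollector

collector = HookLineageCollector()

# AIR303: `create_asset` accepts keyword arguments only in Airflow 3.0, so a
# positional argument (at index 0) is flagged.
collector.create_asset("s3://bucket/data.csv")

# OK: pass the value as a keyword argument instead.
collector.create_asset(uri="s3://bucket/data.csv")
```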
Brent Westbrook
c483b59ddd [ruff] Add non-empty-init-module (RUF067) (#22143)
Summary
--

This PR adds a new rule, `non-empty-init-module`, which restricts the kind of code that can be included in an `__init__.py` file. By default, docstrings, imports, and assignments to `__all__` are allowed. When the new configuration option `lint.ruff.strictly-empty-init-modules` is enabled, no code at all is allowed.

This closes #9848, where these two variants correspond to different rules in the [`flake8-empty-init-modules`](https://github.com/samueljsb/flake8-empty-init-modules/) linter. The upstream rules are EIM001, which bans all code, and EIM002, which bans non-import/docstring/`__all__` code. Since we discussed folding these into one rule on [Discord], I just added the rule to the `RUF` group instead of adding a new `EIM` plugin.

I'm not really sure we need to flag docstrings even when the strict setting is enabled, but I just followed upstream for now. Similarly, as I noted in a TODO comment, we could also allow more statements involving `__all__`, such as `__all__.append(...)` or `__all__.extend(...)`. The current version only allows assignments, like upstream, as well as annotated and augmented assignments, unlike upstream.

I think when we discussed this previously, we considered flagging the module itself as containing code, but for now I followed the upstream implementation of flagging each statement in the module that breaks the rule (actually the upstream linter flags each _line_, including comments). This will obviously be a bit noisier, emitting many diagnostics for the same module. But this also seems preferable because it flags every statement that needs to be fixed up front instead of only emitting one diagnostic for the whole file that persists as you keep removing more lines. It was also easy to implement in `analyze::statement` without a separate visitor.

The first commit adds the rule and baseline tests, the second commit adds the option and a diff test showing the additional diagnostics when the setting is enabled.

I noticed a small (~2%) performance regression on our largest benchmark,
so I also added a cached `Checker::in_init_module` field and method
instead of the `Checker::path` method. This was almost the only reason
for the `Checker::path` field at all, but there's one remaining
reference in a `warn_user!`
[call](https://github.com/astral-sh/ruff/blob/main/crates/ruff_linter/src/checkers/ast/analyze/definitions.rs#L188).

Test Plan
--

New tests adapted from the upstream linter

## Ecosystem Report

I've spot-checked the ecosystem report, and the results look "correct."
This is obviously a very noisy rule if you do include code in
`__init__.py` files. We could make it less noisy by adding more
exceptions (e.g. allowing `if TYPE_CHECKING` blocks, allowing
`__getattr__` functions, allowing imports from `importlib` assignments),
but I'm sort of inclined just to start simple and see what users need.

[Discord]:
https://discord.com/channels/1039017663004942429/1082324250112823306/1440086001035771985

---------

Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-30 11:32:10 -05:00
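For the default configuration described above, a hypothetical `__init__.py` might look like the following (the module contents are invented): the docstring, the imports, and the `__all__` assignment are allowed, while the remaining statements would be reported by RUF067. Under `lint.ruff.strictly-empty-init-modules = true`, the description above indicates that no code at all is allowed.

```python
"""Top-level package docstring (allowed by default)."""

import os                       # allowed: import
from importlib import metadata  # allowed: import

__all__ = ["os", "metadata"]    # allowed: assignment to `__all__`

VERSION = "1.0"                 # RUF067: other assignments are flagged
print("package imported")       # RUF067: arbitrary code is flagged
```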
Charlie Marsh
57218753be [ty] Narrow TypedDict literal access in match statements (#22299)
## Summary

Closes https://github.com/astral-sh/ty/issues/2279.
2025-12-30 11:29:09 -05:00
Brent Westbrook
2ada8b6634 Document options for more rules (#22295)
Summary
--

This is a follow up to #22198 documenting more rule options I found while going through all of our rules.

The second commit renames the internal `flake8_gettext::Settings::functions_names` field to `function_names` to match the external configuration option. I guess this is technically breaking because it's exposed to users via `--show-settings`, but I don't think we consider that part of our stable API. I can definitely revert that if needed, though.

The other changes are just like #22198, adding new `## Options` sections to rules to document the settings they use. I missed these in the previous PR because they were used outside the rule implementations themselves. Most of these settings are checked where the rules' implementation functions are called instead.

Oh, the last commit also updates the removal date for `typing.ByteString`, which got pushed back in the 3.14 release. I snuck that in today since I never opened this PR last week.

I also fixed one reference link in RUF041.

Test Plan
--

Docs checks in CI
2025-12-30 08:44:11 -05:00
RasmusNygren
0edd97dd41 [ty] Add autocomplete suggestions for class arguments (#22110) 2025-12-30 13:10:56 +00:00
renovate[bot]
f8f4ca8fbc Update dependency react-resizable-panels to v4 (#22279)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-30 11:04:25 +01:00
renovate[bot]
3d35dbd334 Update actions/upload-artifact digest to b7c566a (#22250)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-30 09:07:58 +00:00
renovate[bot]
a652b411b8 Update NPM Development dependencies (#22289)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-30 10:04:24 +01:00
RasmusNygren
4dac3d105d [ty] Add skip_dunders option to CompletionTestBuilder (#22293)
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-30 08:10:14 +00:00
Shunsuke Shibayama
77ad107617 [ty] increase the max number of diagnostics for sympy in ty_walltime benchmark (#22296) 2025-12-30 13:20:19 +09:00
Charlie Marsh
b925ae5061 [ty] Avoid including property in subclasses properties (#22088)
## Summary

As-is, the following rejects `return self.value` in `def other` in the
subclass
([link](https://play.ty.dev/f55b47b2-313e-45d1-ba45-fde410bed32e))
because `self.value` is resolving to `Unknown | int | float | property`:

```python
class Base:
    _value: float = 0.0

    @property
    def value(self) -> float:
        return self._value

    @value.setter
    def value(self, v: float) -> None:
        self._value = v

    @property
    def other(self) -> float:
        return self.value

    @other.setter
    def other(self, v: float) -> None:
        self.value = v

class Derived(Base):
    @property
    def other(self) -> float:
        return self.value

    @other.setter
    def other(self, v: float) -> None:
        reveal_type(self.value)  # revealed: int | float
        self.value = v
```

I believe the root cause is that we're not excluding properties when
searching for class methods, so we're treating the `other` setter as a
classmethod. I don't fully understand how that ends up materializing as
`| property` on the union though.
2025-12-30 03:28:03 +00:00
Charlie Marsh
9333f15433 [ty] Fix match exhaustiveness for enum | None unions (#22290)
## Summary

If we match on a `TestEnum | None`, then when adding a case like
`~Literal[TestEnum.FOO]` (i.e., after `if value == TestEnum.FOO:
return`), we'd distribute `Literal[TestEnum.BAR]` on the entire builder,
creating `None & Literal[TestEnum.BAR]` which simplified to `Never`.
Instead, we should only expand to the remaining members for pieces of
the intersection that contain the enum.

Now, `(TestEnum | None) & ~Literal[TestEnum.FOO] &
~Literal[TestEnum.BAR]` correctly simplifies to `None` instead of
`Never`.

Closes https://github.com/astral-sh/ty/issues/2260.
2025-12-29 22:19:28 -05:00
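A minimal sketch of the scenario described above, assuming this is roughly the shape of the code from the linked issue:

```python
from enum import Enum

class TestEnum(Enum):
    FOO = 1
    BAR = 2

def handle(value: TestEnum | None) -> str:
    if value == TestEnum.FOO:
        return "foo"
    # `value` should now be `Literal[TestEnum.BAR] | None`. Previously, expanding
    # `~Literal[TestEnum.FOO]` to the remaining enum members was distributed over
    # the whole union, producing `None & Literal[TestEnum.BAR]` (i.e. `Never`) and
    # dropping the `None` branch, which broke exhaustiveness checking below.
    match value:
        case TestEnum.BAR:
            return "bar"
        case None:
            return "none"
```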
Shunsuke Shibayama
c429ef8407 [ty] don't expand type aliases via type mappings unless necessary (#22241)
## Summary

`apply_type_mapping` always expands type aliases and operates on the
resulting types, which can lead to cluttered results due to excessive
type alias expansion in places where it is not actually needed.

Specifically, type aliases are expanded when displaying method
signatures, because we use `TypeMapping::BindSelf` to get the method
signature.

```python
type Scalar = int | float
type Array1d = list[Scalar] | tuple[Scalar]

def f(x: Scalar | Array1d) -> None: pass
reveal_type(f)  # revealed: def f(x: Scalar | Array1d) -> None

class Foo:
    def f(self, x: Scalar | Array1d) -> None: pass
# should be `bound method Foo.f(x: Scalar | Array1d) -> None`
reveal_type(Foo().f)  # revealed: bound method Foo.f(x: int | float | list[int | float] | tuple[int | float]) -> None
```

In this PR, when type mapping is performed on a type alias, the
expansion result without type mapping is compared with the expansion
result after type mapping, and if the two are equivalent, the expansion
is deemed redundant and canceled.

## Test Plan

mdtest updated
2025-12-29 19:02:56 -08:00
Eric Mark Martin
8716b4e230 [ty] implement typing.TypeGuard (#20974)
## Summary

Resolve(s) astral-sh/ty#117, astral-sh/ty#1569

Implement `typing.TypeGuard`. Due to the fact that it [overrides
anything previously known about the checked
value](https://typing.python.org/en/latest/spec/narrowing.html#typeguard)---

> When a conditional statement includes a call to a user-defined type
guard function, and that function returns true, the expression passed as
the first positional argument to the type guard function should be
assumed by a static type checker to take on the type specified in the
TypeGuard return type, unless and until it is further narrowed within
the conditional code block.

---we have to substantially rework the constraints system. In
particular, we make constraints represented as a disjunctive normal form
(DNF) where each term includes a regular constraint, and one or more
disjuncts with a typeguard constraint. Some test cases (including some
with more complex boolean logic) are added to `type_guards.md`.


## Test Plan

- update existing tests
- add new tests for more complex boolean logic with `TypeGuard`
- add new tests for `TypeGuard` variance

---------

Co-authored-by: Carl Meyer <carl@astral.sh>
2025-12-29 17:54:17 -08:00
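For reference, a standard `TypeGuard` example in the style of the typing spec (the names are hypothetical), illustrating the behavior quoted above: inside the guarded branch, the checked value takes on the `TypeGuard` return type regardless of what was previously known about it.

```python
from typing import TypeGuard

def is_str_list(val: list[object]) -> TypeGuard[list[str]]:
    """Return True only if every element of `val` is a string."""
    return all(isinstance(x, str) for x in val)

def process(val: list[object]) -> None:
    if is_str_list(val):
        # The TypeGuard overrides whatever was previously known about `val`:
        # in this branch it is treated as `list[str]`.
        print(", ".join(val))
    else:
        print(f"{len(val)} item(s), not all strings")
```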
Alex Waygood
9dadf2724c [ty] Add documentation for ty_extensions.Top and ty_extensions.Bottom (#22245)
Co-authored-by: Carl Meyer <carl@astral.sh>
2025-12-29 19:43:17 +00:00
Charlie Marsh
3d8ae2e476 [ty] Avoid showing misleading hint for unpacked tuple arguments (#22286)
## Summary

We could implement support for showing multiple argument names, though
this seems to match PyCharm.

Closes https://github.com/astral-sh/ty/issues/2250.
2025-12-29 13:25:08 -05:00
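A small hypothetical example of the situation: the unpacked tuple spans more than one parameter, so showing a single parameter-name inlay hint in front of it would be misleading.

```python
def move_to(x: int, y: int) -> None:
    print(x, y)

point = (3, 4)

# The unpacked tuple fills both `x` and `y`; rather than labelling it with just
# the first parameter's name, no parameter-name hint is shown for `*point`.
move_to(*point)
```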
Alex Waygood
ebc1323ccb Bump shellcheck to the latest version (#22285) 2025-12-29 18:22:07 +00:00
Alex Waygood
0584081dc8 Don't run ruff-ecosystem on PRs that only touch ruff_benchmark (#22246) 2025-12-29 17:33:53 +00:00
renovate[bot]
b31ff2c5ab Update Rust crate libc to v0.2.178 (#22257)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:24:54 +01:00
renovate[bot]
ce27dc9b2d Update dependency monaco-editor to ^0.55.0 (#22264)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:24:41 +01:00
renovate[bot]
ab542698ad Update actions/download-artifact digest to 37930b1 (#22249)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-29 17:20:38 +00:00
renovate[bot]
a737a56c53 Move quickcheck dependency pins to workspace Cargo.toml (#22247) 2025-12-29 17:12:31 +00:00
renovate[bot]
a1c3f16358 Update pre-commit dependencies (#22281)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-29 17:11:28 +00:00
renovate[bot]
8f54701f0f Update docker/setup-buildx-action action to v3.12.0 (#22266)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:11:20 +01:00
renovate[bot]
a03a65fec5 Update actions/cache action to v5 (#22274)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:10:52 +01:00
renovate[bot]
f800ad3fad Update docker/metadata-action action to v5.10.0 (#22265)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:09:50 +01:00
renovate[bot]
64baa366f2 Update dependency node to v24 (#22278)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:09:31 +01:00
renovate[bot]
ce4dd7f12d Update actions/checkout action to v6 (#22275)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:08:53 +01:00
renovate[bot]
4b0aa96645 Update GitHub Artifact Actions (#22280)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:07:22 +01:00
renovate[bot]
05a5b51ab1 Update taiki-e/install-action action to v2.65.1 (#22273)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 18:02:44 +01:00
renovate[bot]
7cf1ca399a Update dependency mdformat-mkdocs to v5 (#22277)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:53:22 +01:00
renovate[bot]
475016616b Update Rust crate serde_with to v3.16.1 (#22270)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 11:48:10 -05:00
renovate[bot]
ea2ff92a06 Update Rust crate supports-hyperlinks to v3.2.0 (#22271)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 16:44:05 +00:00
renovate[bot]
96d7c4bb6a Update Rust crate schemars to v1.1.0 (#22269)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:43:01 +01:00
renovate[bot]
9261904c41 Update Rust crate criterion to 0.8.0 (#22267)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 16:42:24 +00:00
renovate[bot]
4d05dcf4cd Update Rust crate toml to v0.9.10 (#22260)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:41:17 +01:00
renovate[bot]
81542ca64c Update Rust crate tracing to v0.1.44 (#22261)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:39:37 +01:00
renovate[bot]
085a44e38d Update Rust crate tracing-indicatif to v0.3.14 (#22262)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:38:47 +01:00
renovate[bot]
2919ec9bd5 Update dependency mdformat to v1 (#22276)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:38:15 +01:00
renovate[bot]
28a56796fe Update astral-sh/setup-uv action to v7.1.6 (#22252)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 11:37:26 -05:00
renovate[bot]
78504bd57d Update Rust crate insta to v1.45.0 (#22268)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:37:19 +01:00
renovate[bot]
756f8a5c18 Update Rust crate serde_json to v1.0.146 (#22259)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 11:36:50 -05:00
renovate[bot]
36d1cccdc3 Update actions/setup-node action to v6.1.0 (#22263)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 16:36:35 +00:00
renovate[bot]
130c1f83a5 Update Rust crate uuid to v1.19.0 (#22272)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 16:34:49 +00:00
renovate[bot]
86d2fc8531 Update salsa digest to 309c249 (#22251)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:32:45 +01:00
renovate[bot]
ab85a38d39 Update Rust crate log to v0.4.29 (#22258)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:26:18 +01:00
renovate[bot]
388c1b6f10 Update dependency mkdocs-material to v9.7.1 (#22254)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:25:12 +01:00
renovate[bot]
d7bc1a02bc Update cargo-bins/cargo-binstall action to v1.16.5 (#22253)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:24:51 +01:00
renovate[bot]
cb87fb7424 Update dependency ruff to v0.14.10 (#22255)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 17:23:43 +01:00
renovate[bot]
d13e01ed17 Update Rust crate camino to v1.2.2 (#22256)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-29 16:23:24 +00:00
Alex Waygood
fbb5c8aa3c [ty] Fix LiteralString import in ty_extensions.pyi (#22244) 2025-12-29 14:59:19 +00:00
Harutaka Kawamura
6730350bbd [refurb] Mark FURB192 fix as always unsafe (#22210)
## Summary

Close #22204

## Test Plan

Existing checks
2025-12-29 09:17:33 -05:00
samypr100
e71fd9c040 ci(zizmor): remove broad zizmor ignores (#22199) 2025-12-29 10:52:30 +01:00
Matthew Mckee
fde33baaa5 [ty] Make the implicit shadowing message on invalid assignment diagnostic info (#22219) 2025-12-29 10:30:48 +01:00
Micha Reiser
8efa14ae1b [ty] Limit the returned completions to reduce lag (#22240) 2025-12-29 10:16:32 +01:00
Micha Reiser
6776543b62 [ty] Reduce tracing level for constraint.rs logs (#22239) 2025-12-29 09:22:17 +01:00
Carl Meyer
4c4e652b38 [ty] callable type of a type object is not function-like (#22226) 2025-12-28 11:24:45 -08:00
Micha Reiser
d5c39d3f9f [ty] Fix property-tests (#22229) 2025-12-28 09:58:48 +01:00
Matthew Mckee
dea48ecef0 [ty] Add option to disable syntax errors in the language server (#22217)
Co-authored-by: Micha Reiser <micha@reiser.io>
2025-12-27 20:24:38 +00:00
Carl Meyer
55c8707be6 [ty] fix display of top ParamSpec specialization (#22227)
## Summary

I only noticed this in the ecosystem report of
https://github.com/astral-sh/ruff/pull/22213 after merging it. The
change to displaying `Top[]` wrapper around the entire signature instead
of just the parameters had the side effect of not showing it at all when
displaying a top ParamSpec specialization. This PR fixes that.

Marking internal since this is a fixup of a not-released PR.

## Test Plan

Added mdtest that fails without this PR.
2025-12-27 11:14:22 -08:00
Carl Meyer
6d5fb09e92 [ty] fix and simplify callable type materializations (#22213)
## Summary

A couple things I noticed when taking another look at the callable type
materializations.

1) Previously we wrongly ignored the return type when
bottom-materializing a callable with gradual signature, and always
changed it to `Never`.
2) We weren't correctly handling overloads that included a gradual
signature. Rather than separately materializing each overload, we would
just mark the entire callable as "top" or replace the entire callable
with the bottom signature.

Really, "top parameters" is something that belongs on the `Parameters`,
not on the entire `CallableType`. Conveniently, we already have
`ParametersKind` where we can track this, right next to where we already
track `ParametersKind::Gradual`. This saves a bit of memory, fixes the
two bugs above, and simplifies the implementation considerably (net
removal of 100+ LOC, a bunch of places that shouldn't need to care about
topness of a callable no longer need to.)

One user-visible change from this is that I now display the "top
callable" as `(Top[...]) -> object` instead of `Top[(...) -> object]`. I
think this is a (minor) improvement, because it wraps exactly the part
in `Top` that needs to be, rather than misleadingly wrapping the entire
callable type, including the return type (which has already been
separately materialized). I think the prior display would be
particularly confusing if the return type also has its own `Top` in it:
previously we could have e.g. `Top[(...) -> Top[list[Unknown]]]`, which
I think is less clear than the new `(Top[...]) -> Top[list[Unknown]]`.

## Test Plan

Added mdtests that failed before this PR and pass after it.

### Ecosystem

The changed diagnostics are all either the change to `Top` display, or
else known non-deterministic output. The added diagnostics are all true
positives:

The added diagnostic at
aa35ca1965/torchvision/transforms/v2/_utils.py (L149)
is a true positive that wasn't caught by the previous version. `str` is
not assignable to `Callable[[Any], Any]` (strings are not callable), nor
is the top callable (top callable includes callables that do not take a
single required positional argument.)

The added diagnostic at
081535ad9b/starlette/routing.py (L67)
is also a (pedantic) true positive. It's the same case as #1567 -- the
code assumes that it is impossible for a subclass of `Response` to
implement `__await__` (yielding something other than a `Response`).

The pytest added diagnostics are also both similar true positives: they
make the assumption that an object cannot simultaneously be a `Sequence`
and callable, or an `Iterable` and callable.
2025-12-27 10:45:07 -08:00
Micha Reiser
fffd3e5cfb [ty] Re-use vec when building a VariableLengthTypeVarTuple with the builder (#22225) 2025-12-27 17:06:30 +01:00
RasmusNygren
7ac1874ca0 [ty] Use the AST to suppress keywords in decorators (#22224) 2025-12-27 13:29:45 +00:00
Alex Waygood
7290bdc41e [ty] Use a length-2 array for UnionTypeInstance::_value_expr_types (#22222) 2025-12-27 11:47:46 +00:00
Simon Lamon
c032e27566 [ty] Rename non-subscriptable error code to not-subscriptable (#22193) 2025-12-27 11:44:35 +00:00
Micha Reiser
da188d5cf6 [ty] Reduce monomorphization in add_binding (#22196) 2025-12-27 11:17:27 +01:00
Micha Reiser
5d32ab8175 [ty] Return slices for Tuple methods (#22192) 2025-12-27 10:30:34 +01:00
Matthew Mckee
6342cec842 [ty] Promote float and complex when promoting literals (#22215)
## Summary

Resolve https://github.com/astral-sh/ty/issues/2226

We need to add a special case in `apply_type_mapping` instead of
directly in `promote_literals_impl` because we do not reach that code
path with non-generic, non-tuple nominal instances. We still ensure we
apply the normal mapping if we do not see `float` or `complex` instances.

## Test Plan

Update existing mdtest and add a new case to `literal_promotion.md`
2025-12-26 16:19:23 -08:00
Brent Westbrook
95a532f9fd [pylint] Restore the fix safety docs for PLW0133 (#22211)
Summary
--

Noticed while responding to #22201 that the last sentence here just ends
abruptly. It turns out that I missed this change when reviewing #21382.

Test Plan
--

CI
2025-12-26 11:45:15 -05:00
Brent Westbrook
c842de5c4c Document the options used by more rules (#22198)
Summary
--

While analyzing our rules, I wanted to know which of them use
configuration options but noticed that some of them were not documented
(or at least not documented in a separate `## Options` section).

I had Claude generate an initial list of candidate rules, but it
contained a lot of false positives that I filtered out, and I ended up
adding all of these sections myself. I'm not claiming that the options
lists are exhaustive (as in the rules may use additional options beyond
what I found), but this will at least help with my goal of determining
whether or not a rule is configurable at all and also hopefully be
helpful in general.

I mostly just tacked on an `## Options` section without any commentary,
but I added a couple lines of explanation when I felt that the meaning
of the options wasn't obvious from the context.

I also noticed a bit of variation in the `flake8-simplify` rules from
doing this. Some of them offer a diagnostic but no fix depending on the
resulting line length of the suggestion, while others offer neither. I'm
not sure we need to do anything different here, but it seemed worth
mentioning.

Test Plan
--

Docs tests to make sure the links are right
2025-12-26 11:24:49 -05:00
Micha Reiser
9693375e10 [ty] Reduce monomorphization (#22195) 2025-12-26 10:02:20 +01:00
Matthew Mckee
1ec3503cc3 [ty] Fix playground inlay hint location (#22200) 2025-12-26 09:20:57 +01:00
Alex Waygood
19b10993e1 [ty] Automatically re-run ecosystem-analyzer workflow on subsequent pushes to a PR, if the PR has the ecosystem-analyzer label (#22179)
## Summary

This PR reworks our ecosystem-analyzer workflow so that it automatically
reruns if a PR with the `ecosystem-analyzer` label has new commits
pushed to it, or is reopened after previously being closed. It's
currently easy to forget that you need to remove and re-add the label to
trigger a fresh workflow run, which can then mean that there are stale
(misleading) results in the PR comment posted by the bot. It also means
that it takes longer for CI to finish than it would otherwise, because
it might be a few minutes after pushing new commits to the PR before you
remember that you also need to remove and re-add the label.

To write this PR, I consulted:
- The GitHub workflow trigger documentation:
https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#pull_request
- This Stack Overflow answer:
https://stackoverflow.com/a/59588725/13990016

## Test Plan

I experimented with pushing commits to this PR and closing/reopening it,
and both of these actions triggered fresh runs of the ecosystem-analyzer
workflow when the label was present on the PR. However, removing the
label again meant that the workflow was no longer triggered by these
actions.
2025-12-25 17:41:19 +00:00
Micha Reiser
014abe1ee1 [ty] Fix completion in decorators with missing declaration (#22177) 2025-12-25 15:05:47 +00:00
1901 changed files with 18115 additions and 11012 deletions

View File

@@ -1,67 +0,0 @@
{
"permissions": {
"allow": [
"Bash(cargo build:*)",
"Read(//home/micha/astral/discord.py/discord/**)",
"Bash(source:*)",
"Bash(./target/profiling/ty check:*)",
"Bash(tee:*)",
"Read(//home/micha/astral/discord.py/.venv/**)",
"Read(//home/micha/astral/discord.py/**)",
"Bash(perf record:*)",
"Bash(perf report:*)",
"Bash(time:*)",
"Bash(../ruff/target/profiling/ty check discord/audit_logs.py -vv)",
"Bash(sed:*)",
"Read(//home/micha/git/TypeScript/**)",
"Bash(cargo test:*)",
"Bash(MDTEST_TEST_FILTER='union_types.md - Union types - Unions of tuples' cargo test -p ty_python_semantic --test mdtest -- mdtest__union_types)",
"Bash(timeout 10 cargo test --package ty_python_semantic --lib types::tests::divergent_type)",
"Bash(timeout 30 cargo test:*)",
"Bash(git stash:*)",
"Bash(timeout 60 time:*)",
"Bash(for i in 1 2 3 4 5)",
"Bash(do echo \"Run $i:\")",
"Bash(done)",
"Bash(for i in 1 2 3)",
"Bash(cargo fuzz run:*)",
"Bash(timeout 60 cargo fuzz run -s none ty_check_invalid_syntax -- -timeout=1)",
"Bash(for:*)",
"Bash(do echo \"=== Checking $crash ===\")",
"Bash(uvx ty@latest check \"$crash\")",
"Bash(do echo \"=== $crash ===\")",
"Bash(while read crash)",
"Bash(cargo fuzz cmin:*)",
"Bash(cargo +nightly fuzz cmin:*)",
"Bash(timeout 120 cargo fuzz run -s none ty_check_invalid_syntax -- -timeout=1)",
"Bash(awk:*)",
"Bash(while read file)",
"Bash(cat:*)",
"Bash(uvx ty@latest:*)",
"Bash(do cp \"$crash\" /tmp/isolated_crash.py)",
"Bash(echo \"=== $crash ===\")",
"Bash(do echo \"=== test$i.py ===\")",
"Bash(do echo \"=== Testing $crash ===\")",
"Bash(tree:*)",
"Bash(cut:*)",
"Bash(grep:*)",
"Bash(ls:*)",
"Bash(xargs basename:*)",
"Bash(wc:*)",
"Bash(find:*)",
"Bash({} ;)",
"Bash(git checkout:*)",
"Bash(do)",
"Bash(if ! grep -q \"use crate::types\" \"$f\")",
"Bash(! grep -q \"crate::types::\" \"$f\")",
"Bash(then)",
"Bash(else)",
"Bash(fi)",
"Bash(1)",
"Bash(__NEW_LINE__ echo \"Done\")",
"Bash(rm:*)"
],
"deny": [],
"ask": []
}
}

View File

@@ -13,3 +13,8 @@ self-hosted-runner:
- github-windows-2025-x86_64-8
- github-windows-2025-x86_64-16
- codspeed-macro
paths:
".github/workflows/mypy_primer.yaml":
ignore:
- 'condition "false" is always evaluated to false. remove the if: section'

View File

@@ -39,7 +39,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -59,7 +59,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload sdist"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-sdist
path: dist
@@ -68,7 +68,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -84,7 +84,7 @@ jobs:
target: x86_64
args: --release --locked --out dist
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-macos-x86_64
path: dist
@@ -99,7 +99,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-macos-x86_64
path: |
@@ -110,7 +110,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -131,7 +131,7 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-aarch64-apple-darwin
path: dist
@@ -146,7 +146,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-aarch64-apple-darwin
path: |
@@ -166,7 +166,7 @@ jobs:
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -192,7 +192,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -203,7 +203,7 @@ jobs:
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -219,7 +219,7 @@ jobs:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -242,7 +242,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -260,7 +260,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -296,7 +296,7 @@ jobs:
arch: riscv64
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -327,7 +327,7 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -345,7 +345,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -361,7 +361,7 @@ jobs:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -389,7 +389,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -407,7 +407,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -427,7 +427,7 @@ jobs:
arch: armv7
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
@@ -456,7 +456,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -474,7 +474,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-${{ matrix.platform.target }}
path: |

View File

@@ -20,6 +20,12 @@ on:
env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
permissions:
contents: read
# TODO(zanieb): Ideally, this would be `read` on dry-run but that will require
# significant changes to the workflow.
packages: write # zizmor: ignore[excessive-permissions]
jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
@@ -33,12 +39,12 @@ jobs:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
submodules: recursive
persist-credentials: false
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
@@ -63,7 +69,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
@@ -96,7 +102,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digests
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
@@ -113,17 +119,17 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
@@ -167,7 +173,7 @@ jobs:
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
@@ -219,7 +225,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
@@ -256,17 +262,17 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:

View File

@@ -49,8 +49,10 @@ jobs:
py-fuzzer: ${{ steps.check_py_fuzzer.outputs.changed }}
# Flag that is set to "true" when code related to the playground changes.
playground: ${{ steps.check_playground.outputs.changed }}
# Flag that is set to "true" when code related to the benchmarks changes.
benchmarks: ${{ steps.check_benchmarks.outputs.changed }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0
persist-credentials: false
@@ -95,6 +97,7 @@ jobs:
':!crates/ruff_python_formatter/**' \
':!crates/ruff_formatter/**' \
':!crates/ruff_dev/**' \
':!crates/ruff_benchmark/**' \
':scripts/*' \
':python/**' \
':.github/workflows/ci.yaml' \
@@ -202,6 +205,21 @@ jobs:
':crates/ruff_python_trivia/**' \
':crates/ruff_source_file/**' \
':crates/ruff_text_size/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Check if the benchmark code changed
id: check_benchmarks
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
':Cargo.toml' \
':Cargo.lock' \
':crates/ruff_benchmark/**' \
':.github/workflows/ci.yaml' \
; then
@@ -215,7 +233,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
@@ -229,7 +247,7 @@ jobs:
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -251,7 +269,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -263,15 +281,15 @@ jobs:
- name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-insta
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: "true"
- name: ty mdtests (GitHub annotations)
@@ -314,7 +332,7 @@ jobs:
(needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main')
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -325,11 +343,11 @@ jobs:
- name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-nextest
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: "true"
- name: "Run tests"
@@ -349,7 +367,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -358,11 +376,11 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-nextest
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: "true"
- name: "Run tests"
@@ -377,7 +395,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -385,9 +403,9 @@ jobs:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
node-version: 24
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
@@ -409,7 +427,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0
@@ -438,7 +456,7 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -450,7 +468,7 @@ jobs:
- name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@3fc81674af4165a753833a94cae9f91d8849049f # v1.16.2
uses: cargo-bins/cargo-binstall@80aaafe04903087c333980fa2686259ddd34b2d9 # v1.16.6
- name: "Install cargo-fuzz"
# Download the latest version from quick install and not the GitHub releases, because the GitHub releases only have MUSL targets.
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -465,10 +483,10 @@ jobs:
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
shared-key: ruff-linux-debug
@@ -497,13 +515,13 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 5
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: "Install Rust toolchain"
run: rustup component add rustfmt
# Run all code generation scripts, and verify that the current output is
@@ -530,15 +548,20 @@ jobs:
needs: determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
if: |
!contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' &&
(
needs.determine_changes.outputs.linter == 'true' ||
needs.determine_changes.outputs.formatter == 'true'
)
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.base.ref }}
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
python-version: ${{ env.PYTHON_VERSION }}
activate-environment: true
@@ -559,7 +582,7 @@ jobs:
cargo build --bin ruff
mv target/debug/ruff target/debug/ruff-baseline
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
clean: false
@@ -625,7 +648,7 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
name: Upload Results
with:
name: ecosystem-result
@@ -640,11 +663,11 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 10 || 20 }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
@@ -687,10 +710,10 @@ jobs:
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@3fc81674af4165a753833a94cae9f91d8849049f # v1.16.2
- uses: cargo-bins/cargo-binstall@80aaafe04903087c333980fa2686259ddd34b2d9 # v1.16.6
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -700,10 +723,10 @@ jobs:
needs: determine_changes
if: ${{ needs.determine_changes.outputs.ty == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
@@ -722,7 +745,7 @@ jobs:
timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -751,18 +774,18 @@ jobs:
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
node-version: 24
- name: "Cache pre-commit"
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -771,7 +794,7 @@ jobs:
echo '```console' > "$GITHUB_STEP_SUMMARY"
# Enable color output for pre-commit and remove it for the summary
# Use --hook-stage=manual to enable slower pre-commit hooks that are skipped by default
SKIP=cargo-fmt,clippy,dev-generate-all uvx --python="${PYTHON_VERSION}" pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
SKIP=cargo-fmt uvx --python="${PYTHON_VERSION}" pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
exit_code="${PIPESTATUS[0]}"
echo '```' >> "$GITHUB_STEP_SUMMARY"
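The pipeline in this step packs a few shell idioms together; a minimal sketch of the pattern in isolation (the command name is a placeholder, everything else mirrors the step above):

```sh
# Write the colored stream to the job log while appending an ANSI-stripped copy
# to the GitHub step summary; the process substitution does the stripping.
some_colored_command | \
  tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
# ${PIPESTATUS[0]} is the exit status of `some_colored_command`, not of `tee`,
# so a failure of the pre-commit run is not masked by the pipeline succeeding.
exit_code="${PIPESTATUS[0]}"
```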
@@ -782,7 +805,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -791,7 +814,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
python-version: 3.13
activate-environment: true
@@ -813,7 +836,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
@@ -839,7 +862,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
name: "Checkout ruff source"
with:
persist-credentials: false
@@ -855,7 +878,7 @@ jobs:
- name: Build Ruff binary
run: cargo build -p ruff --bin ruff
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
name: "Checkout ruff-lsp source"
with:
persist-credentials: false
@@ -890,7 +913,7 @@ jobs:
- determine_changes
if: ${{ (needs.determine_changes.outputs.playground == 'true') }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
@@ -898,9 +921,9 @@ jobs:
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
node-version: 24
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
@@ -926,7 +949,9 @@ jobs:
(
github.ref == 'refs/heads/main' ||
needs.determine_changes.outputs.formatter == 'true' ||
needs.determine_changes.outputs.linter == 'true'
needs.determine_changes.outputs.linter == 'true' ||
needs.determine_changes.outputs.parser == 'true' ||
needs.determine_changes.outputs.benchmarks == 'true'
)
timeout-minutes: 20
permissions:
@@ -934,20 +959,20 @@ jobs:
id-token: write # required for OIDC authentication with CodSpeed
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-codspeed
@@ -955,7 +980,7 @@ jobs:
run: cargo codspeed build --features "codspeed,ruff_instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
- name: "Run benchmarks"
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
uses: CodSpeedHQ/action@972e3437949c89e1357ebd1a2dbc852fcbc57245 # v4.5.1
with:
mode: simulation
run: cargo codspeed run
@@ -968,12 +993,13 @@ jobs:
github.repository == 'astral-sh/ruff' &&
(
github.ref == 'refs/heads/main' ||
needs.determine_changes.outputs.ty == 'true'
needs.determine_changes.outputs.ty == 'true' ||
needs.determine_changes.outputs.benchmarks == 'true'
)
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
@@ -985,7 +1011,7 @@ jobs:
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-codspeed
@@ -993,7 +1019,7 @@ jobs:
run: cargo codspeed build -m instrumentation --features "codspeed,ty_instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty
- name: "Upload benchmark binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: benchmarks-instrumented-ty-binary
path: target/codspeed/simulation/ruff_benchmark
@@ -1015,18 +1041,18 @@ jobs:
- "attrs|hydra|datetype"
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-codspeed
- name: "Download benchmark binary"
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v4.3.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: benchmarks-instrumented-ty-binary
path: target/codspeed/simulation/ruff_benchmark
@@ -1035,7 +1061,7 @@ jobs:
run: chmod +x target/codspeed/simulation/ruff_benchmark/ty
- name: "Run benchmarks"
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
uses: CodSpeedHQ/action@972e3437949c89e1357ebd1a2dbc852fcbc57245 # v4.5.1
with:
mode: simulation
run: cargo codspeed run --bench ty "${{ matrix.benchmark }}"
@@ -1046,24 +1072,33 @@ jobs:
# so hardcoding depot here is fine
runs-on: depot-ubuntu-22.04-arm-4
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.ty == 'true' || github.ref == 'refs/heads/main') }}
if: |
github.repository == 'astral-sh/ruff' &&
(
!contains(github.event.pull_request.labels.*.name, 'no-test') &&
(
needs.determine_changes.outputs.ty == 'true' ||
needs.determine_changes.outputs.benchmarks == 'true' ||
github.ref == 'refs/heads/main'
)
)
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-codspeed
@@ -1071,7 +1106,7 @@ jobs:
run: cargo codspeed build -m walltime --features "codspeed,ty_walltime" --profile profiling --no-default-features -p ruff_benchmark
- name: "Upload benchmark binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: benchmarks-walltime-binary
path: target/codspeed/walltime/ruff_benchmark
@@ -1094,19 +1129,19 @@ jobs:
- "pydantic|multithreaded|freqtrade"
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
uses: taiki-e/install-action@28a9d316db64b78a951f3f8587a5d08cc97ad8eb # v2.65.6
with:
tool: cargo-codspeed
- name: "Download benchmark binary"
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: benchmarks-walltime-binary
path: target/codspeed/walltime/ruff_benchmark
@@ -1115,7 +1150,7 @@ jobs:
run: chmod +x target/codspeed/walltime/ruff_benchmark/ty_walltime
- name: "Run benchmarks"
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
uses: CodSpeedHQ/action@972e3437949c89e1357ebd1a2dbc852fcbc57245 # v4.5.1
env:
# enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
# appear to provide much useful insight for our walltime benchmarks right now


@@ -31,10 +31,10 @@ jobs:
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"


@@ -41,14 +41,14 @@ jobs:
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
@@ -70,7 +70,7 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: mypy_primer_diff
path: mypy_primer.diff
@@ -80,14 +80,14 @@ jobs:
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
@@ -108,7 +108,7 @@ jobs:
scripts/mypy_primer.sh
- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: mypy_primer_memory_diff
path: mypy_primer_memory.diff
@@ -122,14 +122,14 @@ jobs:
# TODO: Enable once we fixed the non-deterministic diagnostics
if: false
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:


@@ -17,11 +17,14 @@ on:
required: true
type: string
permissions:
contents: read
jobs:
mkdocs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ inputs.ref }}
persist-credentials: true


@@ -26,14 +26,14 @@ jobs:
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
node-version: 24
package-manager-cache: false
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"


@@ -22,8 +22,8 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
pattern: wheels-*
path: wheels


@@ -30,14 +30,14 @@ jobs:
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
node-version: 24
package-manager-cache: false
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"


@@ -29,7 +29,7 @@ jobs:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
@@ -45,9 +45,9 @@ jobs:
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
node-version: 24
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}


@@ -70,7 +70,7 @@ jobs:
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
@@ -86,7 +86,7 @@ jobs:
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
@@ -128,14 +128,14 @@ jobs:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
with:
pattern: artifacts-*
path: target/distrib/
@@ -153,7 +153,7 @@ jobs:
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: artifacts-build-global
path: |
@@ -179,14 +179,14 @@ jobs:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
with:
pattern: artifacts-*
path: target/distrib/
@@ -200,7 +200,7 @@ jobs:
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
@@ -256,7 +256,7 @@ jobs:
submodules: recursive
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
with:
pattern: artifacts-*
path: artifacts


@@ -61,12 +61,12 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
name: Checkout Ruff
with:
path: ruff
persist-credentials: true
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
name: Checkout typeshed
with:
repository: python/typeshed
@@ -76,7 +76,7 @@ jobs:
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Sync typeshed stubs
run: |
rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -125,12 +125,12 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Setup git
run: |
git config --global user.name typeshedbot
@@ -164,12 +164,12 @@ jobs:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Setup git
run: |
git config --global user.name typeshedbot


@@ -4,7 +4,13 @@ permissions: {}
on:
pull_request:
types: [labeled]
# The default for `pull_request` is to trigger on `synchronize`, `opened` and `reopened`.
# We also add `labeled` here so that the workflow triggers when a label is initially added.
types:
- labeled
- synchronize
- opened
- reopened
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -23,16 +29,16 @@ jobs:
name: Compute diagnostic diff
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
if: contains(github.event.label.name, 'ecosystem-analyzer')
if: contains( github.event.pull_request.labels.*.name, 'ecosystem-analyzer')
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
@@ -114,7 +120,7 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: "Upload full report"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: full-report
path: dist/
@@ -122,19 +128,19 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: Upload comment
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: comment.md
path: comment.md
- name: Upload diagnostics diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: diff.html
path: dist/diff.html
- name: Upload timing diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: timing.html
path: dist/timing.html


@@ -1,3 +1,7 @@
# This workflow is a cron job that generates a report describing
# all diagnostics ty emits across the whole ecosystem. The report
# is uploaded to https://ty-ecosystem-ext.pages.dev/ on a weekly basis.
name: ty ecosystem-report
permissions: {}
@@ -21,14 +25,14 @@ jobs:
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: true
@@ -72,7 +76,7 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to publish the ecosystem report.
# Make sure to update the bot if you rename the artifact.
- name: "Upload ecosystem report"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: full-report
path: dist/


@@ -37,13 +37,13 @@ jobs:
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
repository: python/typing
ref: ${{ env.CONFORMANCE_SUITE_COMMIT }}
@@ -59,9 +59,6 @@ jobs:
- name: Compute diagnostic diff
shell: bash
env:
# TODO: Remove this once we fixed the remaining panics in the conformance suite.
TY_MAX_PARALLELISM: 1
run: |
RUFF_DIR="$GITHUB_WORKSPACE/ruff"
@@ -104,7 +101,7 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: typing_conformance_diagnostics_diff
path: typing_conformance_diagnostics.diff
@@ -112,7 +109,7 @@ jobs:
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: Upload conformance suite commit
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: conformance-suite-commit
path: conformance-suite-commit

.github/zizmor.yml

@@ -1,23 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://docs.zizmor.sh/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
cache-poisoning:
ignore:
- build-docker.yml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...
ignore:
- build-docker.yml
- publish-docs.yml
secrets-inherit:
# `cargo dist` makes extensive use of `secrets: inherit`,
# and we can't easily fix that until an upstream release changes that.
disable: true
template-injection:
ignore:
# like with `secrets-inherit`, `cargo dist` introduces some
# template injections. We've manually audited these usages for safety.
- release.yml


@@ -5,7 +5,7 @@ exclude: |
.github/workflows/release.yml|
crates/ty_vendored/vendor/.*|
crates/ty_project/resources/.*|
crates/ty_python_types/resources/corpus/.*|
crates/ty_python_semantic/resources/corpus/.*|
crates/ty/docs/(configuration|rules|cli|environment).md|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|
@@ -22,7 +22,7 @@ exclude: |
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v6.0.0
hooks:
- id: check-merge-conflict
@@ -32,13 +32,13 @@ repos:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.22
rev: 1.0.0
hooks:
- id: mdformat
language: python # means renovate will also update `additional_dependencies`
additional_dependencies:
- mdformat-mkdocs==4.0.0
- mdformat-footnote==0.1.1
- mdformat-mkdocs==5.0.0
- mdformat-footnote==0.1.2
exclude: |
(?x)^(
docs/formatter/black\.md
@@ -46,7 +46,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.45.0
rev: v0.47.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -56,7 +56,7 @@ repos:
)$
- repo: https://github.com/adamchainz/blacken-docs
rev: 1.19.1
rev: 1.20.0
hooks:
- id: blacken-docs
language: python # means renovate will also update `additional_dependencies`
@@ -67,10 +67,10 @@ repos:
.*?invalid(_.+)*_syntax\.md
)$
additional_dependencies:
- black==25.1.0
- black==25.12.0
- repo: https://github.com/crate-ci/typos
rev: v1.34.0
rev: v1.40.0
hooks:
- id: typos
@@ -84,7 +84,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.7
rev: v0.14.10
hooks:
- id: ruff-format
- id: ruff-check
@@ -94,7 +94,7 @@ repos:
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.6.2
rev: v3.7.4
hooks:
- id: prettier
types: [yaml]
@@ -102,19 +102,19 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.16.0
rev: v1.19.0
hooks:
- id: zizmor
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.2
rev: 0.36.0
hooks:
- id: check-github-workflows
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
- repo: https://github.com/rhysd/actionlint
rev: v1.7.7
rev: v1.7.9
hooks:
- id: actionlint
stages:
@@ -129,12 +129,9 @@ repos:
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
# and checks these with shellcheck. This is arguably its most useful feature,
# but the integration only works if shellcheck is installed
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.11.1"
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
rev: v0.11.0.1
hooks:
- id: shellcheck
ci:
skip: [cargo-fmt, dev-generate-all]

CLAUDE.md

@@ -0,0 +1,64 @@
# Ruff Repository
This repository contains both Ruff (a Python linter and formatter) and ty (a Python type checker). The crates follow a naming convention: `ruff_*` for Ruff-specific code and `ty_*` for ty-specific code. ty reuses several Ruff crates, including the Python parser (`ruff_python_parser`) and AST definitions (`ruff_python_ast`).
## Running Tests
Run all tests (using `nextest` for faster execution):
```sh
cargo nextest run
```
Run tests for a specific crate:
```sh
cargo nextest run -p ty_python_semantic
```
Run a specific mdtest (use a substring of the test name):
```sh
MDTEST_TEST_FILTER="<filter>" cargo nextest run -p ty_python_semantic mdtest
```
Update snapshots after running tests:
```sh
cargo insta accept
```
## Running Clippy
```sh
cargo clippy --workspace --all-targets --all-features -- -D warnings
```
## Running Debug Builds
Use debug builds (not `--release`) when developing, as release builds lack debug assertions and have slower compile times.
Run Ruff:
```sh
cargo run --bin ruff -- check path/to/file.py
```
Run ty:
```sh
cargo run --bin ty -- check path/to/file.py
```
## Pull Requests
When working on ty, PR titles should start with `[ty]` and be tagged with the `ty` GitHub label.
## Development Guidelines
- All changes must be tested. If you're not testing your changes, you're not done.
- Get your tests to pass. If you didn't run the tests, your code does not work.
- Follow existing code style. Check neighboring files for patterns.
- Always run `uvx pre-commit run -a` at the end of a task.
- Avoid writing significant amounts of new code. This is often a sign that we're missing an existing method or mechanism that could help solve the problem. Look for existing utilities first.
- Avoid falling back to patterns that require `panic!`, `unreachable!`, or `.unwrap()`. Instead, try to encode those constraints in the type system.

Cargo.lock

@@ -17,6 +17,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "alloca"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5a7d05ea6aea7e9e64d25b9156ba2fee3fdd659e34e41063cd2fc7cd020d7f4"
dependencies = [
"cc",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -137,9 +146,12 @@ dependencies = [
[[package]]
name = "arc-swap"
version = "1.7.1"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
checksum = "51d03449bb8ca2cc2ef70869af31463d1ae5ccc8fa3e334b307203fbf815207e"
dependencies = [
"rustversion",
]
[[package]]
name = "argfile"
@@ -208,12 +220,6 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bincode"
version = "2.0.1"
@@ -280,6 +286,9 @@ name = "bitflags"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
dependencies = [
"serde_core",
]
[[package]]
name = "bitvec"
@@ -351,9 +360,9 @@ dependencies = [
[[package]]
name = "camino"
version = "1.2.1"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609"
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
dependencies = [
"serde_core",
]
@@ -657,7 +666,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -666,7 +675,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -767,18 +776,20 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.7.0"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928"
checksum = "4d883447757bb0ee46f233e9dc22eb84d93a9508c9b868687b274fc431d886bf"
dependencies = [
"alloca",
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot 0.6.0",
"criterion-plot 0.8.1",
"itertools 0.13.0",
"num-traits",
"oorandom",
"page_size",
"regex",
"serde",
"serde_json",
@@ -798,9 +809,9 @@ dependencies = [
[[package]]
name = "criterion-plot"
version = "0.6.0"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338"
checksum = "ed943f81ea2faa8dcecbbfa50164acf95d555afec96a27871663b300e387b2e4"
dependencies = [
"cast",
"itertools 0.13.0",
@@ -1022,7 +1033,7 @@ dependencies = [
"libc",
"option-ext",
"redox_users",
"windows-sys 0.61.0",
"windows-sys 0.60.2",
]
[[package]]
@@ -1114,7 +1125,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -1637,9 +1648,9 @@ dependencies = [
[[package]]
name = "insta"
version = "1.43.2"
version = "1.45.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
checksum = "983e3b24350c84ab8a65151f537d67afbbf7153bb9f1110e03e9fa9b07f67a5c"
dependencies = [
"console 0.15.11",
"once_cell",
@@ -1649,6 +1660,7 @@ dependencies = [
"ron",
"serde",
"similar",
"tempfile",
]
[[package]]
@@ -1715,7 +1727,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi",
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -1769,9 +1781,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jiff"
version = "0.2.16"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35"
checksum = "a87d9b8105c23642f50cbbae03d1f75d8422c5cb98ce7ee9271f7ff7505be6b8"
dependencies = [
"jiff-static",
"jiff-tzdb-platform",
@@ -1779,14 +1791,14 @@ dependencies = [
"portable-atomic",
"portable-atomic-util",
"serde_core",
"windows-sys 0.61.0",
"windows-sys 0.52.0",
]
[[package]]
name = "jiff-static"
version = "0.2.16"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69"
checksum = "b787bebb543f8969132630c51fd0afab173a86c6abae56ff3b9e5e3e3f9f6e58"
dependencies = [
"proc-macro2",
"quote",
@@ -1862,9 +1874,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.177"
version = "0.2.178"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091"
[[package]]
name = "libcst"
@@ -1970,9 +1982,9 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.28"
version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "lsp-server"
@@ -2048,9 +2060,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
[[package]]
name = "matchit"
version = "0.9.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ea5f97102eb9e54ab99fb70bb175589073f554bdadfb74d9bd656482ea73e2a"
checksum = "b3eede3bdf92f3b4f9dc04072a9ce5ab557d5ec9038773bf9ffcd5588b3cc05b"
[[package]]
name = "memchr"
@@ -2289,6 +2301,16 @@ dependencies = [
"memchr",
]
[[package]]
name = "page_size"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "parking_lot"
version = "0.12.4"
@@ -2612,9 +2634,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.103"
version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0"
dependencies = [
"unicode-ident",
]
@@ -2876,13 +2898,16 @@ checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
[[package]]
name = "ron"
version = "0.7.1"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a"
checksum = "fd490c5b18261893f14449cbd28cb9c0b637aebf161cd77900bfdedaff21ec32"
dependencies = [
"base64",
"bitflags 1.3.2",
"bitflags 2.10.0",
"once_cell",
"serde",
"serde_derive",
"typeid",
"unicode-ident",
]
[[package]]
@@ -3083,7 +3108,6 @@ dependencies = [
"ty",
"ty_project",
"ty_python_semantic",
"ty_python_types",
"ty_static",
"url",
]
@@ -3132,7 +3156,6 @@ dependencies = [
"serde",
"ty_module_resolver",
"ty_python_semantic",
"ty_python_types",
"zip",
]
@@ -3226,7 +3249,6 @@ name = "ruff_memory_usage"
version = "0.0.0"
dependencies = [
"get-size2",
"ordermap",
]
[[package]]
@@ -3597,15 +3619,15 @@ checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08"
[[package]]
name = "rustix"
version = "1.1.2"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
dependencies = [
"bitflags 2.10.0",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -3623,7 +3645,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=309c249088fdeef0129606fa34ec2eefc74736ff#309c249088fdeef0129606fa34ec2eefc74736ff"
dependencies = [
"boxcar",
"compact_str",
@@ -3648,12 +3670,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=309c249088fdeef0129606fa34ec2eefc74736ff#309c249088fdeef0129606fa34ec2eefc74736ff"
[[package]]
name = "salsa-macros"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=309c249088fdeef0129606fa34ec2eefc74736ff#309c249088fdeef0129606fa34ec2eefc74736ff"
dependencies = [
"proc-macro2",
"quote",
@@ -3672,9 +3694,9 @@ dependencies = [
[[package]]
name = "schemars"
version = "1.0.5"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1317c3bf3e7df961da95b0a56a172a02abead31276215a0497241a7624b487ce"
checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2"
dependencies = [
"dyn-clone",
"ref-cast",
@@ -3685,9 +3707,9 @@ dependencies = [
[[package]]
name = "schemars_derive"
version = "1.0.5"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f760a6150d45dd66ec044983c124595ae76912e77ed0b44124cb3e415cce5d9"
checksum = "4908ad288c5035a8eb12cfdf0d49270def0a268ee162b75eeee0f85d155a7c45"
dependencies = [
"proc-macro2",
"quote",
@@ -3761,15 +3783,15 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.145"
version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
"serde_core",
"zmij",
]
[[package]]
@@ -3785,9 +3807,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
version = "1.0.3"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392"
checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776"
dependencies = [
"serde_core",
]
@@ -3803,9 +3825,9 @@ dependencies = [
[[package]]
name = "serde_with"
version = "3.15.1"
version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04"
checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
dependencies = [
"serde_core",
"serde_with_macros",
@@ -3813,9 +3835,9 @@ dependencies = [
[[package]]
name = "serde_with_macros"
version = "3.15.1"
version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955"
checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c"
dependencies = [
"darling",
"proc-macro2",
@@ -3965,9 +3987,9 @@ dependencies = [
[[package]]
name = "supports-hyperlinks"
version = "3.1.0"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b"
checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91"
[[package]]
name = "syn"
@@ -3999,15 +4021,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tempfile"
version = "3.23.0"
version = "3.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
dependencies = [
"fastrand",
"getrandom 0.3.4",
"once_cell",
"rustix",
"windows-sys 0.61.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -4201,9 +4223,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
version = "0.9.8"
version = "0.9.10+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8"
checksum = "0825052159284a1a8b4d6c0c86cbc801f2da5afd2b225fa548c72f2e74002f48"
dependencies = [
"indexmap",
"serde_core",
@@ -4216,9 +4238,9 @@ dependencies = [
[[package]]
name = "toml_datetime"
version = "0.7.3"
version = "0.7.5+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347"
dependencies = [
"serde_core",
]
@@ -4237,24 +4259,24 @@ dependencies = [
[[package]]
name = "toml_parser"
version = "1.0.4"
version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
dependencies = [
"winnow",
]
[[package]]
name = "toml_writer"
version = "1.0.4"
version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2"
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
[[package]]
name = "tracing"
version = "0.1.43"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
dependencies = [
"log",
"pin-project-lite",
@@ -4275,9 +4297,9 @@ dependencies = [
[[package]]
name = "tracing-core"
version = "0.1.35"
version = "0.1.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
dependencies = [
"once_cell",
"valuable",
@@ -4296,9 +4318,9 @@ dependencies = [
[[package]]
name = "tracing-indicatif"
version = "0.3.13"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04d4e11e0e27acef25a47f27e9435355fecdc488867fa2bc90e75b0700d2823d"
checksum = "e1ef6990e0438749f0080573248e96631171a0b5ddfddde119aa5ba8c3a9c47e"
dependencies = [
"indicatif",
"tracing",
@@ -4444,7 +4466,6 @@ dependencies = [
"ty_module_resolver",
"ty_project",
"ty_python_semantic",
"ty_python_types",
"ty_vendored",
]
@@ -4507,7 +4528,6 @@ dependencies = [
"ty_combine",
"ty_module_resolver",
"ty_python_semantic",
"ty_python_types",
"ty_static",
"ty_vendored",
]
@@ -4521,12 +4541,19 @@ dependencies = [
"bitvec",
"camino",
"colored 3.0.0",
"compact_str",
"datatest-stable",
"drop_bomb",
"get-size2",
"glob",
"hashbrown 0.16.1",
"indexmap",
"indoc",
"insta",
"itertools 0.14.0",
"memchr",
"ordermap",
"pretty_assertions",
"quickcheck",
"quickcheck_macros",
"ruff_annotate_snippets",
@@ -4536,7 +4563,9 @@ dependencies = [
"ruff_macros",
"ruff_memory_usage",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
@@ -4550,59 +4579,12 @@ dependencies = [
"strsim",
"strum",
"strum_macros",
"test-case",
"thiserror 2.0.17",
"tracing",
"ty_combine",
"ty_module_resolver",
"ty_static",
"ty_vendored",
]
[[package]]
name = "ty_python_types"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.10.0",
"camino",
"compact_str",
"datatest-stable",
"drop_bomb",
"get-size2",
"glob",
"indexmap",
"indoc",
"insta",
"itertools 0.14.0",
"memchr",
"ordermap",
"pretty_assertions",
"quickcheck",
"quickcheck_macros",
"ruff_db",
"ruff_diagnostics",
"ruff_macros",
"ruff_memory_usage",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"salsa",
"schemars",
"serde",
"serde_json",
"smallvec",
"static_assertions",
"strum",
"strum_macros",
"test-case",
"tracing",
"ty_module_resolver",
"ty_python_semantic",
"ty_static",
"ty_test",
"ty_vendored",
]
@@ -4683,7 +4665,6 @@ dependencies = [
"tracing",
"ty_module_resolver",
"ty_python_semantic",
"ty_python_types",
"ty_static",
"ty_vendored",
]
@@ -4730,6 +4711,12 @@ version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
name = "typeid"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"
[[package]]
name = "typenum"
version = "1.18.0"
@@ -4889,9 +4876,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.18.1"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a"
dependencies = [
"getrandom 0.3.4",
"js-sys",
@@ -4902,9 +4889,9 @@ dependencies = [
[[package]]
name = "uuid-macro-internal"
version = "1.18.1"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9384a660318abfbd7f8932c34d67e4d1ec511095f95972ddc01e19d7ba8413f"
checksum = "39d11901c36b3650df7acb0f9ebe624f35b5ac4e1922ecd3c57f444648429594"
dependencies = [
"proc-macro2",
"quote",
@@ -5124,15 +5111,37 @@ dependencies = [
"glob",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
"windows-sys 0.61.0",
"windows-sys 0.52.0",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.62.0"
@@ -5516,6 +5525,12 @@ dependencies = [
"zstd",
]
[[package]]
name = "zmij"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30e0d8dffbae3d840f64bda38e28391faef673a7b5a6017840f2a106c8145868"
[[package]]
name = "zstd"
version = "0.11.2+zstd.1.5.2"


@@ -48,7 +48,6 @@ ty_ide = { path = "crates/ty_ide" }
ty_module_resolver = { path = "crates/ty_module_resolver" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_python_types = { path = "crates/ty_python_types" }
ty_server = { path = "crates/ty_server" }
ty_static = { path = "crates/ty_static" }
ty_test = { path = "crates/ty_test" }
@@ -59,8 +58,8 @@ anstream = { version = "0.6.18" }
anstyle = { version = "1.0.10" }
anyhow = { version = "1.0.80" }
arc-swap = { version = "1.7.1" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
assert_fs = { version = "1.1.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
@@ -72,30 +71,31 @@ camino = { version = "1.1.7" }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
csv = { version = "1.3.1" }
divan = { package = "codspeed-divan-compat", version = "4.0.4" }
codspeed-criterion-compat = { version = "4.0.4", default-features = false }
colored = { version = "3.0.0" }
compact_str = "0.9.0"
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
criterion = { version = "0.8.0", default-features = false }
crossbeam = { version = "0.8.4" }
csv = { version = "1.3.1" }
dashmap = { version = "6.0.1" }
datatest-stable = { version = "0.3.3" }
dunce = { version = "1.0.5" }
divan = { package = "codspeed-divan-compat", version = "4.0.4" }
drop_bomb = { version = "0.1.5" }
dunce = { version = "1.0.5" }
etcetera = { version = "0.11.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.7.3", features = [
"derive",
"smallvec",
"hashbrown",
"compact-str",
"ordermap"
] }
getrandom = { version = "0.3.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
@@ -117,8 +117,8 @@ is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "1.0.0" }
js-sys = { version = "0.3.69" }
libc = { version = "0.2.153" }
libcst = { version = "1.8.4", default-features = false }
log = { version = "0.4.17" }
@@ -140,6 +140,8 @@ pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rayon = { version = "1.10.0" }
@@ -148,7 +150,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "ce80691fa0b87dc2fd2235a26544e63e5e43d8d3", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "309c249088fdeef0129606fa34ec2eefc74736ff", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
@@ -196,9 +198,9 @@ tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-normalization = { version = "0.1.23" }
unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
walkdir = { version = "2.3.2" }
@@ -208,8 +210,13 @@ wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.metadata.cargo-shear]
ignored = ["getrandom", "ruff_options_metadata", "uuid", "get-size2", "ty_completion_eval"]
ignored = [
"getrandom",
"ruff_options_metadata",
"uuid",
"get-size2",
"ty_completion_eval",
]
[workspace.lints.rust]
unsafe_code = "warn"
@@ -269,17 +276,10 @@ if_not_else = "allow"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"
[profile.release]
lto = "fat"
codegen-units = 16
# Profile to build a minimally sized binary for ruff/ty
[profile.minimal-size]
inherits = "release"
opt-level = "z"
codegen-units = 1
# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
@@ -290,6 +290,12 @@ codegen-units = 1
[profile.release.package.salsa]
codegen-units = 1
# Profile to build a minimally sized binary for ruff/ty
[profile.minimal-size]
inherits = "release"
opt-level = "z"
codegen-units = 1
[profile.dev.package.insta]
opt-level = 3


@@ -12,6 +12,13 @@ license = { workspace = true }
readme = "../../README.md"
default-run = "ruff"
[package.metadata.cargo-shear]
# Used via macro expansion.
ignored = ["jiff"]
[package.metadata.dist]
dist = true
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, default-features = false, features = ["os"] }
@@ -61,6 +68,12 @@ tracing = { workspace = true, features = ["log"] }
walkdir = { workspace = true }
wild = { workspace = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }
[dev-dependencies]
# Enable test rules during development
ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
@@ -76,18 +89,5 @@ ruff_python_trivia = { workspace = true }
tempfile = { workspace = true }
test-case = { workspace = true }
[package.metadata.cargo-shear]
# Used via macro expansion.
ignored = ["jiff"]
[package.metadata.dist]
dist = true
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true }
[lints]
workspace = true


@@ -1305,7 +1305,7 @@ mod tests {
settings.add_filter(r"(Panicked at) [^:]+:\d+:\d+", "$1 <location>");
let _s = settings.bind_to_scope();
assert_snapshot!(str::from_utf8(&buf)?, @r"
assert_snapshot!(str::from_utf8(&buf)?, @"
io: test.py: Permission denied
--> test.py:1:1


@@ -4,4 +4,3 @@ source: crates/ruff/src/commands/check.rs
/home/ferris/project/code.py:1:1: E902 Permission denied (os error 13)
/home/ferris/project/notebook.ipynb:1:1: E902 Permission denied (os error 13)
/home/ferris/project/pyproject.toml:1:1: E902 Permission denied (os error 13)


@@ -1,6 +1,5 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
0.0.0


@@ -1,6 +1,5 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
0.0.0 (53b0f5d92 2023-10-19)


@@ -1,6 +1,5 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
0.0.0+24 (53b0f5d92 2023-10-19)


@@ -1,7 +1,6 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
{
"version": "0.0.0",


@@ -132,29 +132,29 @@ fn dependents() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--direction").arg("dependents").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [],
"ruff/b.py": [
"ruff/a.py"
],
"ruff/c.py": [
"ruff/b.py"
],
"ruff/d.py": [
"ruff/c.py"
],
"ruff/e.py": [
"ruff/d.py"
]
}
assert_cmd_snapshot!(command().arg("--direction").arg("dependents").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [],
"ruff/b.py": [
"ruff/a.py"
],
"ruff/c.py": [
"ruff/b.py"
],
"ruff/d.py": [
"ruff/c.py"
],
"ruff/e.py": [
"ruff/d.py"
]
}
----- stderr -----
"###);
----- stderr -----
"#);
});
Ok(())
@@ -184,21 +184,21 @@ fn string_detection() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}
----- stderr -----
"###);
----- stderr -----
"#);
});
insta::with_settings!({
@@ -319,7 +319,7 @@ fn globs() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -340,7 +340,7 @@ fn globs() -> Result<()> {
}
----- stderr -----
"###);
"#);
});
Ok(())
@@ -368,7 +368,7 @@ fn exclude() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -381,7 +381,7 @@ fn exclude() -> Result<()> {
}
----- stderr -----
"###);
"#);
});
Ok(())
@@ -421,7 +421,7 @@ fn wildcard() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -443,7 +443,7 @@ fn wildcard() -> Result<()> {
}
----- stderr -----
"###);
"#);
});
Ok(())
@@ -639,7 +639,7 @@ fn venv() -> Result<()> {
}, {
assert_cmd_snapshot!(
command().args(["--python", "none"]).arg("packages/albatross").current_dir(&root),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -695,7 +695,7 @@ fn notebook_basic() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -708,7 +708,7 @@ fn notebook_basic() -> Result<()> {
}
----- stderr -----
"###);
"#);
});
Ok(())
@@ -765,7 +765,7 @@ fn notebook_with_magic() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -778,7 +778,7 @@ fn notebook_with_magic() -> Result<()> {
}
----- stderr -----
"###);
"#);
});
Ok(())


@@ -29,7 +29,7 @@ fn type_checking_imports() -> anyhow::Result<()> {
("ruff/c.py", ""),
])?;
assert_cmd_snapshot!(test.command(), @r###"
assert_cmd_snapshot!(test.command(), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -46,12 +46,12 @@ fn type_checking_imports() -> anyhow::Result<()> {
}
----- stderr -----
"###);
"#);
assert_cmd_snapshot!(
test.command()
.arg("--no-type-checking-imports"),
@r###"
@r#"
success: true
exit_code: 0
----- stdout -----
@@ -65,7 +65,7 @@ fn type_checking_imports() -> anyhow::Result<()> {
}
----- stderr -----
"###
"#
);
Ok(())
@@ -103,7 +103,7 @@ fn type_checking_imports_from_config() -> anyhow::Result<()> {
),
])?;
assert_cmd_snapshot!(test.command(), @r###"
assert_cmd_snapshot!(test.command(), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -117,7 +117,7 @@ fn type_checking_imports_from_config() -> anyhow::Result<()> {
}
----- stderr -----
"###);
"#);
test.write_file(
"ruff.toml",
@@ -127,7 +127,7 @@ fn type_checking_imports_from_config() -> anyhow::Result<()> {
"#,
)?;
assert_cmd_snapshot!(test.command(), @r###"
assert_cmd_snapshot!(test.command(), @r#"
success: true
exit_code: 0
----- stdout -----
@@ -144,7 +144,7 @@ fn type_checking_imports_from_config() -> anyhow::Result<()> {
}
----- stderr -----
"###
"#
);
Ok(())


@@ -51,7 +51,7 @@ fn default_files() -> Result<()> {
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("--check"), @r"
.arg("--check"), @"
success: false
exit_code: 1
----- stdout -----
@@ -71,7 +71,7 @@ fn format_warn_stdin_filename_with_files() -> Result<()> {
assert_cmd_snapshot!(test.format_command()
.args(["--isolated", "--stdin-filename", "foo.py"])
.arg("foo.py")
.pass_stdin("foo = 1"), @r"
.pass_stdin("foo = 1"), @"
success: true
exit_code: 0
----- stdout -----
@@ -87,7 +87,7 @@ fn format_warn_stdin_filename_with_files() -> Result<()> {
fn nonexistent_config_file() -> Result<()> {
let test = CliTest::new()?;
assert_cmd_snapshot!(test.format_command()
.args(["--config", "foo.toml", "."]), @r"
.args(["--config", "foo.toml", "."]), @"
success: false
exit_code: 2
----- stdout -----
@@ -111,7 +111,7 @@ fn nonexistent_config_file() -> Result<()> {
fn config_override_rejected_if_invalid_toml() -> Result<()> {
let test = CliTest::new()?;
assert_cmd_snapshot!(test.format_command()
.args(["--config", "foo = bar", "."]), @r"
.args(["--config", "foo = bar", "."]), @"
success: false
exit_code: 2
----- stdout -----
@@ -145,7 +145,7 @@ fn too_many_config_files() -> Result<()> {
.arg("ruff.toml")
.arg("--config")
.arg("ruff2.toml")
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 2
----- stdout -----
@@ -168,7 +168,7 @@ fn config_file_and_isolated() -> Result<()> {
.arg("--isolated")
.arg("--config")
.arg("ruff.toml")
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 2
----- stdout -----
@@ -390,7 +390,7 @@ fn mixed_line_endings() -> Result<()> {
assert_cmd_snapshot!(test.format_command()
.arg("--diff")
.arg("--isolated")
.arg("."), @r"
.arg("."), @"
success: true
exit_code: 0
----- stdout -----
@@ -446,7 +446,7 @@ OTHER = "OTHER"
// Explicitly pass test.py, should be formatted regardless of it being excluded by format.exclude
.arg("test.py")
// Format all other files in the directory, should respect the `exclude` and `format.exclude` options
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 1
----- stdout -----
@@ -469,7 +469,7 @@ fn deduplicate_directory_and_explicit_file() -> Result<()> {
.arg("--check")
.arg(".")
.arg("main.py"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -495,7 +495,7 @@ from module import =
assert_cmd_snapshot!(test.format_command()
.arg("--check")
.arg("--isolated")
.arg("main.py"), @r"
.arg("main.py"), @"
success: false
exit_code: 2
----- stdout -----
@@ -522,7 +522,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("--check")
.arg("main.py"), @r"
.arg("main.py"), @"
success: false
exit_code: 1
----- stdout -----
@@ -534,7 +534,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("main.py"), @r"
.arg("main.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -545,7 +545,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("main.py"), @r"
.arg("main.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -614,7 +614,7 @@ fn output_format_notebook() -> Result<()> {
assert_cmd_snapshot!(
test.format_command().args(["--isolated", "--preview", "--check"]).arg(path),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -672,7 +672,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("--exit-non-zero-on-format")
.arg("main.py"), @r"
.arg("main.py"), @"
success: false
exit_code: 1
----- stdout -----
@@ -685,7 +685,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("--exit-non-zero-on-format")
.arg("main.py"), @r"
.arg("main.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -701,7 +701,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("--exit-non-zero-on-fix")
.arg("main.py"), @r"
.arg("main.py"), @"
success: false
exit_code: 1
----- stdout -----
@@ -714,7 +714,7 @@ if __name__ == "__main__":
assert_cmd_snapshot!(test.format_command()
.arg("--isolated")
.arg("--exit-non-zero-on-fix")
.arg("main.py"), @r"
.arg("main.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -771,7 +771,7 @@ OTHER = "OTHER"
// Explicitly pass test.py, should not be formatted because of --force-exclude
.arg("test.py")
// Format all other files in the directory, should respect the `exclude` and `format.exclude` options
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 1
----- stdout -----
@@ -931,7 +931,7 @@ tab-size = 2
.pass_stdin(r"
if True:
pass
"), @r"
"), @"
success: false
exit_code: 2
----- stdout -----
@@ -1144,7 +1144,7 @@ def say_hy(name: str):
assert_cmd_snapshot!(test.format_command()
.arg("--config")
.arg("ruff.toml")
.arg("test.py"), @r"
.arg("test.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -1184,7 +1184,7 @@ def say_hy(name: str):
assert_cmd_snapshot!(test.format_command()
.arg("--config")
.arg("ruff.toml")
.arg("test.py"), @r"
.arg("test.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -1216,7 +1216,7 @@ def say_hy(name: str):
assert_cmd_snapshot!(test.format_command()
.arg("--config")
.arg("ruff.toml")
.arg("test.py"), @r"
.arg("test.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -1246,7 +1246,7 @@ fn test_diff() -> Result<()> {
assert_cmd_snapshot!(
test.format_command().args(["--isolated", "--diff"]).args(paths),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1311,7 +1311,7 @@ fn test_diff_no_change() -> Result<()> {
let paths = [fixtures.join("unformatted.py")];
assert_cmd_snapshot!(
test.format_command().args(["--isolated", "--diff"]).args(paths),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1341,7 +1341,7 @@ fn test_diff_stdin_unformatted() -> Result<()> {
test.format_command()
.args(["--isolated", "--diff", "-", "--stdin-filename", "unformatted.py"])
.pass_stdin(unformatted),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1366,7 +1366,7 @@ fn test_diff_stdin_formatted() -> Result<()> {
let unformatted = fs::read(fixtures.join("formatted.py")).unwrap();
assert_cmd_snapshot!(
test.format_command().args(["--isolated", "--diff", "-"]).pass_stdin(unformatted),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -1873,7 +1873,7 @@ include = ["*.ipy"]
assert_cmd_snapshot!(test.format_command()
.args(["--config", "ruff.toml"])
.args(["--extension", "ipy:ipynb"])
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 2
----- stdout -----
@@ -1938,7 +1938,7 @@ include = ["*.ipy"]
assert_cmd_snapshot!(test.format_command()
.args(["--config", "ruff.toml"])
.args(["--extension", "ipy:ipynb"])
.arg("."), @r"
.arg("."), @"
success: true
exit_code: 0
----- stdout -----
@@ -2021,7 +2021,7 @@ def file2(arg1, arg2,):
assert_cmd_snapshot!(test.format_command()
.args(["--isolated", "--range=1:8-1:15"])
.arg("file1.py")
.arg("file2.py"), @r"
.arg("file2.py"), @"
success: false
exit_code: 2
----- stdout -----
@@ -2068,7 +2068,7 @@ fn range_start_larger_than_end() -> Result<()> {
def foo(arg1, arg2,):
print("Shouldn't format this" )
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -2168,7 +2168,7 @@ fn range_missing_line() -> Result<()> {
def foo(arg1, arg2,):
print("Should format this" )
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -2192,7 +2192,7 @@ fn zero_line_number() -> Result<()> {
def foo(arg1, arg2,):
print("Should format this" )
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -2217,7 +2217,7 @@ fn column_and_line_zero() -> Result<()> {
def foo(arg1, arg2,):
print("Should format this" )
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -2274,7 +2274,7 @@ fn range_formatting_notebook() -> Result<()> {
"nbformat": 4,
"nbformat_minor": 5
}
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -2355,7 +2355,7 @@ fn cookiecutter_globbing() -> Result<()> {
])?;
assert_cmd_snapshot!(test.format_command()
.args(["--isolated", "--diff", "."]), @r"
.args(["--isolated", "--diff", "."]), @"
success: true
exit_code: 0
----- stdout -----
@@ -2374,7 +2374,7 @@ fn stable_output_format_warning() -> Result<()> {
test.format_command()
.args(["--output-format=full", "-"])
.pass_stdin(""),
@r"
@"
success: true
exit_code: 0
----- stdout -----


@@ -39,7 +39,7 @@ inline-quotes = "single"
.arg("ruff.toml")
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r"
.pass_stdin(r#"a = "abcba".strip("aba")"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -76,18 +76,18 @@ inline-quotes = "single"
.arg("--config")
.arg("ruff.toml")
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r"
success: false
exit_code: 1
----- stdout -----
-:1:5: Q000 [*] Double quotes found but single quotes preferred
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
-:1:19: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 2 fixable with the `--fix` option.
.pass_stdin(r#"a = "abcba".strip("aba")"#), @"
success: false
exit_code: 1
----- stdout -----
-:1:5: Q000 [*] Double quotes found but single quotes preferred
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
-:1:19: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
");
----- stderr -----
");
Ok(())
}
@@ -110,7 +110,7 @@ inline-quotes = "single"
.arg("--config")
.arg("ruff.toml")
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r"
.pass_stdin(r#"a = "abcba".strip("aba")"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -150,7 +150,7 @@ inline-quotes = "single"
.arg("--config")
.arg("ruff.toml")
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r"
.pass_stdin(r#"a = "abcba".strip("aba")"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -220,7 +220,7 @@ OTHER = "OTHER"
// Explicitly pass test.py, should be linted regardless of it being excluded by lint.exclude
.arg("test.py")
// Lint all other files in the directory, should respect the `exclude` and `lint.exclude` options
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 1
----- stdout -----
@@ -259,16 +259,16 @@ exclude = ["main.py"]
.arg(".")
// Explicitly pass main.py, should be linted regardless of it being excluded by lint.exclude
.arg("main.py"),
@r"
success: false
exit_code: 1
----- stdout -----
main.py:1:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
@"
success: false
exit_code: 1
----- stdout -----
main.py:1:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
"
----- stderr -----
"
);
Ok(())
@@ -299,7 +299,7 @@ from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -340,7 +340,7 @@ max-line-length = 100
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜"
# longer than 100
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜亜亜亜亜亜亜亜"
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -381,7 +381,7 @@ from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -422,7 +422,7 @@ from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -461,7 +461,7 @@ ignore = ["D203", "D212"]
assert_cmd_snapshot!(fixture
.check_command()
.current_dir(fixture.root().join("subdirectory"))
, @r"
, @"
success: true
exit_code: 0
----- stdout -----
@@ -478,7 +478,7 @@ ignore = ["D203", "D212"]
fn nonexistent_config_file() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "foo.toml", "."]), @r"
.args(["--config", "foo.toml", "."]), @"
success: false
exit_code: 2
----- stdout -----
@@ -501,7 +501,7 @@ fn nonexistent_config_file() {
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "foo = bar", "."]), @r"
.args(["--config", "foo = bar", "."]), @"
success: false
exit_code: 2
----- stdout -----
@@ -537,7 +537,7 @@ fn too_many_config_files() -> Result<()> {
.arg("ruff.toml")
.arg("--config")
.arg("ruff2.toml")
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 2
----- stdout -----
@@ -556,7 +556,7 @@ fn too_many_config_files() -> Result<()> {
fn extend_passed_via_config_argument() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "extend = 'foo.toml'", "."]), @r"
.args(["--config", "extend = 'foo.toml'", "."]), @"
success: false
exit_code: 2
----- stdout -----
@@ -588,17 +588,17 @@ extend = "ruff3.toml"
)?;
assert_cmd_snapshot!(fixture
.check_command(), @r"
success: false
exit_code: 2
----- stdout -----
.check_command(), @"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: Failed to load extended configuration `[TMP]/ruff3.toml` (`[TMP]/ruff.toml` extends `[TMP]/ruff2.toml` extends `[TMP]/ruff3.toml`)
Cause: Failed to read [TMP]/ruff3.toml
Cause: No such file or directory (os error 2)
");
----- stderr -----
ruff failed
Cause: Failed to load extended configuration `[TMP]/ruff3.toml` (`[TMP]/ruff.toml` extends `[TMP]/ruff2.toml` extends `[TMP]/ruff3.toml`)
Cause: Failed to read [TMP]/ruff3.toml
Cause: No such file or directory (os error 2)
");
Ok(())
}
@@ -627,7 +627,7 @@ extend = "ruff.toml"
assert_cmd_snapshot!(fixture
.check_command(),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -659,7 +659,7 @@ select = [E501]
assert_cmd_snapshot!(
fixture.check_command(),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -688,7 +688,7 @@ fn config_file_and_isolated() -> Result<()> {
.arg("--config")
.arg("ruff.toml")
.arg("--isolated")
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 2
----- stdout -----
@@ -742,7 +742,7 @@ x = "longer_than_90_charactersssssssssssssssssssssssssssssssssssssssssssssssssss
.args(["--config", "lint.extend-select=['E501', 'F841']"])
.args(["--config", "lint.isort.combine-as-imports = false"])
.arg("-")
.pass_stdin(test_code), @r"
.pass_stdin(test_code), @"
success: false
exit_code: 1
----- stdout -----
@@ -761,7 +761,7 @@ fn valid_toml_but_nonexistent_option_provided_via_config_argument() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args([".", "--config", "extend-select=['F481']"]), // No such code as F481!
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -788,7 +788,7 @@ fn each_toml_option_requires_a_new_flag_1() {
// commas can't be used to delimit different config overrides;
// you need a new --config flag for each override
.args([".", "--config", "extend-select=['F841'], line-length=90"]),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -819,7 +819,7 @@ fn each_toml_option_requires_a_new_flag_2() {
// spaces *also* can't be used to delimit different config overrides;
// you need a new --config flag for each override
.args([".", "--config", "extend-select=['F841'] line-length=90"]),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -886,7 +886,7 @@ fn value_given_to_table_key_is_not_inline_table_2() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args([".", "--config", r#"lint=123"#]),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -953,7 +953,7 @@ select=["E501"]
.arg("ruff.toml")
.args(["--config", "line-length=110"])
.arg("-")
.pass_stdin(test_code), @r"
.pass_stdin(test_code), @"
success: false
exit_code: 1
----- stdout -----
@@ -976,7 +976,7 @@ fn complex_config_setting_overridden_via_cli() -> Result<()> {
.args(["--config", "lint.per-file-ignores = {'generated.py' = ['N801']}"])
.args(["--stdin-filename", "generated.py"])
.arg("-")
.pass_stdin(test_code), @r"
.pass_stdin(test_code), @"
success: true
exit_code: 0
----- stdout -----
@@ -993,7 +993,7 @@ fn deprecated_config_option_overridden_via_cli() {
.args(STDIN_BASE_OPTIONS)
.args(["--config", "select=['N801']", "-"])
.pass_stdin("class lowercase: ..."),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1061,7 +1061,7 @@ include = ["*.ipy"]
.check_command()
.args(["--config", "ruff.toml"])
.args(["--extension", "ipy:ipynb"])
.arg("."), @r"
.arg("."), @"
success: false
exit_code: 1
----- stdout -----
@@ -1111,7 +1111,7 @@ external = ["AAA"]
.pass_stdin(r#"
# flake8: noqa: AAA101, BBB102
import os
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -1147,7 +1147,7 @@ required-version = "0.1.0"
.arg("-")
.pass_stdin(r#"
import os
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -1184,7 +1184,7 @@ required-version = "{version}"
.arg("-")
.pass_stdin(r#"
import os
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -1222,7 +1222,7 @@ required-version = ">{version}"
.arg("-")
.pass_stdin(r#"
import os
"#), @r"
"#), @"
success: false
exit_code: 2
----- stdout -----
@@ -1252,7 +1252,7 @@ required-version = ">=0.1.0"
.arg("-")
.pass_stdin(r#"
import os
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -1289,7 +1289,7 @@ import os
def func():
x = 1
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -1323,7 +1323,7 @@ fn negated_per_file_ignores() -> Result<()> {
.arg("ruff.toml")
.arg("--select")
.arg("RUF901")
, @r"
, @"
success: false
exit_code: 1
----- stdout -----
@@ -1355,7 +1355,7 @@ fn negated_per_file_ignores_absolute() -> Result<()> {
.arg("ruff.toml")
.arg("--select")
.arg("RUF901")
, @r"
, @"
success: false
exit_code: 1
----- stdout -----
@@ -1389,7 +1389,7 @@ fn negated_per_file_ignores_overlap() -> Result<()> {
.arg("ruff.toml")
.arg("--select")
.arg("RUF901")
, @r"
, @"
success: true
exit_code: 0
----- stdout -----
@@ -1423,7 +1423,7 @@ import os # F401
def function():
import os # F811
print(os.name)
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1466,7 +1466,7 @@ import sys
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1482,7 +1482,7 @@ import sys
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--preview"]),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -1497,7 +1497,7 @@ import sys
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--ignore-noqa", "--preview"]),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1539,7 +1539,7 @@ def first_square():
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1551,7 +1551,8 @@ def first_square():
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
insta::assert_snapshot!(test_code, @"
def first_square():
return [x * x for x in range(20)][0] # noqa: RUF015
");
@@ -1587,7 +1588,7 @@ def unused(x):
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1599,7 +1600,8 @@ def unused(x):
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
insta::assert_snapshot!(test_code, @"
def unused(x): # noqa: ANN001, ANN201, D103
pass
");
@@ -1635,7 +1637,7 @@ import a
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1647,7 +1649,8 @@ import a
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
insta::assert_snapshot!(test_code, @"
import z # noqa: I001
import c
import a
@@ -1684,7 +1687,7 @@ def unused(x): # noqa: ANN001, ARG001, D103
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1696,7 +1699,8 @@ def unused(x): # noqa: ANN001, ARG001, D103
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
insta::assert_snapshot!(test_code, @"
def unused(x): # noqa: ANN001, ANN201, ARG001, D103
pass
");
@@ -1732,7 +1736,7 @@ import os
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1743,7 +1747,8 @@ import os
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
insta::assert_snapshot!(test_code, @"
# ruff: noqa F401
import os
");
@@ -1779,7 +1784,7 @@ import os
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1790,7 +1795,8 @@ import os
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
insta::assert_snapshot!(test_code, @"
# ruff: disable[F401]
import os
");
@@ -1831,7 +1837,7 @@ print(
.arg("-")
.pass_stdin(r#"
"#), @r"
"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -1844,6 +1850,7 @@ print(
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r#"
print(
"""First line
second line
@@ -1886,7 +1893,7 @@ def first_square():
assert_cmd_snapshot!(fixture
.check_command()
.args(["--add-noqa"]), @r"
.args(["--add-noqa"]), @"
success: true
exit_code: 0
----- stdout -----
@@ -1915,7 +1922,7 @@ from foo import ( # noqa: F401
.check_command()
.arg("--add-noqa")
.arg("--select=F401")
.arg("noqa.py"), @r"
.arg("noqa.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -1942,7 +1949,7 @@ def foo():
.check_command()
.arg("--add-noqa=TODO: fix")
.arg("--select=F401,F841")
.arg("test.py"), @r"
.arg("test.py"), @"
success: true
exit_code: 0
----- stdout -----
@@ -1952,12 +1959,12 @@ def foo():
");
let content = fs::read_to_string(fixture.root().join("test.py"))?;
insta::assert_snapshot!(content, @r"
import os # noqa: F401 TODO: fix
insta::assert_snapshot!(content, @"
import os # noqa: F401 TODO: fix
def foo():
x = 1 # noqa: F841 TODO: fix
");
def foo():
x = 1 # noqa: F841 TODO: fix
");
Ok(())
}
@@ -1971,7 +1978,7 @@ fn add_noqa_with_newline_in_reason() -> Result<()> {
.check_command()
.arg("--add-noqa=line1\nline2")
.arg("--select=F401")
.arg("test.py"), @r###"
.arg("test.py"), @"
success: false
exit_code: 2
----- stdout -----
@@ -1979,7 +1986,7 @@ fn add_noqa_with_newline_in_reason() -> Result<()> {
----- stderr -----
ruff failed
Cause: --add-noqa <reason> cannot contain newline characters
"###);
");
Ok(())
}
@@ -2003,7 +2010,7 @@ select = ["UP006"]
.arg("pyproject.toml")
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"from typing import List; foo: List[int]"#), @r"
.pass_stdin(r#"from typing import List; foo: List[int]"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -2030,7 +2037,7 @@ select = ["UP006"]
.arg("pyproject.toml")
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"from typing import List; foo: List[int]"#), @r"
.pass_stdin(r#"from typing import List; foo: List[int]"#), @"
success: true
exit_code: 0
----- stdout -----
@@ -2061,7 +2068,7 @@ select = ["UP006"]
.arg("pyproject.toml")
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"from typing import List; foo: List[int]"#), @r"
.pass_stdin(r#"from typing import List; foo: List[int]"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -2094,7 +2101,7 @@ select = ["UP006"]
.arg("pyproject.toml")
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"from typing import List; foo: List[int]"#), @r"
.pass_stdin(r#"from typing import List; foo: List[int]"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -2127,7 +2134,7 @@ select = ["UP006"]
.arg("pyproject.toml")
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"from typing import List; foo: List[int]"#), @r"
.pass_stdin(r#"from typing import List; foo: List[int]"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -2263,7 +2270,7 @@ requires-python = ">= 3.11"
.check_command()
.args(["--select","UP007"])
.arg(".")
, @r###"
, @"
success: false
exit_code: 1
----- stdout -----
@@ -2272,7 +2279,7 @@ requires-python = ">= 3.11"
[*] 1 fixable with the `--fix` option.
----- stderr -----
"###);
");
Ok(())
}
@@ -2348,7 +2355,7 @@ from typing import Union;foo: Union[int, str] = 1"#,
assert_cmd_snapshot!(fixture
.check_command()
.arg("test.py")
, @r"
, @"
success: false
exit_code: 1
----- stdout -----
@@ -2591,7 +2598,7 @@ fn checks_notebooks_in_stable() -> anyhow::Result<()> {
.check_command()
.arg("--select")
.arg("F401")
, @r"
, @"
success: false
exit_code: 1
----- stdout -----
@@ -2619,7 +2626,7 @@ fn nested_implicit_namespace_package() -> Result<()> {
.check_command()
.arg("--select")
.arg("INP")
, @r"
, @"
success: true
exit_code: 0
----- stdout -----
@@ -2633,7 +2640,7 @@ fn nested_implicit_namespace_package() -> Result<()> {
.arg("--select")
.arg("INP")
.arg("--preview")
, @r"
, @"
success: false
exit_code: 1
----- stdout -----
@@ -2764,7 +2771,7 @@ fn flake8_import_convention_nfkc_normalization() -> Result<()> {
.arg("ruff.toml")
.arg("-")
.pass_stdin("")
, @r###"
, @"
success: false
exit_code: 2
----- stdout -----
@@ -2772,7 +2779,7 @@ fn flake8_import_convention_nfkc_normalization() -> Result<()> {
----- stderr -----
ruff failed
Cause: Invalid alias for module 'test.module': alias normalizes to '__debug__', which is not allowed.
"###);
");
Ok(())
}
@@ -2822,7 +2829,7 @@ fn pyupgrade_up026_respects_isort_required_import_fix() {
.arg("--fix")
.arg("--no-cache")
.pass_stdin("1\n"),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2849,7 +2856,7 @@ fn pyupgrade_up026_respects_isort_required_import_from_fix() {
.arg("--fix")
.arg("--no-cache")
.pass_stdin("from mock import mock\n"),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2883,7 +2890,7 @@ d: Literal[None,] | Literal[None]
.arg("--preview")
.arg("--diff")
.arg("-")
.pass_stdin(snippet), @r"
.pass_stdin(snippet), @"
success: false
exit_code: 1
----- stdout -----
@@ -2937,7 +2944,7 @@ def func(t: _T) -> _T:
return x
"#
),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2976,7 +2983,7 @@ class Foo[_T, __T]:
pass
"#
),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3041,7 +3048,7 @@ fn a005_module_shadowing_strict() -> Result<()> {
.arg("--config")
.arg(r#"lint.flake8-builtins.strict-checking = true"#)
.args(["--select", "A005"]),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3069,7 +3076,7 @@ fn a005_module_shadowing_non_strict() -> Result<()> {
.arg("--config")
.arg(r#"lint.flake8-builtins.strict-checking = false"#)
.args(["--select", "A005"]),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3094,7 +3101,7 @@ fn a005_module_shadowing_strict_default() -> Result<()> {
assert_cmd_snapshot!(fixture.check_command()
.args(["--select", "A005"]),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3127,7 +3134,7 @@ T = TypeVar("T")
class A(Generic[T]):
var: T
"#),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3157,7 +3164,7 @@ T = TypeVar("T")
class A(Generic[T]):
var: T
"#),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -3177,7 +3184,7 @@ fn walrus_before_py38() {
.arg("--target-version=py38")
.arg("-")
.pass_stdin(r#"(x := 1)"#),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -3195,7 +3202,7 @@ fn walrus_before_py38() {
.arg("--preview")
.arg("-")
.pass_stdin(r#"(x := 1)"#),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3222,7 +3229,7 @@ match 2:
print("it's one")
"#
),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -3245,7 +3252,7 @@ match 2:
print("it's one")
"#
),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3270,7 +3277,7 @@ match 2:
print("it's one")
"#
),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3296,7 +3303,7 @@ fn cache_syntax_errors() -> Result<()> {
assert_cmd_snapshot!(
cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3309,7 +3316,7 @@ fn cache_syntax_errors() -> Result<()> {
// this should *not* be cached, like normal parse errors
assert_cmd_snapshot!(
cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3345,14 +3352,14 @@ fn cookiecutter_globbing() -> Result<()> {
assert_cmd_snapshot!(fixture
.check_command()
.arg("--select=F811"), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
.arg("--select=F811"), @"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
");
----- stderr -----
");
// after removing the config file with the ignore, F811 applies, so the glob worked above
fs::remove_file(
@@ -3363,7 +3370,7 @@ fn cookiecutter_globbing() -> Result<()> {
assert_cmd_snapshot!(fixture
.check_command()
.arg("--select=F811"), @r"
.arg("--select=F811"), @"
success: false
exit_code: 1
----- stdout -----
@@ -3388,15 +3395,15 @@ fn cookiecutter_globbing_no_project_root() -> Result<()> {
assert_cmd_snapshot!(fixture
.check_command()
.current_dir(fixture.root().join("{{cookiecutter.repo_name}}"))
.args(["--extend-per-file-ignores", "generated.py:Q"]), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
.args(["--extend-per-file-ignores", "generated.py:Q"]), @"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
warning: No Python files found under the given path(s)
");
----- stderr -----
warning: No Python files found under the given path(s)
");
Ok(())
}
@@ -3417,7 +3424,7 @@ fn semantic_syntax_errors() -> Result<()> {
assert_cmd_snapshot!(
cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3431,7 +3438,7 @@ fn semantic_syntax_errors() -> Result<()> {
// this should *not* be cached, like normal parse errors
assert_cmd_snapshot!(
cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3449,7 +3456,7 @@ fn semantic_syntax_errors() -> Result<()> {
.arg("--preview")
.arg("-")
.pass_stdin(contents),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3483,7 +3490,7 @@ class Foo:
.arg("--target-version=py39")
.arg("-")
.pass_stdin(contents),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -3581,7 +3588,7 @@ nested_optional: Optional[Optional[Optional[str]]] = None
.args(["--select", "UP045", "--diff", "--target-version", "py312"])
.arg("-")
.pass_stdin(contents),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3606,7 +3613,7 @@ fn show_fixes_in_full_output_with_preview_enabled() {
.arg("--preview")
.arg("-")
.pass_stdin("import math"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3637,7 +3644,7 @@ fn rule_panic_mixed_results_concise() -> Result<()> {
fixture.check_command()
.args(["--select", "RUF9", "--preview"])
.args(["normal.py", "panic.py"]),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -3672,7 +3679,7 @@ fn rule_panic_mixed_results_full() -> Result<()> {
fixture.command()
.args(["check", "--select", "RUF9", "--preview", "--output-format=full", "--no-cache"])
.args(["normal.py", "panic.py"]),
@r"
@"
success: false
exit_code: 2
----- stdout -----
@@ -3781,7 +3788,7 @@ fn supported_file_extensions() -> Result<()> {
fixture.check_command()
.args(["--select", "F401"])
.arg("src"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -3861,7 +3868,7 @@ fn supported_file_extensions_preview_enabled() -> Result<()> {
fixture.check_command()
.args(["--select", "F401", "--preview"])
.arg("src"),
@r"
@"
success: false
exit_code: 1
----- stdout -----


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:
@@ -17,7 +17,6 @@ info:
- "--fix"
- "-"
stdin: "1"
snapshot_kind: text
---
success: false
exit_code: 2


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -12,7 +12,6 @@ info:
- "--target-version"
- py39
- input.py
snapshot_kind: text
---
success: false
exit_code: 1


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -125,7 +125,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -261,6 +261,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -127,7 +127,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -263,6 +263,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -129,7 +129,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -265,6 +265,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -129,7 +129,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -265,6 +265,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -126,7 +126,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -262,6 +262,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -126,7 +126,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -262,6 +262,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -125,7 +125,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -261,6 +261,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -125,7 +125,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -261,6 +261,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -125,7 +125,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -261,6 +261,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/lint.rs
source: crates/ruff/tests/cli/lint.rs
info:
program: ruff
args:


@@ -18,13 +18,13 @@ fn check_in_deleted_directory_errors() {
set_current_dir(&temp_path).unwrap();
drop(temp_dir);
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)).arg("check"), @r###"
success: false
exit_code: 2
----- stdout -----
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)).arg("check"), @"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: Working directory does not exist
"###);
----- stderr -----
ruff failed
Cause: Working directory does not exist
");
}

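The hunks above and below migrate insta inline snapshots from raw-string literals (@r"…", @r###"…"###) to plain string literals (@"…"). A minimal sketch of the inline-snapshot form, using insta::assert_snapshot! directly rather than ruff's actual test harness; the test name and output are hypothetical:

use insta::assert_snapshot;

#[test]
fn renders_greeting() {
    let output = format!("hello, {}", "world");
    // The string after `@` holds the expected value and is rewritten in place
    // by `cargo insta review`. A raw literal (`@r"..."` / `@r###"..."###`) is
    // only needed when the expected text would otherwise require escaping.
    assert_snapshot!(output, @"hello, world");
}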

@@ -97,7 +97,7 @@ impl<'a> RuffCheck<'a> {
fn stdin_success() {
let mut cmd = RuffCheck::default().args([]).build();
assert_cmd_snapshot!(cmd
.pass_stdin(""), @r"
.pass_stdin(""), @"
success: true
exit_code: 0
----- stdout -----
@@ -111,7 +111,7 @@ fn stdin_success() {
fn stdin_error() {
let mut cmd = RuffCheck::default().args([]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\n"), @r"
.pass_stdin("import os\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -136,7 +136,7 @@ fn stdin_filename() {
.args(["--stdin-filename", "F401.py"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\n"), @r"
.pass_stdin("import os\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -172,7 +172,7 @@ import bar # unused import
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--isolated", "--no-cache", "--select", "F401"]).current_dir(tempdir.path()), @r"
.args(["check", "--isolated", "--no-cache", "--select", "F401"]).current_dir(tempdir.path()), @"
success: false
exit_code: 1
----- stdout -----
@@ -208,7 +208,7 @@ fn check_warn_stdin_filename_with_files() {
.filename("foo.py")
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\n"), @r"
.pass_stdin("import os\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -235,7 +235,7 @@ fn stdin_source_type_py() {
.args(["--stdin-filename", "TCH.py"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\n"), @r"
.pass_stdin("import os\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -261,7 +261,7 @@ fn stdin_source_type_pyi() {
.args(["--stdin-filename", "TCH.pyi", "--select", "TCH"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\n"), @r"
.pass_stdin("import os\n"), @"
success: true
exit_code: 0
----- stdout -----
@@ -294,7 +294,7 @@ fn stdin_json() {
fn stdin_fix_py() {
let mut cmd = RuffCheck::default().args(["--fix"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\nimport sys\n\nprint(sys.version)\n"), @r"
.pass_stdin("import os\nimport sys\n\nprint(sys.version)\n"), @"
success: true
exit_code: 0
----- stdout -----
@@ -572,7 +572,7 @@ fn stdin_override_parser_ipynb() {
},
"nbformat": 4,
"nbformat_minor": 5
}"#), @r"
}"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -610,7 +610,7 @@ fn stdin_override_parser_py() {
])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\n"), @r"
.pass_stdin("import os\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -633,7 +633,7 @@ fn stdin_override_parser_py() {
fn stdin_fix_when_not_fixable_should_still_print_contents() {
let mut cmd = RuffCheck::default().args(["--fix"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("import os\nimport sys\n\nif (1, 2):\n print(sys.version)\n"), @r###"
.pass_stdin("import os\nimport sys\n\nif (1, 2):\n print(sys.version)\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -654,14 +654,14 @@ fn stdin_fix_when_not_fixable_should_still_print_contents() {
|
Found 2 errors (1 fixed, 1 remaining).
"###);
");
}
#[test]
fn stdin_fix_when_no_issues_should_still_print_contents() {
let mut cmd = RuffCheck::default().args(["--fix"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("import sys\n\nprint(sys.version)\n"), @r"
.pass_stdin("import sys\n\nprint(sys.version)\n"), @"
success: true
exit_code: 0
----- stdout -----
@@ -805,7 +805,7 @@ fn stdin_format_jupyter() {
fn stdin_parse_error() {
let mut cmd = RuffCheck::default().build();
assert_cmd_snapshot!(cmd
.pass_stdin("from foo import\n"), @r"
.pass_stdin("from foo import\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -826,7 +826,7 @@ fn stdin_parse_error() {
fn stdin_multiple_parse_error() {
let mut cmd = RuffCheck::default().build();
assert_cmd_snapshot!(cmd
.pass_stdin("from foo import\nbar =\n"), @r"
.pass_stdin("from foo import\nbar =\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -857,7 +857,7 @@ fn parse_error_not_included() {
// Parse errors are always shown
let mut cmd = RuffCheck::default().args(["--select=I"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("foo =\n"), @r"
.pass_stdin("foo =\n"), @"
success: false
exit_code: 1
----- stdout -----
@@ -878,7 +878,7 @@ fn parse_error_not_included() {
fn full_output_preview() {
let mut cmd = RuffCheck::default().args(["--preview"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("l = 1"), @r"
.pass_stdin("l = 1"), @"
success: false
exit_code: 1
----- stdout -----
@@ -907,7 +907,7 @@ preview = true
",
)?;
let mut cmd = RuffCheck::default().config(&pyproject_toml).build();
assert_cmd_snapshot!(cmd.pass_stdin("l = 1"), @r"
assert_cmd_snapshot!(cmd.pass_stdin("l = 1"), @"
success: false
exit_code: 1
----- stdout -----
@@ -929,7 +929,7 @@ preview = true
fn full_output_format() {
let mut cmd = RuffCheck::default().output_format("full").build();
assert_cmd_snapshot!(cmd
.pass_stdin("l = 1"), @r"
.pass_stdin("l = 1"), @"
success: false
exit_code: 1
----- stdout -----
@@ -967,7 +967,7 @@ fn rule_f401_output_text() {
#[test]
fn rule_invalid_rule_name() {
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404"]), @r"
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404"]), @"
success: false
exit_code: 2
----- stdout -----
@@ -981,7 +981,7 @@ fn rule_invalid_rule_name() {
#[test]
fn rule_invalid_rule_name_output_json() {
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "json"]), @r"
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "json"]), @"
success: false
exit_code: 2
----- stdout -----
@@ -995,7 +995,7 @@ fn rule_invalid_rule_name_output_json() {
#[test]
fn rule_invalid_rule_name_output_text() {
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "text"]), @r"
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "text"]), @"
success: false
exit_code: 2
----- stdout -----
@@ -1016,7 +1016,7 @@ fn show_statistics() {
.pass_stdin(r#"
def mvce(keys, values):
return {key: value for key, value in zip(keys, values)}
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -1037,7 +1037,7 @@ fn show_statistics_unsafe_fixes() {
.pass_stdin(r#"
def mvce(keys, values):
return {key: value for key, value in zip(keys, values)}
"#), @r"
"#), @"
success: false
exit_code: 1
----- stdout -----
@@ -1152,7 +1152,7 @@ fn show_statistics_partial_fix() {
.args(["--select", "UP035", "--statistics"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("from typing import List, AsyncGenerator"), @r"
.pass_stdin("from typing import List, AsyncGenerator"), @"
success: false
exit_code: 1
----- stdout -----
@@ -1173,7 +1173,7 @@ fn show_statistics_syntax_errors() {
// ParseError
assert_cmd_snapshot!(
cmd.pass_stdin("x ="),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1186,7 +1186,7 @@ fn show_statistics_syntax_errors() {
// match before 3.10, UnsupportedSyntaxError
assert_cmd_snapshot!(
cmd.pass_stdin("match 2:\n case 1: ..."),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1199,7 +1199,7 @@ fn show_statistics_syntax_errors() {
// rebound comprehension variable, SemanticSyntaxError
assert_cmd_snapshot!(
cmd.pass_stdin("[x := 1 for x in range(0)]"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1216,7 +1216,7 @@ fn preview_enabled_prefix() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF9", "--output-format=concise", "--preview"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1238,7 +1238,7 @@ fn preview_enabled_all() {
let mut cmd = RuffCheck::default()
.args(["--select", "ALL", "--output-format=concise", "--preview"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1265,7 +1265,7 @@ fn preview_enabled_direct() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF911", "--output-format=concise", "--preview"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1282,7 +1282,7 @@ fn preview_disabled_direct() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF911", "--output-format=concise"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1299,7 +1299,7 @@ fn preview_disabled_prefix_empty() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF91", "--output-format=concise"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1316,7 +1316,7 @@ fn preview_disabled_does_not_warn_for_empty_ignore_selections() {
let mut cmd = RuffCheck::default()
.args(["--ignore", "RUF9", "--output-format=concise"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1332,7 +1332,7 @@ fn preview_disabled_does_not_warn_for_empty_fixable_selections() {
let mut cmd = RuffCheck::default()
.args(["--fixable", "RUF9", "--output-format=concise"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1354,7 +1354,7 @@ fn preview_group_selector() {
])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("I=42\n"), @r"
.pass_stdin("I=42\n"), @"
success: false
exit_code: 2
----- stdout -----
@@ -1379,7 +1379,7 @@ fn preview_enabled_group_ignore() {
"--output-format=concise",
])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1400,7 +1400,7 @@ fn preview_enabled_group_ignore() {
fn removed_direct() {
// Selection of a removed rule should fail
let mut cmd = RuffCheck::default().args(["--select", "RUF931"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 2
----- stdout -----
@@ -1418,7 +1418,7 @@ fn removed_direct_multiple() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF930", "--select", "RUF931"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 2
----- stdout -----
@@ -1436,7 +1436,7 @@ fn removed_indirect() {
// Selection _including_ a removed rule without matching should not fail
// nor should the rule be used
let mut cmd = RuffCheck::default().args(["--select", "RUF93"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1449,7 +1449,7 @@ fn removed_indirect() {
#[test]
fn removed_ignore_direct() {
let mut cmd = RuffCheck::default().args(["--ignore", "UP027"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1466,7 +1466,7 @@ fn removed_ignore_multiple_direct() {
let mut cmd = RuffCheck::default()
.args(["--ignore", "UP027", "--ignore", "PLR1706"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1482,7 +1482,7 @@ fn removed_ignore_multiple_direct() {
#[test]
fn removed_ignore_remapped_direct() {
let mut cmd = RuffCheck::default().args(["--ignore", "PGH001"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1498,7 +1498,7 @@ fn removed_ignore_indirect() {
// `PLR170` includes removed rules but should not select or warn
// since it is not a "direct" selection
let mut cmd = RuffCheck::default().args(["--ignore", "PLR170"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1512,7 +1512,7 @@ fn removed_ignore_indirect() {
fn redirect_direct() {
// Selection of a redirected rule directly should use the new rule and warn
let mut cmd = RuffCheck::default().args(["--select", "RUF940"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1531,7 +1531,7 @@ fn redirect_indirect() {
// Selection _including_ a redirected rule without matching should not fail
// nor should the rule be used
let mut cmd = RuffCheck::default().args(["--select", "RUF94"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1546,7 +1546,7 @@ fn redirect_prefix() {
// Selection using a redirected prefix should switch to all rules in the
// new prefix
let mut cmd = RuffCheck::default().args(["--select", "RUF96"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1565,7 +1565,7 @@ fn deprecated_direct() {
// Selection of a deprecated rule without preview enabled should still work
// but a warning should be displayed
let mut cmd = RuffCheck::default().args(["--select", "RUF920"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1586,7 +1586,7 @@ fn deprecated_multiple_direct() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF920", "--select", "RUF921"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 1
----- stdout -----
@@ -1609,7 +1609,7 @@ fn deprecated_indirect() {
// `RUF92` includes deprecated rules but should not warn
// since it is not a "direct" selection
let mut cmd = RuffCheck::default().args(["--select", "RUF92"]).build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1625,7 +1625,7 @@ fn deprecated_direct_preview_enabled() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF920", "--preview"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 2
----- stdout -----
@@ -1642,7 +1642,7 @@ fn deprecated_indirect_preview_enabled() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF92", "--preview"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1659,7 +1659,7 @@ fn deprecated_multiple_direct_preview_enabled() {
let mut cmd = RuffCheck::default()
.args(["--select", "RUF920", "--select", "RUF921", "--preview"])
.build();
assert_cmd_snapshot!(cmd, @r"
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 2
----- stdout -----
@@ -1720,7 +1720,7 @@ fn unreadable_dir() -> Result<()> {
.filename(unreadable_dir.to_str().unwrap())
.args([])
.build();
assert_cmd_snapshot!(cmd, @r###"
assert_cmd_snapshot!(cmd, @"
success: true
exit_code: 0
----- stdout -----
@@ -1728,7 +1728,7 @@ fn unreadable_dir() -> Result<()> {
----- stderr -----
warning: Encountered error: Permission denied (os error 13)
"###);
");
Ok(())
}
@@ -1758,7 +1758,7 @@ fn check_input_from_argfile() -> Result<()> {
(file_a_path.display().to_string().as_str(), "/path/to/a.py"),
]}, {
assert_cmd_snapshot!(cmd
.pass_stdin(""), @r"
.pass_stdin(""), @"
success: false
exit_code: 1
----- stdout -----
@@ -1787,17 +1787,17 @@ fn missing_argfile_reports_error() {
insta::with_settings!({filters => vec![
("The system cannot find the file specified.", "No such file or directory")
]}, {
assert_cmd_snapshot!(cmd, @r"
success: false
exit_code: 2
----- stdout -----
assert_cmd_snapshot!(cmd, @"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: Failed to read CLI arguments from files
Cause: failed to open file `!.txt`
Cause: No such file or directory (os error 2)
");
----- stderr -----
ruff failed
Cause: Failed to read CLI arguments from files
Cause: failed to open file `!.txt`
Cause: No such file or directory (os error 2)
");
});
}
@@ -1807,7 +1807,7 @@ fn check_hints_hidden_unsafe_fixes() {
.args(["--select", "RUF901,RUF902"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1829,7 +1829,7 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
let mut cmd = RuffCheck::default().args(["--select", "RUF902"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1849,7 +1849,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
.args(["--select", "RUF901,RUF902", "--no-unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1873,7 +1873,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1892,7 +1892,7 @@ fn check_shows_unsafe_fixes_with_opt_in() {
.args(["--select", "RUF901,RUF902", "--unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1915,7 +1915,7 @@ fn fix_applies_safe_fixes_by_default() {
.args(["--select", "RUF901,RUF902", "--fix"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1936,7 +1936,7 @@ fn fix_applies_unsafe_fixes_with_opt_in() {
.args(["--select", "RUF901,RUF902", "--fix", "--unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -1955,7 +1955,7 @@ fn fix_does_not_apply_display_only_fixes() {
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1975,7 +1975,7 @@ fn fix_does_not_apply_display_only_fixes_with_unsafe_fixes_enabled() {
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -1994,7 +1994,7 @@ fn fix_only_unsafe_fixes_available() {
.args(["--select", "RUF902", "--fix"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2014,7 +2014,7 @@ fn fix_only_flag_applies_safe_fixes_by_default() {
.args(["--select", "RUF901,RUF902", "--fix-only"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2031,7 +2031,7 @@ fn fix_only_flag_applies_unsafe_fixes_with_opt_in() {
.args(["--select", "RUF901,RUF902", "--fix-only", "--unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2049,7 +2049,7 @@ fn diff_shows_safe_fixes_by_default() {
.args(["--select", "RUF901,RUF902", "--diff"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2069,7 +2069,7 @@ fn diff_shows_unsafe_fixes_with_opt_in() {
.args(["--select", "RUF901,RUF902", "--diff", "--unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2091,7 +2091,7 @@ fn diff_does_not_show_display_only_fixes_with_unsafe_fixes_enabled() {
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2106,7 +2106,7 @@ fn diff_only_unsafe_fixes_available() {
.args(["--select", "RUF902", "--diff"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2134,7 +2134,7 @@ extend-unsafe-fixes = ["RUF901"]
.args(["--select", "RUF901,RUF902"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2170,7 +2170,7 @@ extend-safe-fixes = ["RUF902"]
.args(["--select", "RUF901,RUF902"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2208,7 +2208,7 @@ extend-safe-fixes = ["RUF902"]
.args(["--select", "RUF901,RUF902"])
.build();
assert_cmd_snapshot!(cmd,
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2248,7 +2248,7 @@ extend-safe-fixes = ["RUF9"]
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\nprint(str('foo'))\nisinstance(x, (int, str))\n"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2307,7 +2307,7 @@ def log(x, base) -> float:
.args(["--select", "D41"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin(stdin), @r"
.pass_stdin(stdin), @"
success: true
exit_code: 0
----- stdout -----
@@ -2360,7 +2360,7 @@ select = ["RUF017"]
let mut cmd = RuffCheck::default().config(&ruff_toml).build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = [1, 2, 3]\ny = [4, 5, 6]\nsum([x, y], [])"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2401,7 +2401,7 @@ unfixable = ["RUF"]
let mut cmd = RuffCheck::default().config(&ruff_toml).build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = [1, 2, 3]\ny = [4, 5, 6]\nsum([x, y], [])"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2431,7 +2431,7 @@ fn pyproject_toml_stdin_syntax_error() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2457,7 +2457,7 @@ fn pyproject_toml_stdin_schema_error() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project]\nname = 1"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2484,7 +2484,7 @@ fn pyproject_toml_stdin_no_applicable_rules_selected() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project"),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2503,7 +2503,7 @@ fn pyproject_toml_stdin_no_applicable_rules_selected_2() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project"),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2522,7 +2522,7 @@ fn pyproject_toml_stdin_no_errors() {
assert_cmd_snapshot!(
cmd.pass_stdin(r#"[project]\nname = "ruff"\nversion = "0.0.0""#),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2547,7 +2547,7 @@ fn pyproject_toml_stdin_schema_error_fix() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project]\nname = 1"),
@r"
@"
success: false
exit_code: 1
----- stdout -----
@@ -2581,7 +2581,7 @@ fn pyproject_toml_stdin_schema_error_fix_only() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project]\nname = 1"),
@r"
@"
success: true
exit_code: 0
----- stdout -----
@@ -2607,7 +2607,7 @@ fn pyproject_toml_stdin_schema_error_fix_diff() {
assert_cmd_snapshot!(
cmd.pass_stdin("[project]\nname = 1"),
@r"
@"
success: true
exit_code: 0
----- stdout -----


@@ -29,7 +29,7 @@ fn check_project_include_defaults() {
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @"
success: true
exit_code: 0
----- stdout -----
@@ -53,7 +53,7 @@ fn check_project_respects_direct_paths() {
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @"
success: true
exit_code: 0
----- stdout -----
@@ -72,7 +72,7 @@ fn check_project_respects_subdirectory_includes() {
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @"
success: true
exit_code: 0
----- stdout -----
@@ -91,7 +91,7 @@ fn check_project_from_project_subdirectory_respects_includes() {
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r"
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @"
success: true
exit_code: 0
----- stdout -----


@@ -3,11 +3,12 @@ source: crates/ruff/tests/integration_test.rs
info:
program: ruff
args:
- "-"
- "--isolated"
- "--no-cache"
- check
- "--output-format"
- json
- "--no-cache"
- "--isolated"
- "-"
- "--stdin-filename"
- F401.py
stdin: "import os\n"
@@ -51,4 +52,3 @@ exit_code: 1
}
]
----- stderr -----


@@ -238,7 +238,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
@@ -374,6 +374,7 @@ linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []


@@ -16,7 +16,7 @@ const VERSION_FILTER: [(&str, &str); 1] = [(
fn version_basics() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("version"), @r"
Command::new(get_cargo_bin(BIN_NAME)).arg("version"), @"
success: true
exit_code: 0
----- stdout -----
@@ -42,7 +42,7 @@ fn config_option_allowed_but_ignored() -> Result<()> {
.arg("version")
.arg("--config")
.arg(&ruff_dot_toml)
.args(["--config", "lint.isort.extra-standard-library = ['foo', 'bar']"]), @r"
.args(["--config", "lint.isort.extra-standard-library = ['foo', 'bar']"]), @"
success: true
exit_code: 0
----- stdout -----
@@ -60,7 +60,7 @@ fn config_option_ignored_but_validated() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.args(["--config", "foo = bar"]), @r"
.args(["--config", "foo = bar"]), @"
success: false
exit_code: 2
----- stdout -----
@@ -91,7 +91,7 @@ fn config_option_ignored_but_validated() {
fn isolated_option_allowed() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("version").arg("--isolated"), @r"
Command::new(get_cargo_bin(BIN_NAME)).arg("version").arg("--isolated"), @"
success: true
exit_code: 0
----- stdout -----


@@ -12,10 +12,6 @@ license = "MIT OR Apache-2.0"
[lib]
[features]
default = []
testing-colors = []
[dependencies]
anstyle = { workspace = true }
memchr = { workspace = true }
@@ -23,12 +19,17 @@ unicode-width = { workspace = true }
[dev-dependencies]
ruff_annotate_snippets = { workspace = true, features = ["testing-colors"] }
anstream = { workspace = true }
serde = { workspace = true, features = ["derive"] }
snapbox = { workspace = true, features = ["diff", "term-svg", "cmd", "examples"] }
toml = { workspace = true }
tryfn = { workspace = true }
[features]
default = []
testing-colors = []
[[test]]
name = "fixtures"
harness = false


@@ -16,6 +16,51 @@ bench = false
test = false
doctest = false
[dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_linter = { workspace = true, optional = true }
ruff_python_ast = { workspace = true }
ruff_python_formatter = { workspace = true, optional = true }
ruff_python_parser = { workspace = true, optional = true }
ruff_python_trivia = { workspace = true, optional = true }
ty_project = { workspace = true, optional = true }
anyhow = { workspace = true }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
criterion = { workspace = true, default-features = false, optional = true }
divan = { workspace = true, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true, optional = true }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true, optional = true }
[dev-dependencies]
rayon = { workspace = true }
rustc-hash = { workspace = true }
[features]
default = ["ty_instrumented", "ty_walltime", "ruff_instrumented"]
# Enables the ruff instrumented benchmarks
ruff_instrumented = [
"criterion",
"ruff_linter",
"ruff_python_formatter",
"ruff_python_parser",
"ruff_python_trivia",
"mimalloc",
"tikv-jemallocator",
]
# Enables the ty instrumented benchmarks
ty_instrumented = ["criterion", "ty_project", "ruff_python_trivia"]
codspeed = ["codspeed-criterion-compat"]
# Enables the ty_walltime benchmarks
ty_walltime = ["ruff_db/os", "ty_project", "divan"]
[[bench]]
name = "linter"
harness = false
@@ -46,54 +91,5 @@ name = "ty_walltime"
harness = false
required-features = ["ty_walltime"]
[dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_ast = { workspace = true }
ruff_linter = { workspace = true, optional = true }
ruff_python_formatter = { workspace = true, optional = true }
ruff_python_parser = { workspace = true, optional = true }
ruff_python_trivia = { workspace = true, optional = true }
ty_project = { workspace = true, optional = true }
divan = { workspace = true, optional = true }
anyhow = { workspace = true }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
criterion = { workspace = true, default-features = false, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true }
[lints]
workspace = true
[features]
default = ["ty_instrumented", "ty_walltime", "ruff_instrumented"]
# Enables the ruff instrumented benchmarks
ruff_instrumented = [
"criterion",
"ruff_linter",
"ruff_python_formatter",
"ruff_python_parser",
"ruff_python_trivia",
"mimalloc",
"tikv-jemallocator",
]
# Enables the ty instrumented benchmarks
ty_instrumented = [
"criterion",
"ty_project",
"ruff_python_trivia",
]
codspeed = ["codspeed-criterion-compat"]
# Enables the ty_walltime benchmarks
ty_walltime = ["ruff_db/os", "ty_project", "divan"]
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true, optional = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true, optional = true }
[dev-dependencies]
rustc-hash = { workspace = true }
rayon = { workspace = true }


@@ -557,6 +557,60 @@ fn benchmark_many_enum_members(criterion: &mut Criterion) {
});
}
fn benchmark_many_enum_members_2(criterion: &mut Criterion) {
const NUM_ENUM_MEMBERS: usize = 48;
setup_rayon();
let mut code = "\
from enum import Enum
from typing_extensions import assert_never
class E(Enum):
"
.to_string();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, " m{i} = {i}").ok();
}
code.push_str(
"
def method(self):
match self:",
);
for i in 0..NUM_ENUM_MEMBERS {
write!(
&mut code,
"
case E.m{i}:
pass"
)
.ok();
}
write!(
&mut code,
"
case _:
assert_never(self)"
)
.ok();
criterion.bench_function("ty_micro[many_enum_members_2]", |b| {
b.iter_batched_ref(
|| setup_micro_case(&code),
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}
struct ProjectBenchmark<'a> {
project: InstalledProject<'a>,
fs: MemoryFileSystem,
@@ -717,6 +771,7 @@ criterion_group!(
benchmark_complex_constrained_attributes_2,
benchmark_complex_constrained_attributes_3,
benchmark_many_enum_members,
benchmark_many_enum_members_2,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_main!(check_file, micro, project);

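The benchmark added above follows criterion's batched-setup pattern. A minimal, self-contained sketch with hypothetical Case/build_case/run_check stand-ins; iter_batched_ref runs the setup closure outside the timed region and hands the measured closure a mutable reference to each freshly built value:

use criterion::{BatchSize, Criterion};

struct Case(Vec<u32>);

fn build_case() -> Case {
    Case((0..10_000).collect())
}

fn run_check(case: &mut Case) -> usize {
    case.0.iter().filter(|n| **n % 7 == 0).count()
}

fn bench_check(criterion: &mut Criterion) {
    criterion.bench_function("example[check]", |b| {
        b.iter_batched_ref(
            build_case,             // untimed: rebuild the input for every batch
            |case| run_check(case), // timed: only this closure is measured
            BatchSize::SmallInput,
        );
    });
}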

@@ -194,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
13106,
13116,
);
static TANJUN: Benchmark = Benchmark::new(


@@ -11,11 +11,11 @@ repository = { workspace = true }
license = { workspace = true }
[dependencies]
filetime = { workspace = true }
glob = { workspace = true }
globset = { workspace = true }
itertools = { workspace = true }
regex = { workspace = true }
filetime = { workspace = true }
seahash = { workspace = true }
[dev-dependencies]


@@ -48,12 +48,12 @@ tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
zip = { workspace = true }
[target.'cfg(target_arch="wasm32")'.dependencies]
web-time = { version = "1.1.0" }
[target.'cfg(not(target_arch="wasm32"))'.dependencies]
etcetera = { workspace = true, optional = true }
[target.'cfg(target_arch="wasm32")'.dependencies]
web-time = { version = "1.1.0" }
[dev-dependencies]
insta = { workspace = true, features = ["filters"] }
tempfile = { workspace = true }


@@ -1284,7 +1284,7 @@ watermelon
let diag = env.err().primary("animals", "5", "5", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -1308,7 +1308,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
warning[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -1328,7 +1328,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
info[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -1355,7 +1355,7 @@ watermelon
let diag = builder.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1374,7 +1374,7 @@ watermelon
let diag = builder.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1395,7 +1395,7 @@ watermelon
let diag = env.err().primary("non-ascii", "5", "5", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> non-ascii:5:1
|
@@ -1414,7 +1414,7 @@ watermelon
let diag = env.err().primary("non-ascii", "2:4", "2:8", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> non-ascii:2:2
|
@@ -1438,7 +1438,7 @@ watermelon
env.context(1);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -1455,7 +1455,7 @@ watermelon
env.context(0);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -1470,7 +1470,7 @@ watermelon
env.context(2);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1487,7 +1487,7 @@ watermelon
env.context(2);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:11:1
|
@@ -1504,7 +1504,7 @@ watermelon
env.context(200);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -1537,7 +1537,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1581,7 +1581,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1606,7 +1606,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1634,7 +1634,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1662,7 +1662,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1687,7 +1687,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1718,7 +1718,7 @@ watermelon
// window.
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:1:1
|
@@ -1756,7 +1756,7 @@ watermelon
let diag = env.err().primary("spacey-animals", "8", "8", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> spacey-animals:8:1
|
@@ -1773,7 +1773,7 @@ watermelon
let diag = env.err().primary("spacey-animals", "12", "12", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> spacey-animals:12:1
|
@@ -1791,7 +1791,7 @@ watermelon
let diag = env.err().primary("spacey-animals", "13", "13", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> spacey-animals:13:1
|
@@ -1831,7 +1831,7 @@ watermelon
// instead of special casing the snippet assembly.
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> spacey-animals:3:1
|
@@ -1860,7 +1860,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -1897,7 +1897,7 @@ watermelon
);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -1934,7 +1934,7 @@ watermelon
);
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -1962,7 +1962,7 @@ watermelon
diag.sub(env.sub_warn().primary("fruits", "3", "3", "").build());
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -1998,7 +1998,7 @@ watermelon
diag.sub(env.sub_warn().primary("animals", "11", "11", "").build());
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -2037,7 +2037,7 @@ watermelon
diag.sub(env.sub_warn().primary("fruits", "3", "3", "").build());
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -2085,7 +2085,7 @@ watermelon
diag.sub(env.sub_warn().secondary("animals", "3", "3", "").build());
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:3:1
|
@@ -2121,7 +2121,7 @@ watermelon
let diag = env.err().primary("animals", "5", "6", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2144,7 +2144,7 @@ watermelon
let diag = env.err().primary("animals", "5", "7:0", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2164,7 +2164,7 @@ watermelon
let diag = env.err().primary("animals", "5", "7:1", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2184,7 +2184,7 @@ watermelon
let diag = env.err().primary("animals", "5:3", "8:8", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:4
|
@@ -2206,7 +2206,7 @@ watermelon
let diag = env.err().secondary("animals", "5:3", "8:8", "").build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:4
|
@@ -2238,7 +2238,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:4:1
|
@@ -2267,7 +2267,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:4:1
|
@@ -2298,7 +2298,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2333,7 +2333,7 @@ watermelon
// better using only ASCII art.
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2361,7 +2361,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2393,7 +2393,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:3
|
@@ -2415,7 +2415,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:3
|
@@ -2448,7 +2448,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:8:1
|
@@ -2488,7 +2488,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:5:1
|
@@ -2532,7 +2532,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> fruits:1:1
|
@@ -2567,7 +2567,7 @@ watermelon
.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
@"
error[test-diagnostic]: main diagnostic message
--> animals:11:1
|


@@ -137,7 +137,7 @@ mod tests {
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @"
fib.py:1:8: error[unused-import] `os` imported but unused
fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
undef.py:1:4: error[undefined-name] Undefined name `a`
@@ -150,7 +150,7 @@ mod tests {
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
@@ -164,7 +164,7 @@ mod tests {
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
env.preview(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
@@ -177,7 +177,7 @@ mod tests {
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @"
syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline
");
@@ -186,7 +186,7 @@ mod tests {
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @"
syntax_errors.py:1:15: error[invalid-syntax] Expected one or more symbol names after import
syntax_errors.py:3:12: error[invalid-syntax] Expected ')', found newline
");
@@ -195,7 +195,7 @@ mod tests {
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @"
notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used


@@ -619,19 +619,19 @@ pub(crate) mod tests {
fn read_directory_stdlib() {
let mock_typeshed = mock_typeshed();
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");


@@ -11,11 +11,6 @@ repository = { workspace = true }
license = { workspace = true }
[dependencies]
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_python_semantic = { workspace = true }
ty_python_types = { workspace = true }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }
@@ -27,6 +22,10 @@ ruff_python_formatter = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_workspace = { workspace = true, features = ["schemars"] }
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_python_semantic = { workspace = true }
ty_static = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }


@@ -52,7 +52,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
}
fn generate_markdown() -> String {
let registry = ty_python_types::default_lint_registry();
let registry = ty_python_semantic::default_lint_registry();
let mut output = String::new();


@@ -10,6 +10,10 @@ documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[package.metadata.cargo-shear]
# Used via `CacheKey` macro expansion.
ignored = ["ruff_cache"]
[dependencies]
ruff_cache = { workspace = true }
ruff_macros = { workspace = true }
@@ -25,10 +29,6 @@ unicode-width = { workspace = true }
[dev-dependencies]
[package.metadata.cargo-shear]
# Used via `CacheKey` macro expansion.
ignored = ["ruff_cache"]
[features]
serde = ["dep:serde", "ruff_text_size/serde"]
schemars = ["dep:schemars", "ruff_text_size/schemars"]


@@ -9,6 +9,10 @@ repository.workspace = true
authors.workspace = true
license.workspace = true
[package.metadata.cargo-shear]
# Used via `CacheKey` macro expansion.
ignored = ["ruff_cache"]
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["os", "serde"] }
@@ -18,7 +22,6 @@ ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ty_module_resolver = { workspace = true }
ty_python_semantic = { workspace = true }
ty_python_types = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true, optional = true }
@@ -30,7 +33,3 @@ zip = { workspace = true, features = [] }
[lints]
workspace = true
[package.metadata.cargo-shear]
# Used via `CacheKey` macro expansion.
ignored = ["ruff_cache"]


@@ -11,9 +11,8 @@ use ty_module_resolver::{SearchPathSettings, SearchPaths};
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
AnalysisSettings, Db, Program, ProgramSettings, PythonEnvironment, PythonPlatform,
PythonVersionSource, PythonVersionWithSource, SysPrefixPathOrigin,
PythonVersionSource, PythonVersionWithSource, SysPrefixPathOrigin, default_lint_registry,
};
use ty_python_types::default_lint_registry;
static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
let mut builder = VendoredFileSystemBuilder::new(CompressionMethod::Stored);


@@ -16,17 +16,17 @@ license = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["junit", "serde"] }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true, features = ["serde", "cache"] }
ruff_python_codegen = { workspace = true }
ruff_python_importer = { workspace = true }
ruff_python_index = { workspace = true }
ruff_python_literal = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_semantic = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_source_file = { workspace = true, features = ["serde"] }
ruff_text_size = { workspace = true }
@@ -44,8 +44,8 @@ imperative = { workspace = true }
is-macro = { workspace = true }
is-wsl = { workspace = true }
itertools = { workspace = true }
libcst = { workspace = true }
jiff = { workspace = true }
libcst = { workspace = true }
log = { workspace = true }
memchr = { workspace = true }
natord = { workspace = true }
@@ -67,17 +67,17 @@ strum_macros = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
typed-arena = { workspace = true }
unicode-normalization = { workspace = true }
unicode-width = { workspace = true }
unicode_names2 = { workspace = true }
unicode-normalization = { workspace = true }
url = { workspace = true }
[dev-dependencies]
insta = { workspace = true, features = ["filters", "json", "redactions"] }
test-case = { workspace = true }
# Disable colored output in tests
colored = { workspace = true, features = ["no-color"] }
insta = { workspace = true, features = ["filters", "json", "redactions"] }
tempfile = { workspace = true }
test-case = { workspace = true }
[features]
default = []


@@ -0,0 +1,31 @@
from __future__ import annotations
from airflow.lineage.hook import HookLineageCollector
# airflow.lineage.hook
hlc = HookLineageCollector()
hlc.create_asset("there")
hlc.create_asset("should", "be", "no", "posarg")
hlc.create_asset(name="but", uri="kwargs are ok")
hlc.create_asset()
HookLineageCollector().create_asset(name="but", uri="kwargs are ok")
HookLineageCollector().create_asset("there")
HookLineageCollector().create_asset("should", "be", "no", "posarg")
args = ["uri_value"]
hlc.create_asset(*args)
HookLineageCollector().create_asset(*args)
# Literal unpacking
hlc.create_asset(*["literal_uri"])
HookLineageCollector().create_asset(*["literal_uri"])
# starred args with keyword args
hlc.create_asset(*args, extra="value")
HookLineageCollector().create_asset(*args, extra="value")
# Double-starred keyword arguments
kwargs = {"uri": "value", "name": "test"}
hlc.create_asset(**kwargs)
HookLineageCollector().create_asset(**kwargs)


@@ -1,5 +1,5 @@
from abc import abstractmethod
from typing import overload, cast
from typing import overload, cast, TypeVar
from typing_extensions import override
@@ -256,3 +256,15 @@ class C:
"""Docstring."""
msg = t"{x}..."
raise NotImplementedError(msg)
###
# Unused arguments with `**kwargs`.
###
def f(
default: object = None, # noqa: ARG001
**kwargs: object,
) -> None:
TypeVar(**kwargs)


@@ -53,3 +53,25 @@ for i in items:
items = [1, 2, 3, 4]
for i in items:
items[i]
# A case with multiple uses of the value to show off the secondary annotations
for instrument in ORCHESTRA:
data = json.dumps(
{
"instrument": instrument,
"section": ORCHESTRA[instrument],
}
)
print(f"saving data for {instrument} in {ORCHESTRA[instrument]}")
with open(f"{instrument}/{ORCHESTRA[instrument]}.txt", "w") as f:
f.write(data)
# This should still suppress the error
for ( # noqa: PLC0206
instrument
) in ORCHESTRA:
print(f"{instrument}: {ORCHESTRA[instrument]}")


@@ -73,3 +73,7 @@ foo == 1 or foo == 1.0 # Different types, same hashed value
foo == False or foo == 0 # Different types, same hashed value
foo == 0.0 or foo == 0j # Different types, same hashed value
foo == "bar" or foo == "bar" # All members identical
foo == "bar" or foo == "bar" or foo == "buzz" # All but one members identical


@@ -0,0 +1,51 @@
"""This is the module docstring."""
# convenience imports:
import os
from pathlib import Path
__all__ = ["MY_CONSTANT"]
__all__ += ["foo"]
__all__: list[str] = __all__
__all__ = __all__ = __all__
MY_CONSTANT = 5
"""This is an important constant."""
os.environ["FOO"] = 1
def foo():
return Path("foo.py")
def __getattr__(name): # ok
return name
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # ok
if os.environ["FOO"] != "1": # RUF067
MY_CONSTANT = 4 # ok, don't flag nested statements
if TYPE_CHECKING: # ok
MY_CONSTANT = 3
import typing
if typing.TYPE_CHECKING: # ok
MY_CONSTANT = 2
__version__ = "1.2.3" # ok
def __dir__(): # ok
return ["foo"]
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__) # ok
__path__ = unknown.extend_path(__path__, __name__) # also ok
# non-`extend_path` assignments are not allowed
__path__ = 5 # RUF067
# also allow `__author__`
__author__ = "The Author" # ok


@@ -0,0 +1,54 @@
"""
The code here is not in an `__init__.py` file and should not trigger the
lint.
"""
# convenience imports:
import os
from pathlib import Path
__all__ = ["MY_CONSTANT"]
__all__ += ["foo"]
__all__: list[str] = __all__
__all__ = __all__ = __all__
MY_CONSTANT = 5
"""This is an important constant."""
os.environ["FOO"] = 1
def foo():
return Path("foo.py")
def __getattr__(name): # ok
return name
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # ok
if os.environ["FOO"] != "1": # RUF067
MY_CONSTANT = 4 # ok, don't flag nested statements
if TYPE_CHECKING: # ok
MY_CONSTANT = 3
import typing
if typing.TYPE_CHECKING: # ok
MY_CONSTANT = 2
__version__ = "1.2.3" # ok
def __dir__(): # ok
return ["foo"]
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__) # ok
__path__ = unknown.extend_path(__path__, __name__) # also ok
# non-`extend_path` assignments are not allowed
__path__ = 5 # RUF067
# also allow `__author__`
__author__ = "The Author" # ok


@@ -1043,7 +1043,7 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
]) && flake8_gettext::is_gettext_func_call(
checker,
func,
&checker.settings().flake8_gettext.functions_names,
&checker.settings().flake8_gettext.function_names,
) {
if checker.is_rule_enabled(Rule::FStringInGetTextFuncCall) {
flake8_gettext::rules::f_string_in_gettext_func_call(checker, args);
@@ -1278,6 +1278,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if checker.is_rule_enabled(Rule::Airflow3SuggestedUpdate) {
airflow::rules::airflow_3_0_suggested_update_expr(checker, expr);
}
if checker.is_rule_enabled(Rule::Airflow3IncompatibleFunctionSignature) {
airflow::rules::airflow_3_incompatible_function_signature(checker, expr);
}
if checker.is_rule_enabled(Rule::UnnecessaryCastToInt) {
ruff::rules::unnecessary_cast_to_int(checker, call);
}


@@ -1630,4 +1630,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
_ => {}
}
if checker.is_rule_enabled(Rule::NonEmptyInitModule) {
ruff::rules::non_empty_init_module(checker, stmt);
}
}


@@ -33,7 +33,7 @@ pub(crate) fn unresolved_references(checker: &Checker) {
}
// Allow __path__.
if checker.path.ends_with("__init__.py") {
if checker.in_init_module() {
if reference.name(checker.source()) == "__path__" {
continue;
}

View File

@@ -21,7 +21,7 @@
//! represents the lint-rule analysis phase. In the future, these steps may be separated into
//! distinct passes over the AST.
use std::cell::RefCell;
use std::cell::{OnceCell, RefCell};
use std::path::Path;
use itertools::Itertools;
@@ -198,6 +198,8 @@ pub(crate) struct Checker<'a> {
parsed_type_annotation: Option<&'a ParsedAnnotation>,
/// The [`Path`] to the file under analysis.
path: &'a Path,
/// Whether `path` points to an `__init__.py` file.
in_init_module: OnceCell<bool>,
/// The [`Path`] to the package containing the current file.
package: Option<PackageRoot<'a>>,
/// The module representation of the current file (e.g., `foo.bar`).
@@ -274,6 +276,7 @@ impl<'a> Checker<'a> {
noqa_line_for,
noqa,
path,
in_init_module: OnceCell::new(),
package,
module,
source_type,
@@ -482,9 +485,11 @@ impl<'a> Checker<'a> {
self.context.settings
}
/// The [`Path`] to the file under analysis.
pub(crate) const fn path(&self) -> &'a Path {
self.path
/// Returns whether the file under analysis is an `__init__.py` file.
pub(crate) fn in_init_module(&self) -> bool {
*self
.in_init_module
.get_or_init(|| self.path.ends_with("__init__.py"))
}
/// The [`Path`] to the package containing the current file.
@@ -1873,6 +1878,9 @@ impl<'a> Visitor<'a> for Checker<'a> {
} else {
self.visit_non_type_definition(value);
}
} else {
// Ex: typing.TypeVar(**kwargs)
self.visit_non_type_definition(value);
}
}
}
@@ -3171,7 +3179,7 @@ impl<'a> Checker<'a> {
// F822
if self.is_rule_enabled(Rule::UndefinedExport) {
if is_undefined_export_in_dunder_init_enabled(self.settings())
|| !self.path.ends_with("__init__.py")
|| !self.in_init_module()
{
self.report_diagnostic(
pyflakes::rules::UndefinedExport {

View File
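The hunks above cache whether the current file is an `__init__.py` in a `OnceCell<bool>`, so the path-suffix check runs at most once per `Checker` no matter how many rules ask. A small, self-contained sketch of that lazy-caching pattern with `std::cell::OnceCell` (a simplified stand-in struct, not the real `Checker`):

// A minimal sketch of lazily caching a derived boolean with
// `std::cell::OnceCell`, as in the `in_init_module` change above.
// `FileChecker` is a simplified stand-in for the real `Checker`.
use std::cell::OnceCell;
use std::path::Path;

struct FileChecker<'a> {
    path: &'a Path,
    in_init_module: OnceCell<bool>,
}

impl<'a> FileChecker<'a> {
    fn new(path: &'a Path) -> Self {
        Self {
            path,
            in_init_module: OnceCell::new(),
        }
    }

    /// Returns whether the file under analysis is an `__init__.py`,
    /// computing the answer on first use and reusing it afterwards.
    fn in_init_module(&self) -> bool {
        *self
            .in_init_module
            .get_or_init(|| self.path.ends_with("__init__.py"))
    }
}

fn main() {
    let checker = FileChecker::new(Path::new("pkg/__init__.py"));
    // Repeated calls reuse the cached value; the closure runs only once.
    assert!(checker.in_init_module());
    assert!(checker.in_init_module());
    println!("in __init__.py: {}", checker.in_init_module());
}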

@@ -1060,6 +1060,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Ruff, "064") => rules::ruff::rules::NonOctalPermissions,
(Ruff, "065") => rules::ruff::rules::LoggingEagerConversion,
(Ruff, "066") => rules::ruff::rules::PropertyWithoutReturn,
(Ruff, "067") => rules::ruff::rules::NonEmptyInitModule,
(Ruff, "100") => rules::ruff::rules::UnusedNOQA,
(Ruff, "101") => rules::ruff::rules::RedirectedNOQA,
@@ -1123,6 +1124,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Airflow, "002") => rules::airflow::rules::AirflowDagNoScheduleArgument,
(Airflow, "301") => rules::airflow::rules::Airflow3Removal,
(Airflow, "302") => rules::airflow::rules::Airflow3MovedToProvider,
(Airflow, "303") => rules::airflow::rules::Airflow3IncompatibleFunctionSignature,
(Airflow, "311") => rules::airflow::rules::Airflow3SuggestedUpdate,
(Airflow, "312") => rules::airflow::rules::Airflow3SuggestedToMoveToProvider,

View File
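The registration hunks above extend the linter's code-to-rule table with the new codes ("067" for Ruff and "303" for Airflow). A toy sketch of such a lookup — hypothetical enum and function, not ruff's actual `code_to_rule` — where unknown codes fall through to `None`:

// A toy sketch (hypothetical names, not ruff's real registry) of the
// code-to-rule mapping that the hunks above extend.
#[derive(Debug)]
enum Rule {
    NonEmptyInitModule,
    Airflow3IncompatibleFunctionSignature,
}

fn code_to_rule(linter: &str, code: &str) -> Option<Rule> {
    match (linter, code) {
        ("Ruff", "067") => Some(Rule::NonEmptyInitModule),
        ("Airflow", "303") => Some(Rule::Airflow3IncompatibleFunctionSignature),
        _ => None,
    }
}

fn main() {
    println!("{:?}", code_to_rule("Ruff", "067")); // Some(NonEmptyInitModule)
    println!("{:?}", code_to_rule("Ruff", "999")); // None
}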

@@ -1,6 +1,5 @@
---
source: crates/ruff_linter/src/comments/shebang.rs
expression: "ShebangDirective::try_extract(source)"
snapshot_kind: text
---
None

View File

@@ -1,6 +1,5 @@
---
source: crates/ruff_linter/src/comments/shebang.rs
expression: "ShebangDirective::try_extract(source)"
snapshot_kind: text
---
None

View File

@@ -1,7 +1,6 @@
---
source: crates/ruff_linter/src/comments/shebang.rs
expression: "ShebangDirective::try_extract(source)"
snapshot_kind: text
---
Some(
ShebangDirective(

View File

@@ -1,7 +1,6 @@
---
source: crates/ruff_linter/src/comments/shebang.rs
expression: "ShebangDirective::try_extract(source)"
snapshot_kind: text
---
Some(
ShebangDirective(

View File

@@ -1,6 +1,5 @@
---
source: crates/ruff_linter/src/comments/shebang.rs
expression: "ShebangDirective::try_extract(source)"
snapshot_kind: text
---
None

View File

@@ -18,7 +18,7 @@ use crate::registry::{Linter, RuleNamespace};
/// An emitter for producing SARIF 2.1.0-compliant JSON output.
///
/// Static Analysis Results Interchange Format (SARIF) is a standard format
/// for static analysis results. For full specfification, see:
/// for static analysis results. For full specification, see:
/// [SARIF 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/sarif-v2.1.0.html)
pub struct SarifEmitter;

View File

@@ -1,7 +1,6 @@
---
source: crates/ruff_linter/src/message/grouped.rs
expression: content
snapshot_kind: text
---
fib.py:
1:8 F401 `os` imported but unused

View File

@@ -1326,7 +1326,7 @@ mod tests {
fn noqa_all() {
let source = "# noqa";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Ok(
Some(
NoqaLexerOutput {
@@ -1347,7 +1347,7 @@ mod tests {
fn noqa_no_code() {
let source = "# noqa:";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -1359,7 +1359,7 @@ mod tests {
fn noqa_no_code_invalid_suffix() {
let source = "# noqa: foo";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -1371,7 +1371,7 @@ mod tests {
fn noqa_no_code_trailing_content() {
let source = "# noqa: # Foo";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -1383,7 +1383,7 @@ mod tests {
fn malformed_code_1() {
let source = "# noqa: F";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -1422,7 +1422,7 @@ mod tests {
fn malformed_code_3() {
let source = "# noqa: RUF001F";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
InvalidCodeSuffix,
)
@@ -1492,7 +1492,7 @@ mod tests {
fn noqa_all_case_insensitive() {
let source = "# NOQA";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Ok(
Some(
NoqaLexerOutput {
@@ -1625,7 +1625,7 @@ mod tests {
fn noqa_all_no_space() {
let source = "#noqa";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Ok(
Some(
NoqaLexerOutput {
@@ -1704,7 +1704,7 @@ mod tests {
fn noqa_all_multi_space() {
let source = "# noqa";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Ok(
Some(
NoqaLexerOutput {
@@ -1837,7 +1837,7 @@ mod tests {
fn noqa_all_leading_comment() {
let source = "# Some comment describing the noqa # noqa";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Ok(
Some(
NoqaLexerOutput {
@@ -1916,7 +1916,7 @@ mod tests {
fn noqa_all_trailing_comment() {
let source = "# noqa # Some comment describing the noqa";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Ok(
Some(
NoqaLexerOutput {
@@ -1995,7 +1995,7 @@ mod tests {
fn noqa_invalid_codes() {
let source = "# noqa: unused-import, F401, some other code";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -2139,7 +2139,7 @@ mod tests {
fn noqa_code_invalid_code_suffix() {
let source = "# noqa: F401abc";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
InvalidCodeSuffix,
)
@@ -2151,7 +2151,7 @@ mod tests {
fn noqa_invalid_suffix() {
let source = "# noqa[F401]";
let directive = lex_inline_noqa(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
InvalidSuffix,
)
@@ -2163,7 +2163,7 @@ mod tests {
fn flake8_exemption_all() {
let source = "# flake8: noqa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2184,7 +2184,7 @@ mod tests {
fn flake8_noqa_no_code() {
let source = "# flake8: noqa:";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
MissingCodes,
)
@@ -2196,7 +2196,7 @@ mod tests {
fn flake8_noqa_no_code_invalid_suffix() {
let source = "# flake8: noqa: foo";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
MissingCodes,
)
@@ -2208,7 +2208,7 @@ mod tests {
fn flake8_noqa_no_code_trailing_content() {
let source = "# flake8: noqa: # Foo";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
MissingCodes,
)
@@ -2220,7 +2220,7 @@ mod tests {
fn flake8_malformed_code_1() {
let source = "# flake8: noqa: F";
let directive = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -2259,7 +2259,7 @@ mod tests {
fn flake8_malformed_code_3() {
let source = "# flake8: noqa: RUF001F";
let directive = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
InvalidCodeSuffix,
)
@@ -2271,7 +2271,7 @@ mod tests {
fn ruff_exemption_all() {
let source = "# ruff: noqa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2292,7 +2292,7 @@ mod tests {
fn ruff_noqa_no_code() {
let source = "# ruff: noqa:";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
MissingCodes,
)
@@ -2304,7 +2304,7 @@ mod tests {
fn ruff_noqa_no_code_invalid_suffix() {
let source = "# ruff: noqa: foo";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
MissingCodes,
)
@@ -2316,7 +2316,7 @@ mod tests {
fn ruff_noqa_no_code_trailing_content() {
let source = "# ruff: noqa: # Foo";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
MissingCodes,
)
@@ -2328,7 +2328,7 @@ mod tests {
fn ruff_malformed_code_1() {
let source = "# ruff: noqa: F";
let directive = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
MissingCodes,
)
@@ -2367,7 +2367,7 @@ mod tests {
fn ruff_malformed_code_3() {
let source = "# ruff: noqa: RUF001F";
let directive = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(directive, @r"
assert_debug_snapshot!(directive, @"
Err(
InvalidCodeSuffix,
)
@@ -2379,7 +2379,7 @@ mod tests {
fn flake8_exemption_all_no_space() {
let source = "#flake8:noqa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2400,7 +2400,7 @@ mod tests {
fn ruff_exemption_all_no_space() {
let source = "#ruff:noqa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2619,7 +2619,7 @@ mod tests {
fn ruff_exemption_invalid_code_suffix() {
let source = "# ruff: noqa: F401abc";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Err(
InvalidCodeSuffix,
)
@@ -2685,7 +2685,7 @@ mod tests {
fn ruff_exemption_all_leading_comment() {
let source = "# Leading comment # ruff: noqa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2706,7 +2706,7 @@ mod tests {
fn ruff_exemption_all_trailing_comment() {
let source = "# ruff: noqa # Trailing comment";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2754,7 +2754,7 @@ mod tests {
fn ruff_exemption_all_trailing_comment_no_space() {
let source = "# ruff: noqa# Trailing comment";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2775,7 +2775,7 @@ mod tests {
fn ruff_exemption_all_trailing_comment_no_hash() {
let source = "# ruff: noqa Trailing comment";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2823,7 +2823,7 @@ mod tests {
fn flake8_exemption_all_case_insensitive() {
let source = "# flake8: NoQa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {
@@ -2844,7 +2844,7 @@ mod tests {
fn ruff_exemption_all_case_insensitive() {
let source = "# ruff: NoQa";
let exemption = lex_file_exemption(TextRange::up_to(source.text_len()), source);
assert_debug_snapshot!(exemption, @r"
assert_debug_snapshot!(exemption, @"
Ok(
Some(
NoqaLexerOutput {

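The long run of hunks above only changes inline snapshot literals from raw strings (`@r"..."`) to plain strings (`@"..."`); a raw string is only needed when the snapshot text itself contains characters such as `"` or `\`. A minimal sketch of the two forms, assuming `insta` is available as a dev-dependency:

// A minimal sketch of insta inline snapshots, assuming `insta` as a
// dev-dependency. Plain string literals suffice when the snapshot text
// contains no quotes or backslashes; raw strings avoid escaping otherwise.
#[cfg(test)]
mod tests {
    #[test]
    fn inline_snapshots() {
        // The snapshot text is the `Debug` representation of the value.
        insta::assert_debug_snapshot!(2 + 2, @"4");
        // The `Debug` form of a `&str` includes surrounding quotes, so a raw
        // string literal avoids having to escape them in the snapshot.
        insta::assert_debug_snapshot!("noqa", @r#""noqa""#);
    }
}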
Some files were not shown because too many files have changed in this diff.