Compare commits

..

2 Commits

Author SHA1 Message Date
Dhruv Manilawala
ce4d4ae6ac Add methods to iter over format spec elements 2024-05-13 14:17:23 +05:30
Dhruv Manilawala
128414cd95 Add Iterator impl for StringLike parts 2024-05-13 13:48:54 +05:30
306 changed files with 2725 additions and 6149 deletions

View File

@@ -167,9 +167,6 @@ jobs:
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
cargo nextest run --all-features --profile ci
cargo test --all-features --doc

View File

@@ -1,80 +0,0 @@
name: Sync typeshed
on:
workflow_dispatch:
schedule:
# Run on the 1st and the 15th of every month:
- cron: "0 0 1,15 * *"
env:
FORCE_COLOR: 1
GH_TOKEN: ${{ github.token }}
jobs:
sync:
name: Sync typeshed
runs-on: ubuntu-latest
timeout-minutes: 20
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
name: Checkout Ruff
with:
path: ruff
- uses: actions/checkout@v4
name: Checkout typeshed
with:
repository: python/typeshed
path: typeshed
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/red_knot/vendor/typeshed
mkdir ruff/crates/red_knot/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}
run: |
cd ruff
git checkout -b typeshedbot/sync-typeshed
git add .
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
- name: Create a PR
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
run: |
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
create-issue-on-failure:
name: Create an issue if the typeshed sync failed
runs-on: ubuntu-latest
needs: [sync]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
permissions:
issues: write
steps:
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.create({
owner: "astral-sh",
repo: "ruff",
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
})

View File

@@ -1,65 +1,5 @@
# Changelog
## 0.4.5
### Ruff's language server is now in Beta
`v0.4.5` marks the official Beta release of `ruff server`, an integrated language server built into Ruff.
`ruff server` supports the same feature set as `ruff-lsp`, powering linting, formatting, and
code fixes in Ruff's editor integrations -- but with superior performance and
no installation required. We'd love your feedback!
You can enable `ruff server` in the [VS Code extension](https://github.com/astral-sh/ruff-vscode?tab=readme-ov-file#enabling-the-rust-based-language-server) today.
To read more about this exciting milestone, check out our [blog post](https://astral.sh/blog/ruff-v0.4.5)!
### Rule changes
- \[`flake8-future-annotations`\] Reword `future-rewritable-type-annotation` (`FA100`) message ([#11381](https://github.com/astral-sh/ruff/pull/11381))
- \[`pycodestyle`\] Consider soft keywords for `E27` rules ([#11446](https://github.com/astral-sh/ruff/pull/11446))
- \[`pyflakes`\] Recommend adding unused import bindings to `__all__` ([#11314](https://github.com/astral-sh/ruff/pull/11314))
- \[`pyflakes`\] Update documentation and deprecate `ignore_init_module_imports` ([#11436](https://github.com/astral-sh/ruff/pull/11436))
- \[`pyupgrade`\] Mark quotes as unnecessary for non-evaluated annotations ([#11485](https://github.com/astral-sh/ruff/pull/11485))
### Formatter
- Avoid multiline quotes warning with `quote-style = preserve` ([#11490](https://github.com/astral-sh/ruff/pull/11490))
### Server
- Support Jupyter Notebook files ([#11206](https://github.com/astral-sh/ruff/pull/11206))
- Support `noqa` comment code actions ([#11276](https://github.com/astral-sh/ruff/pull/11276))
- Fix automatic configuration reloading ([#11492](https://github.com/astral-sh/ruff/pull/11492))
- Fix several issues with configuration in Neovim and Helix ([#11497](https://github.com/astral-sh/ruff/pull/11497))
### CLI
- Add `--output-format` as a CLI option for `ruff config` ([#11438](https://github.com/astral-sh/ruff/pull/11438))
### Bug fixes
- Avoid `PLE0237` for property with setter ([#11377](https://github.com/astral-sh/ruff/pull/11377))
- Avoid `TCH005` for `if` stmt with `elif`/`else` block ([#11376](https://github.com/astral-sh/ruff/pull/11376))
- Avoid flagging `__future__` annotations as required for non-evaluated type annotations ([#11414](https://github.com/astral-sh/ruff/pull/11414))
- Check for ruff executable in 'bin' directory as installed by 'pip install --target'. ([#11450](https://github.com/astral-sh/ruff/pull/11450))
- Sort edits prior to deduplicating in quotation fix ([#11452](https://github.com/astral-sh/ruff/pull/11452))
- Treat escaped newline as valid sequence ([#11465](https://github.com/astral-sh/ruff/pull/11465))
- \[`flake8-pie`\] Preserve parentheses in `unnecessary-dict-kwargs` ([#11372](https://github.com/astral-sh/ruff/pull/11372))
- \[`pylint`\] Ignore `__slots__` with dynamic values ([#11488](https://github.com/astral-sh/ruff/pull/11488))
- \[`pylint`\] Remove `try` body from branch counting ([#11487](https://github.com/astral-sh/ruff/pull/11487))
- \[`refurb`\] Respect operator precedence in `FURB110` ([#11464](https://github.com/astral-sh/ruff/pull/11464))
### Documentation
- Add `--preview` to the README ([#11395](https://github.com/astral-sh/ruff/pull/11395))
- Add Python 3.13 to list of allowed Python versions ([#11411](https://github.com/astral-sh/ruff/pull/11411))
- Simplify Neovim setup documentation ([#11489](https://github.com/astral-sh/ruff/pull/11489))
- Update CONTRIBUTING.md to reflect the new parser ([#11434](https://github.com/astral-sh/ruff/pull/11434))
- Update server documentation with new migration guide ([#11499](https://github.com/astral-sh/ruff/pull/11499))
- \[`pycodestyle`\] Clarify motivation for `E713` and `E714` ([#11483](https://github.com/astral-sh/ruff/pull/11483))
- \[`pyflakes`\] Update docs to describe WAI behavior (F541) ([#11362](https://github.com/astral-sh/ruff/pull/11362))
- \[`pylint`\] Clearly indicate what is counted as a branch ([#11423](https://github.com/astral-sh/ruff/pull/11423))
## 0.4.4
### Preview features

View File

@@ -637,11 +637,11 @@ Otherwise, follow the instructions from the linux section.
`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
utils with it:
- `cargo dev print-ast <file>`: Print the AST of a python file using Ruff's
[Python parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser).
For `if True: pass # comment`, you can see the syntax tree, the byte offsets for start and
stop of each node and also how the `:` token, the comment and whitespace are not represented
anymore:
- `cargo dev print-ast <file>`: Print the AST of a python file using the
[RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
for start and stop of each node and also how the `:` token, the comment and whitespace are not
represented anymore:
```text
[

8
Cargo.lock generated
View File

@@ -1300,7 +1300,8 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
source = "git+https://github.com/astral-sh/lsp-types.git?rev=3512a9f#3512a9f33eadc5402cfab1b8f7340824c8ca1439"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e34d33a8e9b006cd3fc4fe69a921affa097bae4bb65f76271f4644f9a334365"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -1939,7 +1940,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.4.5"
version = "0.4.4"
dependencies = [
"anyhow",
"argfile",
@@ -2100,7 +2101,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.4.5"
version = "0.4.4"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
@@ -2376,7 +2377,6 @@ dependencies = [
"ruff_diagnostics",
"ruff_formatter",
"ruff_linter",
"ruff_notebook",
"ruff_python_ast",
"ruff_python_codegen",
"ruff_python_formatter",

View File

@@ -12,28 +12,6 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
ruff_notebook = { path = "crates/ruff_notebook" }
ruff_python_ast = { path = "crates/ruff_python_ast" }
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
ruff_python_index = { path = "crates/ruff_python_index" }
ruff_python_literal = { path = "crates/ruff_python_literal" }
ruff_python_parser = { path = "crates/ruff_python_parser" }
ruff_python_semantic = { path = "crates/ruff_python_semantic" }
ruff_python_stdlib = { path = "crates/ruff_python_stdlib" }
ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
@@ -81,7 +59,7 @@ libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git="https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = ["proposed"] }
lsp-types = { version = "0.95.0", features = ["proposed"] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }

View File

@@ -152,7 +152,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.4.5
rev: v0.4.4
hooks:
# Run the linter.
- id: ruff
@@ -266,11 +266,6 @@ The remaining configuration options can be provided through a catch-all `--confi
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
```
To opt in to the latest lint rules, formatter style changes, interface updates, and more, enable
[preview mode](https://docs.astral.sh/ruff/rules/) by setting `preview = true` in your configuration
file or passing `--preview` on the command line. Preview mode enables a collection of unstable
features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
@@ -433,7 +428,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [Mypy](https://github.com/python/mypy)
- [Nautobot](https://github.com/nautobot/nautobot)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)

View File

@@ -12,11 +12,11 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ruff_python_parser = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_text_size = { workspace = true }
ruff_index = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_index = { path = "../ruff_index" }
ruff_notebook = { path = "../ruff_notebook" }
anyhow = { workspace = true }
bitflags = { workspace = true }

View File

@@ -6,4 +6,13 @@ The Red Knot crate contains code working towards multifile analysis, type infere
Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
Updating the vendored stubs is currently done manually. On a Unix machine, follow the following steps (if you have a typeshed clone in a `typeshed` directory, and a Ruff clone in a `ruff` directory):
```shell
rm -rf ruff/crates/red_knot/vendor/typeshed
mkdir ruff/crates/red_knot/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
```

View File

@@ -1 +1 @@
a9d7e861f7a46ae7acd56569326adef302e10f29
2d33fe212221a05661c0db5215a91cf3d7b7f072

View File

@@ -1,18 +0,0 @@
# Implicit protocols used in importlib.
# We intentionally omit deprecated and optional methods.
from collections.abc import Sequence
from importlib.machinery import ModuleSpec
from types import ModuleType
from typing import Protocol
__all__ = ["LoaderProtocol", "MetaPathFinderProtocol", "PathEntryFinderProtocol"]
class LoaderProtocol(Protocol):
def load_module(self, fullname: str, /) -> ModuleType: ...
class MetaPathFinderProtocol(Protocol):
def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ..., /) -> ModuleSpec | None: ...
class PathEntryFinderProtocol(Protocol):
def find_spec(self, fullname: str, target: ModuleType | None = ..., /) -> ModuleSpec | None: ...

View File

@@ -31,7 +31,7 @@ from _typeshed import (
)
from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
from types import CellType, CodeType, TracebackType
from types import CodeType, TracebackType, _Cell
# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi
from typing import ( # noqa: Y022
@@ -951,7 +951,7 @@ class tuple(Sequence[_T_co]):
class function:
# Make sure this class definition stays roughly in line with `types.FunctionType`
@property
def __closure__(self) -> tuple[CellType, ...] | None: ...
def __closure__(self) -> tuple[_Cell, ...] | None: ...
__code__: CodeType
__defaults__: tuple[Any, ...] | None
__dict__: dict[str, Any]
@@ -1333,7 +1333,7 @@ if sys.version_info >= (3, 11):
locals: Mapping[str, object] | None = None,
/,
*,
closure: tuple[CellType, ...] | None = None,
closure: tuple[_Cell, ...] | None = None,
) -> None: ...
else:
@@ -1794,7 +1794,7 @@ def __import__(
fromlist: Sequence[str] = (),
level: int = 0,
) -> types.ModuleType: ...
def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ...
def __build_class__(func: Callable[[], _Cell | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ...
if sys.version_info >= (3, 10):
from types import EllipsisType

View File

@@ -1,5 +1,3 @@
import sys
from _typeshed import StrOrBytesPath
from collections.abc import Iterator, MutableMapping
from types import TracebackType
from typing import Literal
@@ -93,10 +91,5 @@ class _error(Exception): ...
error: tuple[type[_error], type[OSError]]
if sys.version_info >= (3, 11):
def whichdb(filename: StrOrBytesPath) -> str | None: ...
def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ...
else:
def whichdb(filename: str) -> str | None: ...
def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ...
def whichdb(filename: str) -> str | None: ...
def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ...

View File

@@ -1,5 +1,3 @@
import sys
from _typeshed import StrOrBytesPath
from collections.abc import Iterator, MutableMapping
from types import TracebackType
from typing_extensions import Self, TypeAlias
@@ -30,8 +28,4 @@ class _Database(MutableMapping[_KeyType, bytes]):
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> None: ...
if sys.version_info >= (3, 11):
def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: ...
else:
def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ...
def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ...

View File

@@ -1,5 +1,5 @@
import sys
from _typeshed import ReadOnlyBuffer, StrOrBytesPath
from _typeshed import ReadOnlyBuffer
from types import TracebackType
from typing import TypeVar, overload
from typing_extensions import Self, TypeAlias
@@ -38,7 +38,4 @@ if sys.platform != "win32":
__new__: None # type: ignore[assignment]
__init__: None # type: ignore[assignment]
if sys.version_info >= (3, 11):
def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
else:
def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...

View File

@@ -1,5 +1,5 @@
import sys
from _typeshed import ReadOnlyBuffer, StrOrBytesPath
from _typeshed import ReadOnlyBuffer
from types import TracebackType
from typing import TypeVar, overload
from typing_extensions import Self, TypeAlias
@@ -34,7 +34,4 @@ if sys.platform != "win32":
__new__: None # type: ignore[assignment]
__init__: None # type: ignore[assignment]
if sys.version_info >= (3, 11):
def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...
else:
def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...
def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...

View File

@@ -64,7 +64,7 @@ class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta):
# The base classes differ starting in 3.10:
if sys.version_info >= (3, 10):
# Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol
# Please keep in sync with sys._MetaPathFinder
class MetaPathFinder(metaclass=ABCMeta):
if sys.version_info < (3, 12):
def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ...
@@ -85,7 +85,7 @@ if sys.version_info >= (3, 10):
def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ...
else:
# Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol
# Please keep in sync with sys._MetaPathFinder
class MetaPathFinder(Finder):
def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ...
def invalidate_caches(self) -> None: ...

View File

@@ -3,7 +3,6 @@ import importlib.machinery
import sys
import types
from _typeshed import ReadableBuffer, StrOrBytesPath
from _typeshed.importlib import LoaderProtocol
from collections.abc import Callable
from typing import Any
from typing_extensions import ParamSpec
@@ -24,13 +23,13 @@ def source_from_cache(path: str) -> str: ...
def decode_source(source_bytes: ReadableBuffer) -> str: ...
def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ...
def spec_from_loader(
name: str, loader: LoaderProtocol | None, *, origin: str | None = None, is_package: bool | None = None
name: str, loader: importlib.abc.Loader | None, *, origin: str | None = None, is_package: bool | None = None
) -> importlib.machinery.ModuleSpec | None: ...
def spec_from_file_location(
name: str,
location: StrOrBytesPath | None = None,
*,
loader: LoaderProtocol | None = None,
loader: importlib.abc.Loader | None = None,
submodule_search_locations: list[str] | None = ...,
) -> importlib.machinery.ModuleSpec | None: ...
def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ...

View File

@@ -1,7 +1,7 @@
import sys
from _typeshed import SupportsRead
from _typeshed.importlib import LoaderProtocol, MetaPathFinderProtocol, PathEntryFinderProtocol
from collections.abc import Callable, Iterable, Iterator
from importlib.abc import Loader, MetaPathFinder, PathEntryFinder
from typing import IO, Any, NamedTuple, TypeVar
from typing_extensions import deprecated
@@ -23,7 +23,7 @@ if sys.version_info < (3, 12):
_PathT = TypeVar("_PathT", bound=Iterable[str])
class ModuleInfo(NamedTuple):
module_finder: MetaPathFinderProtocol | PathEntryFinderProtocol
module_finder: MetaPathFinder | PathEntryFinder
name: str
ispkg: bool
@@ -37,11 +37,11 @@ if sys.version_info < (3, 12):
def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ...
@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.")
def find_loader(fullname: str) -> LoaderProtocol | None: ...
def get_importer(path_item: str) -> PathEntryFinderProtocol | None: ...
def find_loader(fullname: str) -> Loader | None: ...
def get_importer(path_item: str) -> PathEntryFinder | None: ...
@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.")
def get_loader(module_or_name: str) -> LoaderProtocol | None: ...
def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: ...
def get_loader(module_or_name: str) -> Loader | None: ...
def iter_importers(fullname: str = "") -> Iterator[MetaPathFinder | PathEntryFinder]: ...
def iter_modules(path: Iterable[str] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ...
def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented
def walk_packages(

View File

@@ -1,5 +1,3 @@
import sys
from _typeshed import StrOrBytesPath
from collections.abc import Iterator, MutableMapping
from dbm import _TFlags
from types import TracebackType
@@ -43,17 +41,6 @@ class BsdDbShelf(Shelf[_VT]):
def last(self) -> tuple[str, _VT]: ...
class DbfilenameShelf(Shelf[_VT]):
if sys.version_info >= (3, 11):
def __init__(
self, filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False
) -> None: ...
else:
def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ...
def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ...
if sys.version_info >= (3, 11):
def open(
filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False
) -> Shelf[Any]: ...
else:
def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ...
def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ...

View File

@@ -474,13 +474,6 @@ if sys.version_info >= (3, 12):
ETHERTYPE_VLAN as ETHERTYPE_VLAN,
)
if sys.platform == "linux":
from _socket import ETH_P_ALL as ETH_P_ALL
if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin":
# FreeBSD >= 14.0
from _socket import PF_DIVERT as PF_DIVERT
# Re-exported from errno
EBADF: int
EAGAIN: int
@@ -532,9 +525,6 @@ class AddressFamily(IntEnum):
AF_BLUETOOTH = 32
if sys.platform == "win32" and sys.version_info >= (3, 12):
AF_HYPERV = 34
if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 12):
# FreeBSD >= 14.0
AF_DIVERT = 44
AF_INET = AddressFamily.AF_INET
AF_INET6 = AddressFamily.AF_INET6
@@ -587,9 +577,6 @@ if sys.platform != "win32" or sys.version_info >= (3, 9):
if sys.platform == "win32" and sys.version_info >= (3, 12):
AF_HYPERV = AddressFamily.AF_HYPERV
if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 12):
# FreeBSD >= 14.0
AF_DIVERT = AddressFamily.AF_DIVERT
class SocketKind(IntEnum):
SOCK_STREAM = 1

View File

@@ -1,8 +1,9 @@
import sys
from _typeshed import OptExcInfo, ProfileFunction, TraceFunction, structseq
from _typeshed.importlib import MetaPathFinderProtocol, PathEntryFinderProtocol
from builtins import object as _object
from collections.abc import AsyncGenerator, Callable, Sequence
from importlib.abc import PathEntryFinder
from importlib.machinery import ModuleSpec
from io import TextIOWrapper
from types import FrameType, ModuleType, TracebackType
from typing import Any, Final, Literal, NoReturn, Protocol, TextIO, TypeVar, final
@@ -14,6 +15,10 @@ _T = TypeVar("_T")
_ExitCode: TypeAlias = str | int | None
_OptExcInfo: TypeAlias = OptExcInfo # noqa: Y047 # TODO: obsolete, remove fall 2022 or later
# Intentionally omits one deprecated and one optional method of `importlib.abc.MetaPathFinder`
class _MetaPathFinder(Protocol):
def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ..., /) -> ModuleSpec | None: ...
# ----- sys variables -----
if sys.platform != "win32":
abiflags: str
@@ -39,13 +44,13 @@ if sys.version_info >= (3, 12):
last_exc: BaseException # or undefined.
maxsize: int
maxunicode: int
meta_path: list[MetaPathFinderProtocol]
meta_path: list[_MetaPathFinder]
modules: dict[str, ModuleType]
if sys.version_info >= (3, 10):
orig_argv: list[str]
path: list[str]
path_hooks: list[Callable[[str], PathEntryFinderProtocol]]
path_importer_cache: dict[str, PathEntryFinderProtocol | None]
path_hooks: list[Callable[[str], PathEntryFinder]]
path_importer_cache: dict[str, PathEntryFinder | None]
platform: str
if sys.version_info >= (3, 9):
platlibdir: str

View File

@@ -374,11 +374,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase):
def readlines(self, hint: int = ..., /) -> list[AnyStr]: ... # type: ignore[override]
def seek(self, offset: int, whence: int = ...) -> int: ...
def tell(self) -> int: ...
if sys.version_info >= (3, 11):
def truncate(self, size: int | None = None) -> int: ...
else:
def truncate(self, size: int | None = None) -> None: ... # type: ignore[override]
def truncate(self, size: int | None = None) -> None: ... # type: ignore[override]
@overload
def write(self: SpooledTemporaryFile[str], s: str) -> int: ...
@overload

View File

@@ -1,6 +1,5 @@
import sys
from _typeshed import SupportsKeysAndGetItem
from _typeshed.importlib import LoaderProtocol
from collections.abc import (
AsyncGenerator,
Awaitable,
@@ -17,7 +16,7 @@ from collections.abc import (
from importlib.machinery import ModuleSpec
# pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping
from typing import Any, ClassVar, Literal, Mapping, TypeVar, final, overload # noqa: Y022
from typing import Any, ClassVar, Literal, Mapping, Protocol, TypeVar, final, overload # noqa: Y022
from typing_extensions import ParamSpec, Self, TypeVarTuple, deprecated
__all__ = [
@@ -65,11 +64,18 @@ _T2 = TypeVar("_T2")
_KT = TypeVar("_KT")
_VT_co = TypeVar("_VT_co", covariant=True)
@final
class _Cell:
def __new__(cls, contents: object = ..., /) -> Self: ...
def __eq__(self, value: object, /) -> bool: ...
__hash__: ClassVar[None] # type: ignore[assignment]
cell_contents: Any
# Make sure this class definition stays roughly in line with `builtins.function`
@final
class FunctionType:
@property
def __closure__(self) -> tuple[CellType, ...] | None: ...
def __closure__(self) -> tuple[_Cell, ...] | None: ...
__code__: CodeType
__defaults__: tuple[Any, ...] | None
__dict__: dict[str, Any]
@@ -92,7 +98,7 @@ class FunctionType:
globals: dict[str, Any],
name: str | None = ...,
argdefs: tuple[object, ...] | None = ...,
closure: tuple[CellType, ...] | None = ...,
closure: tuple[_Cell, ...] | None = ...,
) -> Self: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
@overload
@@ -312,12 +318,15 @@ class SimpleNamespace:
def __setattr__(self, name: str, value: Any, /) -> None: ...
def __delattr__(self, name: str, /) -> None: ...
class _LoaderProtocol(Protocol):
def load_module(self, fullname: str, /) -> ModuleType: ...
class ModuleType:
__name__: str
__file__: str | None
@property
def __dict__(self) -> dict[str, Any]: ... # type: ignore[override]
__loader__: LoaderProtocol | None
__loader__: _LoaderProtocol | None
__package__: str | None
__path__: MutableSequence[str]
__spec__: ModuleSpec | None
@@ -327,12 +336,6 @@ class ModuleType:
# using `builtins.__import__` or `importlib.import_module` less painful
def __getattr__(self, name: str) -> Any: ...
@final
class CellType:
def __new__(cls, contents: object = ..., /) -> Self: ...
__hash__: ClassVar[None] # type: ignore[assignment]
cell_contents: Any
_YieldT_co = TypeVar("_YieldT_co", covariant=True)
_SendT_contra = TypeVar("_SendT_contra", contravariant=True)
_ReturnT_co = TypeVar("_ReturnT_co", covariant=True)
@@ -402,7 +405,7 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_contra, _ReturnT_co]):
@final
class MethodType:
@property
def __closure__(self) -> tuple[CellType, ...] | None: ... # inherited from the added function
def __closure__(self) -> tuple[_Cell, ...] | None: ... # inherited from the added function
@property
def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function
@property
@@ -567,6 +570,8 @@ def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Await
@overload
def coroutine(func: _Fn) -> _Fn: ...
CellType = _Cell
if sys.version_info >= (3, 9):
class GenericAlias:
@property

View File

@@ -8,6 +8,7 @@ import typing_extensions
from _collections_abc import dict_items, dict_keys, dict_values
from _typeshed import IdentityFunction, ReadableBuffer, SupportsKeysAndGetItem
from abc import ABCMeta, abstractmethod
from contextlib import AbstractAsyncContextManager, AbstractContextManager
from re import Match as Match, Pattern as Pattern
from types import (
BuiltinFunctionType,
@@ -23,10 +24,10 @@ from types import (
)
from typing_extensions import Never as _Never, ParamSpec as _ParamSpec
if sys.version_info >= (3, 9):
from types import GenericAlias
if sys.version_info >= (3, 10):
from types import UnionType
if sys.version_info >= (3, 9):
from types import GenericAlias
__all__ = [
"AbstractSet",
@@ -401,8 +402,8 @@ class Reversible(Iterable[_T_co], Protocol[_T_co]):
def __reversed__(self) -> Iterator[_T_co]: ...
_YieldT_co = TypeVar("_YieldT_co", covariant=True)
_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=None)
_ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None)
_SendT_contra = TypeVar("_SendT_contra", contravariant=True)
_ReturnT_co = TypeVar("_ReturnT_co", covariant=True)
class Generator(Iterator[_YieldT_co], Generic[_YieldT_co, _SendT_contra, _ReturnT_co]):
def __next__(self) -> _YieldT_co: ...
@@ -427,28 +428,24 @@ class Generator(Iterator[_YieldT_co], Generic[_YieldT_co, _SendT_contra, _Return
@property
def gi_yieldfrom(self) -> Generator[Any, Any, Any] | None: ...
# NOTE: Prior to Python 3.13 these aliases are lacking the second _ExitT_co parameter
if sys.version_info >= (3, 13):
from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager
else:
from contextlib import AbstractAsyncContextManager, AbstractContextManager
# NOTE: Technically we would like this to be able to accept a second parameter as well, just
# like it's counterpart in contextlib, however `typing._SpecialGenericAlias` enforces the
# correct number of arguments at runtime, so we would be hiding runtime errors.
@runtime_checkable
class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): ...
@runtime_checkable
class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): ...
@runtime_checkable
class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): ...
# NOTE: Technically we would like this to be able to accept a second parameter as well, just
# like it's counterpart in contextlib, however `typing._SpecialGenericAlias` enforces the
# correct number of arguments at runtime, so we would be hiding runtime errors.
@runtime_checkable
class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): ...
@runtime_checkable
class Awaitable(Protocol[_T_co]):
@abstractmethod
def __await__(self) -> Generator[Any, Any, _T_co]: ...
# Non-default variations to accommodate couroutines, and `AwaitableGenerator` having a 4th type parameter.
_SendT_contra_nd = TypeVar("_SendT_contra_nd", contravariant=True)
_ReturnT_co_nd = TypeVar("_ReturnT_co_nd", covariant=True)
class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd]):
class Coroutine(Awaitable[_ReturnT_co], Generic[_YieldT_co, _SendT_contra, _ReturnT_co]):
__name__: str
__qualname__: str
@property
@@ -460,7 +457,7 @@ class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd,
@property
def cr_running(self) -> bool: ...
@abstractmethod
def send(self, value: _SendT_contra_nd, /) -> _YieldT_co: ...
def send(self, value: _SendT_contra, /) -> _YieldT_co: ...
@overload
@abstractmethod
def throw(
@@ -476,9 +473,9 @@ class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd,
# The parameters correspond to Generator, but the 4th is the original type.
@type_check_only
class AwaitableGenerator(
Awaitable[_ReturnT_co_nd],
Generator[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd],
Generic[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd, _S],
Awaitable[_ReturnT_co],
Generator[_YieldT_co, _SendT_contra, _ReturnT_co],
Generic[_YieldT_co, _SendT_contra, _ReturnT_co, _S],
metaclass=ABCMeta,
): ...

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.4.5"
version = "0.4.4"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -13,17 +13,17 @@ readme = "../../README.md"
default-run = "ruff"
[dependencies]
ruff_cache = { workspace = true }
ruff_diagnostics = { workspace = true }
ruff_linter = { workspace = true, features = ["clap"] }
ruff_macros = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_formatter = { workspace = true }
ruff_server = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_workspace = { workspace = true }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_macros = { path = "../ruff_macros" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_server = { path = "../ruff_server" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
argfile = { workspace = true }
@@ -60,7 +60,7 @@ wild = { workspace = true }
[dev-dependencies]
# Enable test rules during development
ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
insta = { workspace = true, features = ["filters", "json"] }

View File

@@ -111,13 +111,7 @@ pub enum Command {
output_format: HelpFormat,
},
/// List or describe the available configuration options.
Config {
/// Config key to show
option: Option<String>,
/// Output format
#[arg(long, value_enum, default_value = "text")]
output_format: HelpFormat,
},
Config { option: Option<String> },
/// List all supported upstream linters.
Linter {
/// Output format

View File

@@ -1,38 +1,19 @@
use anyhow::{anyhow, Result};
use crate::args::HelpFormat;
use ruff_workspace::options::Options;
use ruff_workspace::options_base::OptionsMetadata;
#[allow(clippy::print_stdout)]
pub(crate) fn config(key: Option<&str>, format: HelpFormat) -> Result<()> {
pub(crate) fn config(key: Option<&str>) -> Result<()> {
match key {
None => {
let metadata = Options::metadata();
match format {
HelpFormat::Text => {
println!("{metadata}");
}
HelpFormat::Json => {
println!("{}", &serde_json::to_string_pretty(&metadata)?);
}
}
}
None => print!("{}", Options::metadata()),
Some(key) => match Options::metadata().find(key) {
None => {
return Err(anyhow!("Unknown option: {key}"));
}
Some(entry) => match format {
HelpFormat::Text => {
print!("{entry}");
}
HelpFormat::Json => {
println!("{}", &serde_json::to_string_pretty(&entry)?);
}
},
Some(entry) => {
print!("{entry}");
}
},
}
Ok(())

View File

@@ -857,20 +857,12 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
if setting.linter.rules.enabled(Rule::BadQuotesMultilineString)
&& setting.linter.flake8_quotes.multiline_quotes == Quote::Single
&& matches!(
setting.formatter.quote_style,
QuoteStyle::Single | QuoteStyle::Double
)
{
warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `\"double\"`.`");
}
if setting.linter.rules.enabled(Rule::BadQuotesDocstring)
&& setting.linter.flake8_quotes.docstring_quotes == Quote::Single
&& matches!(
setting.formatter.quote_style,
QuoteStyle::Single | QuoteStyle::Double
)
{
warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `\"double\"`.`");
}

View File

@@ -180,11 +180,8 @@ pub fn run(
}
Ok(ExitStatus::Success)
}
Command::Config {
option,
output_format,
} => {
commands::config::config(option.as_deref(), output_format)?;
Command::Config { option } => {
commands::config::config(option.as_deref())?;
Ok(ExitStatus::Success)
}
Command::Linter { output_format } => {

View File

@@ -1,55 +0,0 @@
//! Tests for the `ruff config` subcommand.
use std::process::Command;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";
#[test]
fn lint_select() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("config").arg("lint.select"), @r###"
success: true
exit_code: 0
----- stdout -----
A list of rule codes or prefixes to enable. Prefixes can specify exact
rules (like `F841`), entire categories (like `F`), or anything in
between.
When breaking ties between enabled and disabled rules (via `select` and
`ignore`, respectively), more specific prefixes override less
specific prefixes.
Default value: ["E4", "E7", "E9", "F"]
Type: list[RuleSelector]
Example usage:
```toml
# On top of the defaults (`E4`, E7`, `E9`, and `F`), enable flake8-bugbear (`B`) and flake8-quotes (`Q`).
select = ["E4", "E7", "E9", "F", "B", "Q"]
```
----- stderr -----
"###
);
}
#[test]
fn lint_select_json() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("config").arg("lint.select").arg("--output-format").arg("json"), @r###"
success: true
exit_code: 0
----- stdout -----
{
"doc": "A list of rule codes or prefixes to enable. Prefixes can specify exact\nrules (like `F841`), entire categories (like `F`), or anything in\nbetween.\n\nWhen breaking ties between enabled and disabled rules (via `select` and\n`ignore`, respectively), more specific prefixes override less\nspecific prefixes.",
"default": "[\"E4\", \"E7\", \"E9\", \"F\"]",
"value_type": "list[RuleSelector]",
"scope": null,
"example": "# On top of the defaults (`E4`, E7`, `E9`, and `F`), enable flake8-bugbear (`B`) and flake8-quotes (`Q`).\nselect = [\"E4\", \"E7\", \"E9\", \"F\", \"B\", \"Q\"]",
"deprecated": null
}
----- stderr -----
"###
);
}

View File

@@ -1038,48 +1038,6 @@ def say_hy(name: str):
Ok(())
}
#[test]
fn valid_linter_options_preserve() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint]
select = ["Q"]
[lint.flake8-quotes]
inline-quotes = "single"
docstring-quotes = "single"
multiline-quotes = "single"
[format]
quote-style = "preserve"
"#,
)?;
let test_path = tempdir.path().join("test.py");
fs::write(
&test_path,
r#"
def say_hy(name: str):
print(f"Hy {name}")"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--no-cache", "--config"])
.arg(&ruff_toml)
.arg(test_path), @r###"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
"###);
Ok(())
}
#[test]
fn all_rules_default_options() -> Result<()> {
let tempdir = TempDir::new()?;

View File

@@ -1414,7 +1414,7 @@ fn check_input_from_argfile() -> Result<()> {
fs::write(&file_a_path, b"import os")?;
fs::write(&file_b_path, b"print('hello, world!')")?;
// Create the input file for argfile to expand
// Create a the input file for argfile to expand
let input_file_path = tempdir.path().join("file_paths.txt");
fs::write(
&input_file_path,

View File

@@ -34,29 +34,12 @@ marking it as unused, as in:
from module import member as member
```
Alternatively, you can use `__all__` to declare a symbol as part of the module's
interface, as in:
```python
# __init__.py
import some_module
__all__ = [ "some_module"]
```
## Fix safety
Fixes to remove unused imports are safe, except in `__init__.py` files.
Applying fixes to `__init__.py` files is currently in preview. The fix offered depends on the
type of the unused import. Ruff will suggest a safe fix to export first-party imports with
either a redundant alias or, if already present in the file, an `__all__` entry. If multiple
`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix
to remove third-party and standard library imports -- the fix is unsafe because the module's
interface changes.
When `ignore_init_module_imports` is disabled, fixes can remove for unused imports in `__init__` files.
These fixes are considered unsafe because they can change the public interface.
## Example
```python
import numpy as np # unused import
@@ -66,14 +49,12 @@ def area(radius):
```
Use instead:
```python
def area(radius):
return 3.14 * radius**2
```
To check the availability of a module, use `importlib.util.find_spec`:
```python
from importlib.util import find_spec

View File

@@ -38,14 +38,14 @@ serde_json = { workspace = true }
url = { workspace = true }
ureq = { workspace = true }
criterion = { workspace = true, default-features = false }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
[dev-dependencies]
ruff_linter = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_formatter = { workspace = true }
ruff_python_index = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_linter = { path = "../ruff_linter" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
[lints]
workspace = true

View File

@@ -10,7 +10,7 @@ use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
use ruff_python_ast::PySourceType;
use ruff_python_parser::{parse_program_tokens, tokenize, Mode};
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -55,7 +55,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
&case,
|b, case| {
// Tokenize the source.
let tokens = tokenize(case.code(), Mode::Module);
let tokens: Vec<_> = lexer::lex(case.code(), Mode::Module).collect();
// Parse the source.
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();

View File

@@ -19,7 +19,7 @@ filetime = { workspace = true }
seahash = { workspace = true }
[dev-dependencies]
ruff_macros = { workspace = true }
ruff_macros = { path = "../ruff_macros" }
[lints]
workspace = true

View File

@@ -11,18 +11,18 @@ repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff = { workspace = true }
ruff_diagnostics = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_codegen = { workspace = true }
ruff_python_formatter = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_workspace = { workspace = true, features = ["schemars"] }
ruff = { path = "../ruff" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_formatter = { path = "../ruff_formatter" }
ruff_linter = { path = "../ruff_linter", features = ["schemars"] }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_codegen = { path = "../ruff_python_codegen" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"] }
anyhow = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }

View File

@@ -14,7 +14,7 @@ license = { workspace = true }
doctest = false
[dependencies]
ruff_text_size = { workspace = true }
ruff_text_size = { path = "../ruff_text_size" }
anyhow = { workspace = true }
log = { workspace = true }

View File

@@ -11,9 +11,9 @@ repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_cache = { workspace = true }
ruff_macros = { workspace = true }
ruff_text_size = { workspace = true }
ruff_cache = { path = "../ruff_cache" }
ruff_macros = { path = "../ruff_macros" }
ruff_text_size = { path = "../ruff_text_size" }
drop_bomb = { workspace = true }
rustc-hash = { workspace = true }

View File

@@ -14,7 +14,7 @@ license = { workspace = true }
doctest = false
[dependencies]
ruff_macros = { workspace = true }
ruff_macros = { path = "../ruff_macros" }
[dev-dependencies]
static_assertions = { workspace = true }

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.4.5"
version = "0.4.4"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -13,20 +13,20 @@ license = { workspace = true }
[lib]
[dependencies]
ruff_cache = { workspace = true }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_python_codegen = { workspace = true }
ruff_python_index = { workspace = true }
ruff_python_literal = { workspace = true }
ruff_python_semantic = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_source_file = { workspace = true, features = ["serde"] }
ruff_text_size = { workspace = true }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics", features = ["serde"] }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast", features = ["serde"] }
ruff_python_codegen = { path = "../ruff_python_codegen" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_literal = { path = "../ruff_python_literal" }
ruff_python_semantic = { path = "../ruff_python_semantic" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
ruff_text_size = { path = "../ruff_text_size" }
aho-corasick = { workspace = true }
annotate-snippets = { workspace = true, features = ["color"] }

View File

@@ -1,57 +0,0 @@
# type: ignore
# ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
import math
from math import inf
async def import_trio():
import trio
# These examples are probably not meant to ever wake up:
await trio.sleep(100000) # error: 116, "async"
# 'inf literal' overflow trick
await trio.sleep(1e999) # error: 116, "async"
await trio.sleep(86399)
await trio.sleep(86400)
await trio.sleep(86400.01) # error: 116, "async"
await trio.sleep(86401) # error: 116, "async"
await trio.sleep(-1) # will raise a runtime error
await trio.sleep(0) # handled by different check
# these ones _definitely_ never wake up (TODO)
await trio.sleep(float("inf"))
await trio.sleep(math.inf)
await trio.sleep(inf)
# don't require inf to be in math (TODO)
await trio.sleep(np.inf)
# don't evaluate expressions (TODO)
one_day = 86401
await trio.sleep(86400 + 1)
await trio.sleep(60 * 60 * 24 + 1)
await trio.sleep(foo())
await trio.sleep(one_day)
await trio.sleep(86400 + foo())
await trio.sleep(86400 + ...)
await trio.sleep("hello")
await trio.sleep(...)
def not_async_fun():
import trio
# does not require the call to be awaited, nor in an async fun
trio.sleep(86401) # error: 116, "async"
# also checks that we don't break visit_Call
trio.run(trio.sleep(86401)) # error: 116, "async"
async def import_from_trio():
from trio import sleep
# catch from import
await sleep(86401) # error: 116, "async"

View File

@@ -1,7 +0,0 @@
def main() -> None:
a_list: list[str] | None = []
a_list.append("hello")
def hello(y: "dict[str, int] | None") -> None:
del y

View File

@@ -90,10 +90,3 @@ def f():
def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]:
...
def f():
from pandas import DataFrame, Series
def func(self) -> DataFrame | list[Series]:
pass

View File

@@ -63,16 +63,3 @@ if (a and
#: Okay
def f():
return 1
# Soft keywords
#: E271
type Number = int
#: E273
type Number = int
#: E275
match(foo):
case(1):
pass

View File

@@ -46,15 +46,3 @@ regex = '\\\_'
#: W605:1:7
u'foo\ bar'
#: W605:1:13
(
"foo \
bar \. baz"
)
#: W605:1:6
"foo \. bar \t"
#: W605:1:13
"foo \t bar \."

View File

@@ -1,4 +1,4 @@
"""__init__.py with nonempty __all__
"""__init__.py with __all__
Unused stdlib and third party imports are unsafe removals
@@ -33,10 +33,10 @@ from . import aliased as aliased # Ok: is redundant alias
from . import exported # Ok: is exported in __all__
from . import unused # F401: add to __all__
# from . import unused # F401: add to __all__
from . import renamed as bees # F401: add to __all__
# from . import renamed as bees # F401: add to __all__
__all__ = ["argparse", "exported"]

View File

@@ -1,11 +0,0 @@
"""__init__.py with empty __all__
"""
from . import unused # F401: add to __all__
from . import renamed as bees # F401: add to __all__
__all__ = []

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1,11 +0,0 @@
"""__init__.py with mis-typed __all__
"""
from . import unused # F401: recommend add to all w/o fix
from . import renamed as bees # F401: recommend add to all w/o fix
__all__ = None

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1,8 +0,0 @@
"""__init__.py with multiple imports added to all in one edit
"""
from . import unused, renamed as bees # F401: add to __all__
__all__ = [];

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1,16 +0,0 @@
"""__init__.py with __all__ populated by conditional plus-eq
multiple __all__ so cannot offer a fix to add to them
"""
import sys
from . import unused, exported, renamed as bees
if sys.version_info > (3, 9):
from . import also_exported
__all__ = ["exported"]
if sys.version_info >= (3, 9):
__all__ += ["also_exported"]

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -1 +0,0 @@
# empty module imported by __init__.py for test fixture

View File

@@ -82,16 +82,3 @@ class Foo:
@qux.setter
def qux(self, value):
self.bar = value / 2
class StudentG:
names = ("surname",)
__slots__ = (*names, "a")
def __init__(self, name, surname):
self.name = name
self.surname = surname # [assigning-non-slot]
self.setup()
def setup(self):
pass

View File

@@ -21,8 +21,6 @@ def wrong(): # [too-many-branches]
pass
try:
pass
except Exception:
pass
finally:
pass
if 2:
@@ -58,8 +56,6 @@ def good():
pass
try:
pass
except Exception:
pass
finally:
pass
if 1:
@@ -94,8 +90,6 @@ def with_statement_wrong():
pass
try:
pass
except Exception:
pass
finally:
pass
if 2:

View File

@@ -1,14 +0,0 @@
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Tuple
def foo():
# UP037
x: "Tuple[int, int]" = (0, 0)
print(x)
# OK
X: "Tuple[int, int]" = (0, 0)

View File

@@ -38,12 +38,3 @@ z = (
else
y
)
# FURB110
z = (
x
if x
else y
if y > 0
else None
)

View File

@@ -103,7 +103,7 @@ def f():
def f():
# Invalid - nonexistent error code with multibyte character
# Invalid - nonexistant error code with multibyte character
d = 1 # …noqa: F841, E50
e = 1 # …noqa: E50

View File

@@ -62,8 +62,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
if !checker.semantic.future_annotations_or_stub()
&& checker.settings.target_version < PythonVersion::Py39
&& checker.semantic.in_annotation()
&& checker.semantic.in_runtime_evaluated_annotation()
&& !checker.semantic.in_string_type_definition()
&& typing::is_pep585_generic(value, &checker.semantic)
{
flake8_future_annotations::rules::future_required_type_annotation(
@@ -508,9 +506,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
if checker.enabled(Rule::BlockingOsCallInAsyncFunction) {
flake8_async::rules::blocking_os_call(checker, call);
}
if checker.enabled(Rule::SleepForeverCall) {
flake8_async::rules::sleep_forever_call(checker, call);
}
if checker.any_enabled(&[Rule::Print, Rule::PPrint]) {
flake8_print::rules::print_call(checker, call);
}
@@ -1070,17 +1065,13 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
pyflakes::rules::invalid_print_syntax(checker, left);
}
}
Expr::BinOp(
bin_op @ ast::ExprBinOp {
left,
op: Operator::Mod,
right,
range: _,
},
) => {
if let Expr::StringLiteral(format_string @ ast::ExprStringLiteral { value, .. }) =
left.as_ref()
{
Expr::BinOp(ast::ExprBinOp {
left,
op: Operator::Mod,
right,
range: _,
}) => {
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = left.as_ref() {
if checker.any_enabled(&[
Rule::PercentFormatInvalidFormat,
Rule::PercentFormatExpectedMapping,
@@ -1160,14 +1151,10 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
pyupgrade::rules::printf_string_formatting(checker, expr, right);
}
if checker.enabled(Rule::BadStringFormatCharacter) {
pylint::rules::bad_string_format_character::percent(
checker,
expr,
format_string,
);
pylint::rules::bad_string_format_character::percent(checker, expr);
}
if checker.enabled(Rule::BadStringFormatType) {
pylint::rules::bad_string_format_type(checker, bin_op, format_string);
pylint::rules::bad_string_format_type(checker, expr, right);
}
if checker.enabled(Rule::HardcodedSQLExpression) {
flake8_bandit::rules::hardcoded_sql_expression(checker, expr);
@@ -1200,8 +1187,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
if !checker.semantic.future_annotations_or_stub()
&& checker.settings.target_version < PythonVersion::Py310
&& checker.semantic.in_annotation()
&& checker.semantic.in_runtime_evaluated_annotation()
&& !checker.semantic.in_string_type_definition()
{
flake8_future_annotations::rules::future_required_type_annotation(
checker,

View File

@@ -2,7 +2,7 @@ use ruff_python_ast::StringLike;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{flake8_bandit, flake8_pyi, flake8_quotes, pycodestyle, ruff};
use crate::rules::{flake8_bandit, flake8_pyi, flake8_quotes, ruff};
/// Run lint rules over a [`StringLike`] syntax nodes.
pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
@@ -36,7 +36,4 @@ pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
if checker.enabled(Rule::AvoidableEscapedQuote) && checker.settings.flake8_quotes.avoid_escape {
flake8_quotes::rules::avoidable_escaped_quote(checker, string_like);
}
if checker.enabled(Rule::InvalidEscapeSequence) {
pycodestyle::rules::invalid_escape_sequence(checker, string_like);
}
}

View File

@@ -2152,7 +2152,7 @@ impl<'a> Checker<'a> {
self.semantic.restore(snapshot);
if self.semantic.in_annotation() && self.semantic.in_typing_only_annotation() {
if self.semantic.in_annotation() && self.semantic.future_annotations_or_stub() {
if self.enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, value, range);
}

View File

@@ -5,13 +5,13 @@ use std::path::Path;
use ruff_notebook::CellOffsets;
use ruff_python_ast::PySourceType;
use ruff_python_codegen::Stylist;
use ruff_python_parser::lexer::LexResult;
use ruff_diagnostics::Diagnostic;
use ruff_python_index::Indexer;
use ruff_source_file::Locator;
use crate::directives::TodoComment;
use crate::linter::TokenSource;
use crate::registry::{AsRule, Rule};
use crate::rules::pycodestyle::rules::BlankLinesChecker;
use crate::rules::{
@@ -22,7 +22,7 @@ use crate::settings::LinterSettings;
#[allow(clippy::too_many_arguments)]
pub(crate) fn check_tokens(
tokens: &TokenSource,
tokens: &[LexResult],
path: &Path,
locator: &Locator,
indexer: &Indexer,
@@ -42,7 +42,7 @@ pub(crate) fn check_tokens(
Rule::BlankLinesBeforeNestedDefinition,
]) {
BlankLinesChecker::new(locator, stylist, settings, source_type, cell_offsets)
.check_lines(tokens.kinds(), &mut diagnostics);
.check_lines(tokens, &mut diagnostics);
}
if settings.rules.enabled(Rule::BlanketTypeIgnore) {
@@ -75,6 +75,18 @@ pub(crate) fn check_tokens(
pyupgrade::rules::unnecessary_coding_comment(&mut diagnostics, locator, indexer);
}
if settings.rules.enabled(Rule::InvalidEscapeSequence) {
for (tok, range) in tokens.iter().flatten() {
pycodestyle::rules::invalid_escape_sequence(
&mut diagnostics,
locator,
indexer,
tok,
*range,
);
}
}
if settings.rules.enabled(Rule::TabIndentation) {
pycodestyle::rules::tab_indentation(&mut diagnostics, locator, indexer);
}
@@ -86,8 +98,8 @@ pub(crate) fn check_tokens(
Rule::InvalidCharacterNul,
Rule::InvalidCharacterZeroWidthSpace,
]) {
for (token, range) in tokens.kinds() {
pylint::rules::invalid_string_characters(&mut diagnostics, token, range, locator);
for (tok, range) in tokens.iter().flatten() {
pylint::rules::invalid_string_characters(&mut diagnostics, tok, *range, locator);
}
}
@@ -98,7 +110,7 @@ pub(crate) fn check_tokens(
]) {
pycodestyle::rules::compound_statements(
&mut diagnostics,
tokens.kinds(),
tokens,
locator,
indexer,
source_type,
@@ -112,7 +124,7 @@ pub(crate) fn check_tokens(
]) {
flake8_implicit_str_concat::rules::implicit(
&mut diagnostics,
tokens.kinds(),
tokens,
settings,
locator,
indexer,
@@ -124,11 +136,11 @@ pub(crate) fn check_tokens(
Rule::TrailingCommaOnBareTuple,
Rule::ProhibitedTrailingComma,
]) {
flake8_commas::rules::trailing_commas(&mut diagnostics, tokens.kinds(), locator, indexer);
flake8_commas::rules::trailing_commas(&mut diagnostics, tokens, locator, indexer);
}
if settings.rules.enabled(Rule::ExtraneousParentheses) {
pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens.kinds(), locator);
pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator);
}
if source_type.is_stub() && settings.rules.enabled(Rule::TypeCommentInStub) {
@@ -172,7 +184,7 @@ pub(crate) fn check_tokens(
}
if settings.rules.enabled(Rule::TooManyNewlinesAtEndOfFile) {
pycodestyle::rules::too_many_newlines_at_end_of_file(&mut diagnostics, tokens.kinds());
pycodestyle::rules::too_many_newlines_at_end_of_file(&mut diagnostics, tokens);
}
diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.kind.rule()));

View File

@@ -334,7 +334,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8Async, "100") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingHttpCallInAsyncFunction),
(Flake8Async, "101") => (RuleGroup::Stable, rules::flake8_async::rules::OpenSleepOrSubprocessInAsyncFunction),
(Flake8Async, "102") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingOsCallInAsyncFunction),
(Flake8Async, "116") => (RuleGroup::Preview, rules::flake8_async::rules::SleepForeverCall),
// flake8-trio
(Flake8Trio, "100") => (RuleGroup::Stable, rules::flake8_trio::rules::TrioTimeoutWithoutAwait),

View File

@@ -131,7 +131,7 @@ fn extract_noqa_line_for(lxr: &[LexResult], locator: &Locator, indexer: &Indexer
// For multi-line strings, we expect `noqa` directives on the last line of the
// string.
Tok::String { flags, .. } if flags.is_triple_quoted() => {
Tok::String { kind, .. } if kind.is_triple_quoted() => {
if locator.contains_line_break(*range) {
string_mappings.push(TextRange::new(
locator.line_start(range.start()),

View File

@@ -4,26 +4,27 @@
use std::iter::FusedIterator;
use ruff_python_ast::{self as ast, Stmt, Suite};
use ruff_python_parser::{TokenKind, TokenKindIter};
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_text_size::{Ranged, TextSize};
use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
use ruff_source_file::{Locator, UniversalNewlineIterator};
/// Extract doc lines (standalone comments) from a token sequence.
pub(crate) fn doc_lines_from_tokens(tokens: TokenKindIter) -> DocLines {
DocLines::new(tokens)
pub(crate) fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
DocLines::new(lxr)
}
pub(crate) struct DocLines<'a> {
inner: TokenKindIter<'a>,
inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
prev: TextSize,
}
impl<'a> DocLines<'a> {
fn new(tokens: TokenKindIter<'a>) -> Self {
fn new(lxr: &'a [LexResult]) -> Self {
Self {
inner: tokens,
inner: lxr.iter().flatten(),
prev: TextSize::default(),
}
}
@@ -38,15 +39,15 @@ impl Iterator for DocLines<'_> {
let (tok, range) = self.inner.next()?;
match tok {
TokenKind::Comment => {
Tok::Comment(..) => {
if at_start_of_line {
break Some(range.start());
}
}
TokenKind::Newline | TokenKind::NonLogicalNewline => {
Tok::Newline | Tok::NonLogicalNewline => {
at_start_of_line = true;
}
TokenKind::Indent | TokenKind::Dedent => {
Tok::Indent | Tok::Dedent => {
// ignore
}
_ => {

View File

@@ -1,12 +1,10 @@
//! Interface for generating fix edits from higher-level actions (e.g., "remove an argument").
use std::borrow::Cow;
use anyhow::{Context, Result};
use ruff_diagnostics::Edit;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Stmt};
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Stmt};
use ruff_python_ast::{AnyNodeRef, ArgOrKeyword};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
@@ -126,7 +124,7 @@ pub(crate) fn remove_unused_imports<'a>(
/// Edits to make the specified imports explicit, e.g. change `import x` to `import x as x`.
pub(crate) fn make_redundant_alias<'a>(
member_names: impl Iterator<Item = Cow<'a, str>>,
member_names: impl Iterator<Item = &'a str>,
stmt: &Stmt,
) -> Vec<Edit> {
let aliases = match stmt {
@@ -146,53 +144,6 @@ pub(crate) fn make_redundant_alias<'a>(
.collect()
}
/// Fix to add the specified imports to the `__all__` export list.
///
/// `names` are inserted at the end of the existing `__all__` literal, which
/// may be a list, a parenthesized tuple, or an unparenthesized tuple. Returns
/// an empty `Vec` if `expr` is not a shape we know how to extend.
pub(crate) fn add_to_dunder_all<'a>(
    names: impl Iterator<Item = &'a str>,
    expr: &Expr,
    stylist: &Stylist,
) -> Vec<Edit> {
    // Determine where the new names get inserted, and how many elements the
    // export list already has (decides whether the first insertion needs a
    // leading comma).
    let (insertion_point, export_prefix_length) = match expr {
        Expr::List(ExprList { elts, range, .. }) => (
            // After the last element, or just before the closing `]` if empty.
            elts.last()
                .map_or(range.end() - "]".text_len(), Ranged::end),
            elts.len(),
        ),
        Expr::Tuple(tup) if tup.parenthesized => (
            // After the last element, or just before the closing `)` if empty.
            tup.elts
                .last()
                .map_or(tup.end() - ")".text_len(), Ranged::end),
            tup.elts.len(),
        ),
        Expr::Tuple(tup) if !tup.parenthesized => (
            tup.elts
                .last()
                .expect("unparenthesized empty tuple is not possible")
                .range()
                .end(),
            tup.elts.len(),
        ),
        _ => {
            // we don't know how to insert into this expression
            return vec![];
        }
    };
    // Match the file's existing quote style.
    let quote = stylist.quote();
    // One insertion edit per name; only the very first element of the whole
    // list (offset 0 in an empty container) omits the leading `, `.
    let mut edits: Vec<_> = names
        .enumerate()
        .map(|(offset, name)| match export_prefix_length + offset {
            0 => Edit::insertion(format!("{quote}{name}{quote}"), insertion_point),
            _ => Edit::insertion(format!(", {quote}{name}{quote}"), insertion_point),
        })
        .collect();
    // A single-element parenthesized tuple needs a trailing comma so it stays
    // a tuple rather than becoming a parenthesized expression.
    if let Expr::Tuple(tup) = expr {
        if tup.parenthesized && export_prefix_length + edits.len() == 1 {
            edits.push(Edit::insertion(",".to_string(), insertion_point));
        }
    }
    edits
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum Parentheses {
/// Remove parentheses, if the removed argument is the only argument left.
@@ -526,20 +477,14 @@ fn all_lines_fit(
#[cfg(test)]
mod tests {
use anyhow::{anyhow, Result};
use std::borrow::Cow;
use test_case::test_case;
use anyhow::Result;
use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_codegen::Stylist;
use ruff_python_parser::{lexer, parse_expression, parse_suite, Mode};
use ruff_diagnostics::Edit;
use ruff_python_parser::parse_suite;
use ruff_source_file::Locator;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::fix::apply_fixes;
use crate::fix::edits::{
add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
};
use crate::fix::edits::{make_redundant_alias, next_stmt_break, trailing_semicolon};
#[test]
fn find_semicolon() -> Result<()> {
@@ -617,7 +562,7 @@ x = 1 \
let program = parse_suite(contents).unwrap();
let stmt = program.first().unwrap();
assert_eq!(
make_redundant_alias(["x"].into_iter().map(Cow::from), stmt),
make_redundant_alias(["x"].into_iter(), stmt),
vec![Edit::range_replacement(
String::from("x as x"),
TextRange::new(TextSize::new(7), TextSize::new(8)),
@@ -625,7 +570,7 @@ x = 1 \
"make just one item redundant"
);
assert_eq!(
make_redundant_alias(vec!["x", "y"].into_iter().map(Cow::from), stmt),
make_redundant_alias(vec!["x", "y"].into_iter(), stmt),
vec![Edit::range_replacement(
String::from("x as x"),
TextRange::new(TextSize::new(7), TextSize::new(8)),
@@ -633,7 +578,7 @@ x = 1 \
"the second item is already a redundant alias"
);
assert_eq!(
make_redundant_alias(vec!["x", "z"].into_iter().map(Cow::from), stmt),
make_redundant_alias(vec!["x", "z"].into_iter(), stmt),
vec![Edit::range_replacement(
String::from("x as x"),
TextRange::new(TextSize::new(7), TextSize::new(8)),
@@ -641,47 +586,4 @@ x = 1 \
"the third item is already aliased to something else"
);
}
    // Table-driven check: parse `raw` as an `__all__` expression, generate
    // insertion edits for `names` via `add_to_dunder_all`, apply them, and
    // compare the fixed source against `expect`.
    #[test_case("()", &["x", "y"], r#"("x", "y")"# ; "2 into empty tuple")]
    #[test_case("()", &["x"], r#"("x",)"# ; "1 into empty tuple adding a trailing comma")]
    #[test_case("[]", &["x", "y"], r#"["x", "y"]"# ; "2 into empty list")]
    #[test_case("[]", &["x"], r#"["x"]"# ; "1 into empty list")]
    #[test_case(r#""a", "b""#, &["x", "y"], r#""a", "b", "x", "y""# ; "2 into unparenthesized tuple")]
    #[test_case(r#""a", "b""#, &["x"], r#""a", "b", "x""# ; "1 into unparenthesized tuple")]
    #[test_case(r#""a", "b","#, &["x", "y"], r#""a", "b", "x", "y","# ; "2 into unparenthesized tuple w/trailing comma")]
    #[test_case(r#""a", "b","#, &["x"], r#""a", "b", "x","# ; "1 into unparenthesized tuple w/trailing comma")]
    #[test_case(r#"("a", "b")"#, &["x", "y"], r#"("a", "b", "x", "y")"# ; "2 into nonempty tuple")]
    #[test_case(r#"("a", "b")"#, &["x"], r#"("a", "b", "x")"# ; "1 into nonempty tuple")]
    #[test_case(r#"("a", "b",)"#, &["x", "y"], r#"("a", "b", "x", "y",)"# ; "2 into nonempty tuple w/trailing comma")]
    #[test_case(r#"("a", "b",)"#, &["x"], r#"("a", "b", "x",)"# ; "1 into nonempty tuple w/trailing comma")]
    #[test_case(r#"["a", "b",]"#, &["x", "y"], r#"["a", "b", "x", "y",]"# ; "2 into nonempty list w/trailing comma")]
    #[test_case(r#"["a", "b",]"#, &["x"], r#"["a", "b", "x",]"# ; "1 into nonempty list w/trailing comma")]
    #[test_case(r#"["a", "b"]"#, &["x", "y"], r#"["a", "b", "x", "y"]"# ; "2 into nonempty list")]
    #[test_case(r#"["a", "b"]"#, &["x"], r#"["a", "b", "x"]"# ; "1 into nonempty list")]
    fn add_to_dunder_all_test(raw: &str, names: &[&str], expect: &str) -> Result<()> {
        let locator = Locator::new(raw);
        let edits = {
            let expr = parse_expression(raw)?;
            // Stylist is built from a fresh lex of `raw` so quoting matches
            // the input's own style.
            let stylist = Stylist::from_tokens(
                &lexer::lex(raw, Mode::Expression).collect::<Vec<_>>(),
                &locator,
            );
            // SUT
            add_to_dunder_all(names.iter().copied(), &expr, &stylist)
        };
        // Wrap the edits into a diagnostic + fix so `apply_fixes` can run them.
        let diag = {
            use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
            let mut iter = edits.into_iter();
            Diagnostic::new(
                MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
                TextRange::default(),
            )
            .with_fix(Fix::safe_edits(
                iter.next().ok_or(anyhow!("expected edits nonempty"))?,
                iter,
            ))
        };
        assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
        Ok(())
    }
}

View File

@@ -321,6 +321,7 @@ mod tests {
use ruff_python_ast::PySourceType;
use ruff_python_codegen::Stylist;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::{parse_suite, Mode};
use ruff_source_file::{LineEnding, Locator};
use ruff_text_size::TextSize;
@@ -331,7 +332,7 @@ mod tests {
fn start_of_file() -> Result<()> {
fn insert(contents: &str) -> Result<Insertion> {
let program = parse_suite(contents)?;
let tokens = ruff_python_parser::tokenize(contents, Mode::Module);
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
let locator = Locator::new(contents);
let stylist = Stylist::from_tokens(&tokens, &locator);
Ok(Insertion::start_of_file(&program, &locator, &stylist))
@@ -442,7 +443,7 @@ x = 1
#[test]
fn start_of_block() {
fn insert(contents: &str, offset: TextSize) -> Insertion {
let tokens = ruff_python_parser::tokenize(contents, Mode::Module);
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
let locator = Locator::new(contents);
let stylist = Stylist::from_tokens(&tokens, &locator);
Insertion::start_of_block(offset, &locator, &stylist, PySourceType::default())

View File

@@ -14,7 +14,7 @@ use ruff_python_ast::{PySourceType, Suite};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::{AsMode, ParseError, TokenKindIter, Tokens};
use ruff_python_parser::{AsMode, ParseError};
use ruff_source_file::{Locator, SourceFileBuilder};
use ruff_text_size::Ranged;
@@ -93,7 +93,7 @@ pub fn check_path(
let use_doc_lines = settings.rules.enabled(Rule::DocLineTooLong);
let mut doc_lines = vec![];
if use_doc_lines {
doc_lines.extend(doc_lines_from_tokens(tokens.kinds()));
doc_lines.extend(doc_lines_from_tokens(&tokens));
}
// Run the token-based rules.
@@ -353,7 +353,7 @@ pub fn add_noqa_to_path(
let contents = source_kind.source_code();
// Tokenize once.
let tokens = ruff_python_parser::tokenize(contents, source_type.as_mode());
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
// Map row and column locations to byte slices (lazily).
let locator = Locator::new(contents);
@@ -518,7 +518,8 @@ pub fn lint_fix<'a>(
// Continuously fix until the source code stabilizes.
loop {
// Tokenize once.
let tokens = ruff_python_parser::tokenize(transformed.source_code(), source_type.as_mode());
let tokens: Vec<LexResult> =
ruff_python_parser::tokenize(transformed.source_code(), source_type.as_mode());
// Map row and column locations to byte slices (lazily).
let locator = Locator::new(transformed.source_code());
@@ -714,7 +715,7 @@ impl<'a> ParseSource<'a> {
#[derive(Debug, Clone)]
pub enum TokenSource<'a> {
/// Use the precomputed tokens to generate the AST.
Tokens(Tokens),
Tokens(Vec<LexResult>),
/// Use the precomputed tokens and AST.
Precomputed {
tokens: &'a [LexResult],
@@ -722,18 +723,6 @@ pub enum TokenSource<'a> {
},
}
impl TokenSource<'_> {
/// Returns an iterator over the [`TokenKind`] and the corresponding range.
///
/// [`TokenKind`]: ruff_python_parser::TokenKind
pub fn kinds(&self) -> TokenKindIter {
match self {
TokenSource::Tokens(tokens) => tokens.kinds(),
TokenSource::Precomputed { tokens, .. } => TokenKindIter::new(tokens),
}
}
}
impl Deref for TokenSource<'_> {
type Target = [LexResult];

View File

@@ -270,6 +270,7 @@ impl Rule {
| Rule::InvalidCharacterNul
| Rule::InvalidCharacterSub
| Rule::InvalidCharacterZeroWidthSpace
| Rule::InvalidEscapeSequence
| Rule::InvalidTodoCapitalization
| Rule::InvalidTodoTag
| Rule::LineContainsFixme

View File

@@ -16,7 +16,6 @@ mod tests {
#[test_case(Rule::BlockingHttpCallInAsyncFunction, Path::new("ASYNC100.py"))]
#[test_case(Rule::OpenSleepOrSubprocessInAsyncFunction, Path::new("ASYNC101.py"))]
#[test_case(Rule::BlockingOsCallInAsyncFunction, Path::new("ASYNC102.py"))]
#[test_case(Rule::SleepForeverCall, Path::new("ASYNC116.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -1,9 +1,7 @@
pub(crate) use blocking_http_call::*;
pub(crate) use blocking_os_call::*;
pub(crate) use open_sleep_or_subprocess_call::*;
pub(crate) use sleep_forever_call::*;
mod blocking_http_call;
mod blocking_os_call;
mod open_sleep_or_subprocess_call;
mod sleep_forever_call;

View File

@@ -1,110 +0,0 @@
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{Expr, ExprCall, ExprNumberLiteral, Number};
use ruff_python_semantic::Modules;
use ruff_text_size::Ranged;
use crate::{checkers::ast::Checker, importer::ImportRequest};
/// ## What it does
/// Checks for uses of `trio.sleep()` with an interval greater than 24 hours.
///
/// ## Why is this bad?
/// `trio.sleep()` with an interval greater than 24 hours is usually intended
/// to sleep indefinitely. Instead of using a large interval,
/// `trio.sleep_forever()` better conveys the intent.
///
/// ## Example
/// ```python
/// import trio
///
///
/// async def func():
///     await trio.sleep(86401)
/// ```
///
/// Use instead:
/// ```python
/// import trio
///
///
/// async def func():
///     await trio.sleep_forever()
/// ```
#[violation]
pub struct SleepForeverCall;
impl Violation for SleepForeverCall {
    // A fix is only sometimes available: constructing it goes through
    // `try_set_fix` (see `sleep_forever_call`), where the import edit can fail.
    const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

    #[derive_message_formats]
    fn message(&self) -> String {
        format!("`trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`")
    }

    fn fix_title(&self) -> Option<String> {
        Some(format!("Replace with `trio.sleep_forever()`"))
    }
}
/// ASYNC116: flag `trio.sleep(<interval>)` calls whose literal interval
/// exceeds 24 hours, offering a fix to `trio.sleep_forever()`.
pub(crate) fn sleep_forever_call(checker: &mut Checker, call: &ExprCall) {
    // Fast path: skip modules that were never seen importing `trio`.
    if !checker.semantic().seen_module(Modules::TRIO) {
        return;
    }
    // `trio.sleep` takes exactly one argument; anything else isn't a match.
    if call.arguments.len() != 1 {
        return;
    }
    // The interval may be passed positionally or as `seconds=`.
    let Some(arg) = call.arguments.find_argument("seconds", 0) else {
        return;
    };
    // Only flag calls that actually resolve to `trio.sleep`.
    if !checker
        .semantic()
        .resolve_qualified_name(call.func.as_ref())
        .is_some_and(|qualified_name| matches!(qualified_name.segments(), ["trio", "sleep"]))
    {
        return;
    }
    // Only literal numeric intervals are considered; computed expressions are
    // left alone.
    let Expr::NumberLiteral(ExprNumberLiteral { value, .. }) = arg else {
        return;
    };
    // TODO(ekohilas): Replace with Duration::from_days(1).as_secs(); when available.
    let one_day_in_secs = 60 * 60 * 24;
    // Bail unless the literal is strictly greater than 24 hours.
    match value {
        Number::Int(int_value) => {
            // Literals that don't fit a `u64` are ignored here.
            let Some(int_value) = int_value.as_u64() else {
                return;
            };
            if int_value <= one_day_in_secs {
                return;
            }
        }
        Number::Float(float_value) =>
        {
            #[allow(clippy::cast_precision_loss)]
            if *float_value <= one_day_in_secs as f64 {
                return;
            }
        }
        Number::Complex { .. } => return,
    }
    let mut diagnostic = Diagnostic::new(SleepForeverCall, call.range());
    let replacement_function = "sleep_forever";
    // Rewrite `trio.sleep(<interval>)` to `trio.sleep_forever()`, importing
    // the symbol if needed. The fix is marked unsafe (`Fix::unsafe_edits`)
    // since it replaces a finite sleep with an indefinite one.
    diagnostic.try_set_fix(|| {
        let (import_edit, binding) = checker.importer().get_or_import_symbol(
            &ImportRequest::import_from("trio", replacement_function),
            call.func.start(),
            checker.semantic(),
        )?;
        let reference_edit = Edit::range_replacement(binding, call.func.range());
        let arg_edit = Edit::range_replacement("()".to_string(), call.arguments.range());
        Ok(Fix::unsafe_edits(import_edit, [reference_edit, arg_edit]))
    });
    checker.diagnostics.push(diagnostic);
}

View File

@@ -1,145 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
---
ASYNC116.py:11:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
10 | # These examples are probably not meant to ever wake up:
11 | await trio.sleep(100000) # error: 116, "async"
| ^^^^^^^^^^^^^^^^^^ ASYNC116
12 |
13 | # 'inf literal' overflow trick
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
8 8 | import trio
9 9 |
10 10 | # These examples are probably not meant to ever wake up:
11 |- await trio.sleep(100000) # error: 116, "async"
11 |+ await trio.sleep_forever() # error: 116, "async"
12 12 |
13 13 | # 'inf literal' overflow trick
14 14 | await trio.sleep(1e999) # error: 116, "async"
ASYNC116.py:14:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
13 | # 'inf literal' overflow trick
14 | await trio.sleep(1e999) # error: 116, "async"
| ^^^^^^^^^^^^^^^^^ ASYNC116
15 |
16 | await trio.sleep(86399)
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
11 11 | await trio.sleep(100000) # error: 116, "async"
12 12 |
13 13 | # 'inf literal' overflow trick
14 |- await trio.sleep(1e999) # error: 116, "async"
14 |+ await trio.sleep_forever() # error: 116, "async"
15 15 |
16 16 | await trio.sleep(86399)
17 17 | await trio.sleep(86400)
ASYNC116.py:18:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
16 | await trio.sleep(86399)
17 | await trio.sleep(86400)
18 | await trio.sleep(86400.01) # error: 116, "async"
| ^^^^^^^^^^^^^^^^^^^^ ASYNC116
19 | await trio.sleep(86401) # error: 116, "async"
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
15 15 |
16 16 | await trio.sleep(86399)
17 17 | await trio.sleep(86400)
18 |- await trio.sleep(86400.01) # error: 116, "async"
18 |+ await trio.sleep_forever() # error: 116, "async"
19 19 | await trio.sleep(86401) # error: 116, "async"
20 20 |
21 21 | await trio.sleep(-1) # will raise a runtime error
ASYNC116.py:19:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
17 | await trio.sleep(86400)
18 | await trio.sleep(86400.01) # error: 116, "async"
19 | await trio.sleep(86401) # error: 116, "async"
| ^^^^^^^^^^^^^^^^^ ASYNC116
20 |
21 | await trio.sleep(-1) # will raise a runtime error
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
16 16 | await trio.sleep(86399)
17 17 | await trio.sleep(86400)
18 18 | await trio.sleep(86400.01) # error: 116, "async"
19 |- await trio.sleep(86401) # error: 116, "async"
19 |+ await trio.sleep_forever() # error: 116, "async"
20 20 |
21 21 | await trio.sleep(-1) # will raise a runtime error
22 22 | await trio.sleep(0) # handled by different check
ASYNC116.py:48:5: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
47 | # does not require the call to be awaited, nor in an async fun
48 | trio.sleep(86401) # error: 116, "async"
| ^^^^^^^^^^^^^^^^^ ASYNC116
49 | # also checks that we don't break visit_Call
50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
45 45 | import trio
46 46 |
47 47 | # does not require the call to be awaited, nor in an async fun
48 |- trio.sleep(86401) # error: 116, "async"
48 |+ trio.sleep_forever() # error: 116, "async"
49 49 | # also checks that we don't break visit_Call
50 50 | trio.run(trio.sleep(86401)) # error: 116, "async"
51 51 |
ASYNC116.py:50:14: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
48 | trio.sleep(86401) # error: 116, "async"
49 | # also checks that we don't break visit_Call
50 | trio.run(trio.sleep(86401)) # error: 116, "async"
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
47 47 | # does not require the call to be awaited, nor in an async fun
48 48 | trio.sleep(86401) # error: 116, "async"
49 49 | # also checks that we don't break visit_Call
50 |- trio.run(trio.sleep(86401)) # error: 116, "async"
50 |+ trio.run(trio.sleep_forever()) # error: 116, "async"
51 51 |
52 52 |
53 53 | async def import_from_trio():
ASYNC116.py:57:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
56 | # catch from import
57 | await sleep(86401) # error: 116, "async"
| ^^^^^^^^^^^^ ASYNC116
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
3 3 | import math
4 4 | from math import inf
5 |+from trio import sleep_forever
5 6 |
6 7 |
7 8 | async def import_trio():
--------------------------------------------------------------------------------
54 55 | from trio import sleep
55 56 |
56 57 | # catch from import
57 |- await sleep(86401) # error: 116, "async"
58 |+ await sleep_forever() # error: 116, "async"

View File

@@ -57,7 +57,7 @@ pub(crate) fn hardcoded_bind_all_interfaces(checker: &mut Checker, string: Strin
}
}
ast::FStringPart::FString(f_string) => {
for literal in f_string.elements.literals() {
for literal in f_string.literals() {
if &**literal == "0.0.0.0" {
checker.diagnostics.push(Diagnostic::new(
HardcodedBindAllInterfaces,

View File

@@ -64,7 +64,7 @@ pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike)
check(checker, literal, literal.range());
}
ast::FStringPart::FString(f_string) => {
for literal in f_string.elements.literals() {
for literal in f_string.literals() {
check(checker, literal, literal.range());
}
}

View File

@@ -2,7 +2,8 @@ use ruff_diagnostics::{AlwaysFixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, TokenKindIter};
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_source_file::Locator;
use ruff_text_size::{Ranged, TextRange};
@@ -51,26 +52,26 @@ impl Token {
}
}
impl From<(TokenKind, TextRange)> for Token {
fn from((tok, range): (TokenKind, TextRange)) -> Self {
impl From<(&Tok, TextRange)> for Token {
fn from((tok, range): (&Tok, TextRange)) -> Self {
let ty = match tok {
TokenKind::Name => TokenType::Named,
TokenKind::String => TokenType::String,
TokenKind::Newline => TokenType::Newline,
TokenKind::NonLogicalNewline => TokenType::NonLogicalNewline,
TokenKind::Lpar => TokenType::OpeningBracket,
TokenKind::Rpar => TokenType::ClosingBracket,
TokenKind::Lsqb => TokenType::OpeningSquareBracket,
TokenKind::Rsqb => TokenType::ClosingBracket,
TokenKind::Colon => TokenType::Colon,
TokenKind::Comma => TokenType::Comma,
TokenKind::Lbrace => TokenType::OpeningCurlyBracket,
TokenKind::Rbrace => TokenType::ClosingBracket,
TokenKind::Def => TokenType::Def,
TokenKind::For => TokenType::For,
TokenKind::Lambda => TokenType::Lambda,
Tok::Name { .. } => TokenType::Named,
Tok::String { .. } => TokenType::String,
Tok::Newline => TokenType::Newline,
Tok::NonLogicalNewline => TokenType::NonLogicalNewline,
Tok::Lpar => TokenType::OpeningBracket,
Tok::Rpar => TokenType::ClosingBracket,
Tok::Lsqb => TokenType::OpeningSquareBracket,
Tok::Rsqb => TokenType::ClosingBracket,
Tok::Colon => TokenType::Colon,
Tok::Comma => TokenType::Comma,
Tok::Lbrace => TokenType::OpeningCurlyBracket,
Tok::Rbrace => TokenType::ClosingBracket,
Tok::Def => TokenType::Def,
Tok::For => TokenType::For,
Tok::Lambda => TokenType::Lambda,
// Import treated like a function.
TokenKind::Import => TokenType::Named,
Tok::Import => TokenType::Named,
_ => TokenType::Irrelevant,
};
#[allow(clippy::inconsistent_struct_constructor)]
@@ -226,23 +227,27 @@ impl AlwaysFixableViolation for ProhibitedTrailingComma {
/// COM812, COM818, COM819
pub(crate) fn trailing_commas(
diagnostics: &mut Vec<Diagnostic>,
tokens: TokenKindIter,
tokens: &[LexResult],
locator: &Locator,
indexer: &Indexer,
) {
let mut fstrings = 0u32;
let tokens = tokens.filter_map(|(token, tok_range)| {
match token {
let tokens = tokens.iter().filter_map(|result| {
let Ok((tok, tok_range)) = result else {
return None;
};
match tok {
// Completely ignore comments -- they just interfere with the logic.
TokenKind::Comment => None,
Tok::Comment(_) => None,
// F-strings are handled as `String` token type with the complete range
// of the outermost f-string. This means that the expression inside the
// f-string is not checked for trailing commas.
TokenKind::FStringStart => {
Tok::FStringStart(_) => {
fstrings = fstrings.saturating_add(1);
None
}
TokenKind::FStringEnd => {
Tok::FStringEnd => {
fstrings = fstrings.saturating_sub(1);
if fstrings == 0 {
indexer
@@ -255,7 +260,7 @@ pub(crate) fn trailing_commas(
}
_ => {
if fstrings == 0 {
Some(Token::from((token, tok_range)))
Some(Token::from((tok, *tok_range)))
} else {
None
}

View File

@@ -118,11 +118,7 @@ pub(crate) fn call_datetime_strptime_without_zone(checker: &mut Checker, call: &
}
}
ast::FStringPart::FString(f_string) => {
if f_string
.elements
.literals()
.any(|literal| literal.contains("%z"))
{
if f_string.literals().any(|literal| literal.contains("%z")) {
return;
}
}

View File

@@ -43,7 +43,6 @@ mod tests {
#[test_case(Path::new("no_future_import_uses_union_inner.py"))]
#[test_case(Path::new("ok_no_types.py"))]
#[test_case(Path::new("ok_uses_future.py"))]
#[test_case(Path::new("ok_quoted_type.py"))]
fn fa102(path: &Path) -> Result<()> {
let snapshot = format!("fa102_{}", path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -7,6 +7,7 @@ use ruff_python_ast::Expr;
use ruff_text_size::{Ranged, TextSize};
use crate::checkers::ast::Checker;
use crate::importer::Importer;
/// ## What it does
/// Checks for uses of PEP 585- and PEP 604-style type annotations in Python
@@ -86,11 +87,13 @@ impl AlwaysFixableViolation for FutureRequiredTypeAnnotation {
/// FA102
pub(crate) fn future_required_type_annotation(checker: &mut Checker, expr: &Expr, reason: Reason) {
let mut diagnostic = Diagnostic::new(FutureRequiredTypeAnnotation { reason }, expr.range());
let required_import = AnyImport::ImportFrom(ImportFrom::member("__future__", "annotations"));
diagnostic.set_fix(Fix::unsafe_edit(
checker
.importer()
.add_import(&required_import, TextSize::default()),
));
if let Some(python_ast) = checker.semantic().definitions.python_ast() {
let required_import =
AnyImport::ImportFrom(ImportFrom::member("__future__", "annotations"));
diagnostic.set_fix(Fix::unsafe_edit(
Importer::new(python_ast, checker.locator(), checker.stylist())
.add_import(&required_import, TextSize::default()),
));
}
checker.diagnostics.push(diagnostic);
}

View File

@@ -1,6 +1,21 @@
---
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
---
no_future_import_uses_lowercase.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
1 | def main() -> None:
2 | a_list: list[str] = []
| ^^^^^^^^^ FA102
3 | a_list.append("hello")
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str] = []
3 4 | a_list.append("hello")
no_future_import_uses_lowercase.py:6:14: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
6 | def hello(y: dict[str, int]) -> None:
@@ -14,3 +29,5 @@ no_future_import_uses_lowercase.py:6:14: FA102 [*] Missing `from __future__ impo
1 2 | def main() -> None:
2 3 | a_list: list[str] = []
3 4 | a_list.append("hello")

View File

@@ -1,6 +1,36 @@
---
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
---
no_future_import_uses_union.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
1 | def main() -> None:
2 | a_list: list[str] | None = []
| ^^^^^^^^^ FA102
3 | a_list.append("hello")
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str] | None = []
3 4 | a_list.append("hello")
no_future_import_uses_union.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 604 union
|
1 | def main() -> None:
2 | a_list: list[str] | None = []
| ^^^^^^^^^^^^^^^^ FA102
3 | a_list.append("hello")
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str] | None = []
3 4 | a_list.append("hello")
no_future_import_uses_union.py:6:14: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
6 | def hello(y: dict[str, int] | None) -> None:
@@ -28,3 +58,5 @@ no_future_import_uses_union.py:6:14: FA102 [*] Missing `from __future__ import a
1 2 | def main() -> None:
2 3 | a_list: list[str] | None = []
3 4 | a_list.append("hello")

View File

@@ -1,6 +1,36 @@
---
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
---
no_future_import_uses_union_inner.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
1 | def main() -> None:
2 | a_list: list[str | None] = []
| ^^^^^^^^^^^^^^^^ FA102
3 | a_list.append("hello")
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str | None] = []
3 4 | a_list.append("hello")
no_future_import_uses_union_inner.py:2:18: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 604 union
|
1 | def main() -> None:
2 | a_list: list[str | None] = []
| ^^^^^^^^^^ FA102
3 | a_list.append("hello")
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str | None] = []
3 4 | a_list.append("hello")
no_future_import_uses_union_inner.py:6:14: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
6 | def hello(y: dict[str | None, int]) -> None:
@@ -30,3 +60,35 @@ no_future_import_uses_union_inner.py:6:19: FA102 [*] Missing `from __future__ im
1 2 | def main() -> None:
2 3 | a_list: list[str | None] = []
3 4 | a_list.append("hello")
no_future_import_uses_union_inner.py:7:8: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
6 | def hello(y: dict[str | None, int]) -> None:
7 | z: tuple[str, str | None, str] = tuple(y)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ FA102
8 | del z
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str | None] = []
3 4 | a_list.append("hello")
no_future_import_uses_union_inner.py:7:19: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 604 union
|
6 | def hello(y: dict[str | None, int]) -> None:
7 | z: tuple[str, str | None, str] = tuple(y)
| ^^^^^^^^^^ FA102
8 | del z
|
= help: Add `from __future__ import annotations`
Unsafe fix
1 |+from __future__ import annotations
1 2 | def main() -> None:
2 3 | a_list: list[str | None] = []
3 4 | a_list.append("hello")

View File

@@ -1,4 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
---

View File

@@ -4,9 +4,10 @@ use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::str::{leading_quote, trailing_quote};
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, TokenKindIter};
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_source_file::Locator;
use ruff_text_size::TextRange;
use ruff_text_size::{Ranged, TextRange};
use crate::settings::LinterSettings;
@@ -92,34 +93,36 @@ impl Violation for MultiLineImplicitStringConcatenation {
/// ISC001, ISC002
pub(crate) fn implicit(
diagnostics: &mut Vec<Diagnostic>,
tokens: TokenKindIter,
tokens: &[LexResult],
settings: &LinterSettings,
locator: &Locator,
indexer: &Indexer,
) {
for ((a_tok, a_range), (b_tok, b_range)) in tokens
.filter(|(token, _)| {
*token != TokenKind::Comment
.iter()
.flatten()
.filter(|(tok, _)| {
!tok.is_comment()
&& (settings.flake8_implicit_str_concat.allow_multiline
|| *token != TokenKind::NonLogicalNewline)
|| !tok.is_non_logical_newline())
})
.tuple_windows()
{
let (a_range, b_range) = match (a_tok, b_tok) {
(TokenKind::String, TokenKind::String) => (a_range, b_range),
(TokenKind::String, TokenKind::FStringStart) => {
(Tok::String { .. }, Tok::String { .. }) => (*a_range, *b_range),
(Tok::String { .. }, Tok::FStringStart(_)) => {
match indexer.fstring_ranges().innermost(b_range.start()) {
Some(b_range) => (a_range, b_range),
Some(b_range) => (*a_range, b_range),
None => continue,
}
}
(TokenKind::FStringEnd, TokenKind::String) => {
(Tok::FStringEnd, Tok::String { .. }) => {
match indexer.fstring_ranges().innermost(a_range.start()) {
Some(a_range) => (a_range, b_range),
Some(a_range) => (a_range, *b_range),
None => continue,
}
}
(TokenKind::FStringEnd, TokenKind::FStringStart) => {
(Tok::FStringEnd, Tok::FStringStart(_)) => {
match (
indexer.fstring_ranges().innermost(a_range.start()),
indexer.fstring_ranges().innermost(b_range.start()),

View File

@@ -384,11 +384,7 @@ pub(crate) fn unittest_raises_assertion(
},
call.func.range(),
);
if !checker
.indexer()
.comment_ranges()
.has_comments(call, checker.locator())
{
if !checker.indexer().has_comments(call, checker.locator()) {
if let Some(args) = to_pytest_raises_args(checker, attr.as_str(), &call.arguments) {
diagnostic.try_set_fix(|| {
let (import_edit, binding) = checker.importer().get_or_import_symbol(

Some files were not shown because too many files have changed in this diff Show More