Compare commits


1 Commit

Author   SHA1        Message                   Date
konstin  7f97547b5f  Add increment/decrement   2024-03-14 16:56:06 +01:00
691 changed files with 35012 additions and 48405 deletions

.github/dependabot.yml (vendored, new file, 21 lines added)

@@ -0,0 +1,21 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
labels: ["internal"]
groups:
actions:
patterns:
- "*"
ignore:
# The latest versions of these are not compatible with our release workflow
- dependency-name: "actions/upload-artifact"
- dependency-name: "actions/download-artifact"
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"
labels: ["internal"]


@@ -1,68 +0,0 @@
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
dependencyDashboard: true,
suppressNotifications: ["prEditedNotification"],
extends: ["config:recommended"],
labels: ["internal"],
schedule: ["before 4am on Monday"],
semanticCommits: "disabled",
separateMajorMinor: false,
prHourlyLimit: 10,
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
cargo: {
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
rangeStrategy: "update-lockfile",
},
pep621: {
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
},
npm: {
fileMatch: ["^playground/.*package\\.json$"],
},
"pre-commit": {
enabled: true,
},
packageRules: [
{
// Group upload/download artifact updates, the versions are dependent
groupName: "Artifact GitHub Actions dependencies",
matchManagers: ["github-actions"],
matchPackagePatterns: ["actions/.*-artifact"],
description: "Weekly update of artifact-related GitHub Actions dependencies",
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],
description: "Weekly update of pre-commit dependencies",
},
{
groupName: "NPM Development dependencies",
matchManagers: ["npm"],
matchDepTypes: ["devDependencies"],
description: "Weekly update of NPM development dependencies",
},
{
groupName: "Monaco",
matchManagers: ["npm"],
matchPackagePatterns: ["monaco"],
description: "Weekly update of the Monaco editor",
},
{
groupName: "strum",
matchManagers: ["cargo"],
matchPackagePatterns: ["strum"],
description: "Weekly update of strum dependencies",
},
{
groupName: "ESLint",
matchManagers: ["npm"],
matchPackageNames: ["eslint"],
allowedVersions: "<9",
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
},
],
vulnerabilityAlerts: {
commitMessageSuffix: "",
labels: ["internal", "security"],
},
}


@@ -35,7 +35,7 @@ jobs:
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v44
- uses: tj-actions/changed-files@v42
id: changed
with:
files_yaml: |
@@ -525,23 +525,8 @@ jobs:
- uses: Swatinem/rust-cache@v2
# Codspeed comes with a very ancient cargo version (1.66) that resolves features flags differently than what we use now.
# This can result in build failures; see https://github.com/astral-sh/ruff/pull/10700.
# There's a pending codspeed PR to upgrade to a newer cargo version, but until that's merged, we need to use the workaround below.
# https://github.com/CodSpeedHQ/codspeed-rust/pull/31
# What we do is to call cargo build manually with the correct feature flags and RUSTC settings. We'll have to
# manually maintain the list of benchmarks to run with codspeed (the benefit is that we could detect which benchmarks to run and build based on the changes).
# This is inspired by https://github.com/oxc-project/oxc/blob/a0532adc654039a0c7ead7b35216dfa0b0cb8e8f/.github/workflows/benchmark.yml
- name: "Build benchmarks"
env:
RUSTFLAGS: "-C debuginfo=2 -C strip=none -g --cfg codspeed"
shell: bash
# Build all benchmarks, copy the binary to the codspeed directory, remove any `*.d` files that might have been created.
run: |
cargo build --release -p ruff_benchmark --bench parser --bench linter --bench formatter --bench lexer --features=codspeed
mkdir -p ./target/codspeed/ruff_benchmark
cp ./target/release/deps/{lexer,parser,linter,formatter}* target/codspeed/ruff_benchmark/
rm -rf ./target/codspeed/ruff_benchmark/*.d
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v2

.gitignore (vendored, 1 change)

@@ -92,7 +92,6 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
repos/
# Translations
*.mo


@@ -17,4 +17,4 @@ MD013: false
# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
siblings_only: true
allow_different_nesting: true


@@ -13,7 +13,7 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.16
rev: v0.15
hooks:
- id: validate-pyproject
@@ -31,7 +31,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.39.0
rev: v0.37.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -41,7 +41,7 @@ repos:
)$
- repo: https://github.com/crate-ci/typos
rev: v1.20.8
rev: v1.16.22
hooks:
- id: typos
@@ -55,7 +55,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.7
rev: v0.1.4
hooks:
- id: ruff-format
- id: ruff
@@ -70,7 +70,7 @@ repos:
# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
rev: v3.0.3
hooks:
- id: prettier
types: [yaml]


@@ -1,192 +1,5 @@
# Changelog
## 0.3.7
### Preview features
- \[`flake8-bugbear`\] Implement `loop-iterator-mutation` (`B909`) ([#9578](https://github.com/astral-sh/ruff/pull/9578))
- \[`pylint`\] Implement rule to prefer augmented assignment (`PLR6104`) ([#9932](https://github.com/astral-sh/ruff/pull/9932))
### Bug fixes
- Avoid TOCTOU errors in cache initialization ([#10884](https://github.com/astral-sh/ruff/pull/10884))
- \[`pylint`\] Recode `nan-comparison` rule to `W0177` ([#10894](https://github.com/astral-sh/ruff/pull/10894))
- \[`pylint`\] Reverse min-max logic in `if-stmt-min-max` ([#10890](https://github.com/astral-sh/ruff/pull/10890))
## 0.3.6
### Preview features
- \[`pylint`\] Implement `bad-staticmethod-argument` (`PLW0211`) ([#10781](https://github.com/astral-sh/ruff/pull/10781))
- \[`pylint`\] Implement `if-stmt-min-max` (`PLR1730`, `PLR1731`) ([#10002](https://github.com/astral-sh/ruff/pull/10002))
- \[`pyupgrade`\] Replace `str,Enum` multiple inheritance with `StrEnum` `UP042` ([#10713](https://github.com/astral-sh/ruff/pull/10713))
- \[`refurb`\] Implement `if-expr-instead-of-or-operator` (`FURB110`) ([#10687](https://github.com/astral-sh/ruff/pull/10687))
- \[`refurb`\] Implement `int-on-sliced-str` (`FURB166`) ([#10650](https://github.com/astral-sh/ruff/pull/10650))
- \[`refurb`\] Implement `write-whole-file` (`FURB103`) ([#10802](https://github.com/astral-sh/ruff/pull/10802))
- \[`refurb`\] Support `itemgetter` in `reimplemented-operator` (`FURB118`) ([#10526](https://github.com/astral-sh/ruff/pull/10526))
- \[`flake8_comprehensions`\] Add `sum`/`min`/`max` to unnecessary comprehension check (`C419`) ([#10759](https://github.com/astral-sh/ruff/pull/10759))
### Rule changes
- \[`pydocstyle`\] Require capitalizing docstrings where the first sentence is a single word (`D403`) ([#10776](https://github.com/astral-sh/ruff/pull/10776))
- \[`pycodestyle`\] Ignore annotated lambdas in class scopes (`E731`) ([#10720](https://github.com/astral-sh/ruff/pull/10720))
- \[`flake8-pyi`\] Various improvements to PYI034 ([#10807](https://github.com/astral-sh/ruff/pull/10807))
- \[`flake8-slots`\] Flag subclasses of call-based `typing.NamedTuple`s as well as subclasses of `collections.namedtuple()` (`SLOT002`) ([#10808](https://github.com/astral-sh/ruff/pull/10808))
- \[`pyflakes`\] Allow forward references in class bases in stub files (`F821`) ([#10779](https://github.com/astral-sh/ruff/pull/10779))
- \[`pygrep-hooks`\] Improve `blanket-noqa` error message (`PGH004`) ([#10851](https://github.com/astral-sh/ruff/pull/10851))
### CLI
- Support `FORCE_COLOR` env var ([#10839](https://github.com/astral-sh/ruff/pull/10839))
### Configuration
- Support negated patterns in `[extend-]per-file-ignores` ([#10852](https://github.com/astral-sh/ruff/pull/10852))
### Bug fixes
- \[`flake8-import-conventions`\] Accept non-aliased (but correct) import in `unconventional-import-alias` (`ICN001`) ([#10729](https://github.com/astral-sh/ruff/pull/10729))
- \[`flake8-quotes`\] Add semantic model flag when inside f-string replacement field ([#10766](https://github.com/astral-sh/ruff/pull/10766))
- \[`pep8-naming`\] Recursively resolve `TypeDicts` for N815 violations ([#10719](https://github.com/astral-sh/ruff/pull/10719))
- \[`flake8-quotes`\] Respect `Q00*` ignores in `flake8-quotes` rules ([#10728](https://github.com/astral-sh/ruff/pull/10728))
- \[`flake8-simplify`\] Show negated condition in `needless-bool` diagnostics (`SIM103`) ([#10854](https://github.com/astral-sh/ruff/pull/10854))
- \[`ruff`\] Use within-scope shadowed bindings in `asyncio-dangling-task` (`RUF006`) ([#10793](https://github.com/astral-sh/ruff/pull/10793))
- \[`flake8-pytest-style`\] Fix single-tuple conversion in `pytest-parametrize-values-wrong-type` (`PT007`) ([#10862](https://github.com/astral-sh/ruff/pull/10862))
- \[`flake8-return`\] Ignore assignments to annotated variables in `unnecessary-assign` (`RET504`) ([#10741](https://github.com/astral-sh/ruff/pull/10741))
- \[`refurb`\] Do not allow any keyword arguments for `read-whole-file` in `rb` mode (`FURB101`) ([#10803](https://github.com/astral-sh/ruff/pull/10803))
- \[`pylint`\] Don't recommend decorating staticmethods with `@singledispatch` (`PLE1519`, `PLE1520`) ([#10637](https://github.com/astral-sh/ruff/pull/10637))
- \[`pydocstyle`\] Use section name range for all section-related docstring diagnostics ([#10740](https://github.com/astral-sh/ruff/pull/10740))
- Respect `# noqa` directives on `__all__` openers ([#10798](https://github.com/astral-sh/ruff/pull/10798))
## 0.3.5
### Preview features
- \[`pylint`\] Implement `modified-iterating-set` (`E4703`) ([#10473](https://github.com/astral-sh/ruff/pull/10473))
- \[`refurb`\] Implement `for-loop-set-mutations` (`FURB142`) ([#10583](https://github.com/astral-sh/ruff/pull/10583))
- \[`refurb`\] Implement `unnecessary-from-float` (`FURB164`) ([#10647](https://github.com/astral-sh/ruff/pull/10647))
- \[`refurb`\] Implement `verbose-decimal-constructor` (`FURB157`) ([#10533](https://github.com/astral-sh/ruff/pull/10533))
### Rule changes
- \[`flake8-comprehensions`\] Handled special case for `C401` which also matches `C416` ([#10596](https://github.com/astral-sh/ruff/pull/10596))
- \[`flake8-pyi`\] Mark `unaliased-collections-abc-set-import` fix as "safe" for more cases in stub files (`PYI025`) ([#10547](https://github.com/astral-sh/ruff/pull/10547))
- \[`numpy`\] Add `row_stack` to NumPy 2.0 migration rule ([#10646](https://github.com/astral-sh/ruff/pull/10646))
- \[`pycodestyle`\] Allow cell magics before an import (`E402`) ([#10545](https://github.com/astral-sh/ruff/pull/10545))
- \[`pycodestyle`\] Avoid blank line rules for the first logical line in cell ([#10291](https://github.com/astral-sh/ruff/pull/10291))
### Configuration
- Respected nested namespace packages ([#10541](https://github.com/astral-sh/ruff/pull/10541))
- \[`flake8-boolean-trap`\] Add setting for user defined allowed boolean trap ([#10531](https://github.com/astral-sh/ruff/pull/10531))
### Bug fixes
- Correctly handle references in `__all__` definitions when renaming symbols in autofixes ([#10527](https://github.com/astral-sh/ruff/pull/10527))
- Track ranges of names inside `__all__` definitions ([#10525](https://github.com/astral-sh/ruff/pull/10525))
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`) ([#10539](https://github.com/astral-sh/ruff/pull/10539))
- \[`flake8-copyright`\] Accept commas in default copyright pattern ([#9498](https://github.com/astral-sh/ruff/pull/9498))
- \[`flake8-datetimez`\] Allow f-strings with `%z` for `DTZ007` ([#10651](https://github.com/astral-sh/ruff/pull/10651))
- \[`flake8-pytest-style`\] Fix `PT014` autofix for last item in list ([#10532](https://github.com/astral-sh/ruff/pull/10532))
- \[`flake8-quotes`\] Ignore `Q000`, `Q001` when string is inside forward ref ([#10585](https://github.com/astral-sh/ruff/pull/10585))
- \[`isort`\] Always place non-relative imports after relative imports ([#10669](https://github.com/astral-sh/ruff/pull/10669))
- \[`isort`\] Respect Unicode characters in import sorting ([#10529](https://github.com/astral-sh/ruff/pull/10529))
- \[`pyflakes`\] Fix F821 false negatives when `from __future__ import annotations` is active (attempt 2) ([#10524](https://github.com/astral-sh/ruff/pull/10524))
- \[`pyflakes`\] Make `unnecessary-lambda` an always-unsafe fix ([#10668](https://github.com/astral-sh/ruff/pull/10668))
- \[`pylint`\] Fixed false-positive on the rule `PLW1641` (`eq-without-hash`) ([#10566](https://github.com/astral-sh/ruff/pull/10566))
- \[`ruff`\] Fix panic in unused `# noqa` removal with multi-byte space (`RUF100`) ([#10682](https://github.com/astral-sh/ruff/pull/10682))
### Documentation
- Add PR title format to `CONTRIBUTING.md` ([#10665](https://github.com/astral-sh/ruff/pull/10665))
- Fix list markup to include blank lines required ([#10591](https://github.com/astral-sh/ruff/pull/10591))
- Put `flake8-logging` next to the other flake8 plugins in registry ([#10587](https://github.com/astral-sh/ruff/pull/10587))
- \[`flake8-bandit`\] Update warning message for rule `S305` to address insecure block cipher mode use ([#10602](https://github.com/astral-sh/ruff/pull/10602))
- \[`flake8-bugbear`\] Document use of anonymous assignment in `useless-expression` ([#10551](https://github.com/astral-sh/ruff/pull/10551))
- \[`flake8-datetimez`\] Clarify error messages and docs for `DTZ` rules ([#10621](https://github.com/astral-sh/ruff/pull/10621))
- \[`pycodestyle`\] Use same before vs. after numbers for `space-around-operator` ([#10640](https://github.com/astral-sh/ruff/pull/10640))
- \[`ruff`\] Change `quadratic-list-summation` docs to use `iadd` consistently ([#10666](https://github.com/astral-sh/ruff/pull/10666))
## 0.3.4
### Preview features
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`) ([#10414](https://github.com/astral-sh/ruff/pull/10414))
- \[`pylint`\] Implement `nan-comparison` (`PLW0117`) ([#10401](https://github.com/astral-sh/ruff/pull/10401))
- \[`pylint`\] Implement `nonlocal-and-global` (`E115`) ([#10407](https://github.com/astral-sh/ruff/pull/10407))
- \[`pylint`\] Implement `singledispatchmethod-function` (`PLE5120`) ([#10428](https://github.com/astral-sh/ruff/pull/10428))
- \[`refurb`\] Implement `list-reverse-copy` (`FURB187`) ([#10212](https://github.com/astral-sh/ruff/pull/10212))
### Rule changes
- \[`flake8-pytest-style`\] Add automatic fix for `pytest-parametrize-values-wrong-type` (`PT007`) ([#10461](https://github.com/astral-sh/ruff/pull/10461))
- \[`pycodestyle`\] Allow SPDX license headers to exceed the line length (`E501`) ([#10481](https://github.com/astral-sh/ruff/pull/10481))
### Formatter
- Fix unstable formatting for trailing subscript end-of-line comment ([#10492](https://github.com/astral-sh/ruff/pull/10492))
### Bug fixes
- Avoid code comment detection in PEP 723 script tags ([#10464](https://github.com/astral-sh/ruff/pull/10464))
- Avoid incorrect tuple transformation in single-element case (`C409`) ([#10491](https://github.com/astral-sh/ruff/pull/10491))
- Bug fix: Prevent fully defined links [`name`](link) from being reformatted ([#10442](https://github.com/astral-sh/ruff/pull/10442))
- Consider raw source code for `W605` ([#10480](https://github.com/astral-sh/ruff/pull/10480))
- Docs: Link inline settings when not part of options section ([#10499](https://github.com/astral-sh/ruff/pull/10499))
- Don't treat annotations as redefinitions in `.pyi` files ([#10512](https://github.com/astral-sh/ruff/pull/10512))
- Fix `E231` bug: Inconsistent catch compared to pycodestyle, such as when dict nested in list ([#10469](https://github.com/astral-sh/ruff/pull/10469))
- Fix pylint upstream categories not showing in docs ([#10441](https://github.com/astral-sh/ruff/pull/10441))
- Add missing `Options` references to blank line docs ([#10498](https://github.com/astral-sh/ruff/pull/10498))
- 'Revert "F821: Fix false negatives in .py files when `from __future__ import annotations` is active (#10362)"' ([#10513](https://github.com/astral-sh/ruff/pull/10513))
- Apply NFKC normalization to unicode identifiers in the lexer ([#10412](https://github.com/astral-sh/ruff/pull/10412))
- Avoid failures due to non-deterministic binding ordering ([#10478](https://github.com/astral-sh/ruff/pull/10478))
- \[`flake8-bugbear`\] Allow tuples of exceptions (`B030`) ([#10437](https://github.com/astral-sh/ruff/pull/10437))
- \[`flake8-quotes`\] Avoid syntax errors due to invalid quotes (`Q000, Q002`) ([#10199](https://github.com/astral-sh/ruff/pull/10199))
## 0.3.3
### Preview features
- \[`flake8-bandit`\]: Implement `S610` rule ([#10316](https://github.com/astral-sh/ruff/pull/10316))
- \[`pycodestyle`\] Implement `blank-line-at-end-of-file` (`W391`) ([#10243](https://github.com/astral-sh/ruff/pull/10243))
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`) ([#10292](https://github.com/astral-sh/ruff/pull/10292))
- \[`pylint`\] - implement `redeclared-assigned-name` (`W0128`) ([#9268](https://github.com/astral-sh/ruff/pull/9268))
### Rule changes
- \[`flake8_comprehensions`\] Handled special case for `C400` which also matches `C416` ([#10419](https://github.com/astral-sh/ruff/pull/10419))
- \[`flake8-bandit`\] Implement upstream updates for `S311`, `S324` and `S605` ([#10313](https://github.com/astral-sh/ruff/pull/10313))
- \[`pyflakes`\] Remove `F401` fix for `__init__` imports by default and allow opt-in to unsafe fix ([#10365](https://github.com/astral-sh/ruff/pull/10365))
- \[`pylint`\] Implement `invalid-bool-return-type` (`E304`) ([#10377](https://github.com/astral-sh/ruff/pull/10377))
- \[`pylint`\] Include builtin warnings in useless-exception-statement (`PLW0133`) ([#10394](https://github.com/astral-sh/ruff/pull/10394))
### CLI
- Add message on success to `ruff check` ([#8631](https://github.com/astral-sh/ruff/pull/8631))
### Bug fixes
- \[`PIE970`\] Allow trailing ellipsis in `typing.TYPE_CHECKING` ([#10413](https://github.com/astral-sh/ruff/pull/10413))
- Avoid `TRIO115` if the argument is a variable ([#10376](https://github.com/astral-sh/ruff/pull/10376))
- \[`F811`\] Avoid removing shadowed imports that point to different symbols ([#10387](https://github.com/astral-sh/ruff/pull/10387))
- Fix `F821` and `F822` false positives in `.pyi` files ([#10341](https://github.com/astral-sh/ruff/pull/10341))
- Fix `F821` false negatives in `.py` files when `from __future__ import annotations` is active ([#10362](https://github.com/astral-sh/ruff/pull/10362))
- Fix case where `Indexer` fails to identify continuation preceded by newline #10351 ([#10354](https://github.com/astral-sh/ruff/pull/10354))
- Sort hash maps in `Settings` display ([#10370](https://github.com/astral-sh/ruff/pull/10370))
- Track conditional deletions in the semantic model ([#10415](https://github.com/astral-sh/ruff/pull/10415))
- \[`C413`\] Wrap expressions in parentheses when negating ([#10346](https://github.com/astral-sh/ruff/pull/10346))
- \[`pycodestyle`\] Do not ignore lines before the first logical line in blank lines rules. ([#10382](https://github.com/astral-sh/ruff/pull/10382))
- \[`pycodestyle`\] Do not trigger `E225` and `E275` when the next token is a ')' ([#10315](https://github.com/astral-sh/ruff/pull/10315))
- \[`pylint`\] Avoid false-positive slot non-assignment for `__dict__` (`PLE0237`) ([#10348](https://github.com/astral-sh/ruff/pull/10348))
- Gate f-string struct size test for Rustc \< 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))
### Documentation
- Use `ruff.toml` format in README ([#10393](https://github.com/astral-sh/ruff/pull/10393))
- \[`RUF008`\] Make it clearer that a mutable default in a dataclass is only valid if it is typed as a ClassVar ([#10395](https://github.com/astral-sh/ruff/pull/10395))
- \[`pylint`\] Extend docs and test in `invalid-str-return-type` (`E307`) ([#10400](https://github.com/astral-sh/ruff/pull/10400))
- Remove `.` from `check` and `format` commands ([#10217](https://github.com/astral-sh/ruff/pull/10217))
## 0.3.2
### Preview features
@@ -204,7 +17,7 @@
- Fix unstable `with` items formatting ([#10274](https://github.com/astral-sh/ruff/pull/10274))
- Avoid repeating function calls in f-string conversions ([#10265](https://github.com/astral-sh/ruff/pull/10265))
- Fix E203 false positive for slices in format strings ([#10280](https://github.com/astral-sh/ruff/pull/10280))
- Fix incorrect `Parameter` range for `*args` and `**kwargs` ([#10283](https://github.com/astral-sh/ruff/pull/10283))
- Fix incorrect `Parameter` range for `*args` and `**kwargs` ([#10283](https://github.com/astral-sh/ruff/pull/10283))
- Treat `typing.Annotated` subscripts as type definitions ([#10285](https://github.com/astral-sh/ruff/pull/10285))
## 0.3.1
@@ -312,7 +125,8 @@ This release introduces the Ruff 2024.2 style, stabilizing the following changes
Highlights include:
- Initial support formatting f-strings (in `--preview`).
- Support for overriding arbitrary configuration options via the CLI through an expanded `--config` argument (e.g., `--config "lint.isort.combine-as-imports=false"`).
- Support for overriding arbitrary configuration options via the CLI through an expanded `--config`
argument (e.g., `--config "lint.isort.combine-as-imports=false"`).
- Significant performance improvements in Ruff's lexer, parser, and lint rules.
### Preview features
@@ -960,7 +774,7 @@ docstrings via the `docstring-code-format` setting.
- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
### Bug fixes
@@ -1089,7 +903,7 @@ docstrings via the `docstring-code-format` setting.
- \[`flake8-trio`\] Implement `TRIO115` ([#8486](https://github.com/astral-sh/ruff/pull/8486))
- \[`refurb`\] Implement `type-none-comparison` (`FURB169`) ([#8487](https://github.com/astral-sh/ruff/pull/8487))
- Flag all comparisons against builtin types in `E721` ([#8491](https://github.com/astral-sh/ruff/pull/8491))
- Make `SIM118` fix as safe when the expression is a known dictionary ([#8525](https://github.com/astral-sh/ruff/pull/8525))
- Make `SIM118` fix as safe when the expression is a known dictionary ([#8525](https://github.com/astral-sh/ruff/pull/8525))
### Formatter
@@ -1257,7 +1071,7 @@ Try it today with `ruff format`! [Check out the blog post](https://astral.sh/blo
- Add `backports.strenum` to `deprecated-imports` ([#8113](https://github.com/astral-sh/ruff/pull/8113))
- Update `SIM112` to ignore `https_proxy`, `http_proxy`, and `no_proxy` ([#8140](https://github.com/astral-sh/ruff/pull/8140))
- Update fix for `literal-membership` (`PLR6201`) to be unsafe ([#8097](https://github.com/astral-sh/ruff/pull/8097))
- Update fix for `mutable-argument-defaults` (`B006`) to be unsafe ([#8108](https://github.com/astral-sh/ruff/pull/8108))
- Update fix for `mutable-argument-defaults` (`B006`) to be unsafe ([#8108](https://github.com/astral-sh/ruff/pull/8108))
### Formatter


@@ -33,18 +33,27 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
## The Basics
Ruff welcomes contributions in the form of pull requests.
Ruff welcomes contributions in the form of Pull Requests.
For small changes (e.g., bug fixes), feel free to submit a PR.
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
You can also join us on [**Discord**](https://discord.com/invite/astral-sh) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
that are ready for contributions.
If you're looking for a place to start, we recommend implementing a new lint rule (see:
[_Adding a new lint rule_](#example-adding-a-new-lint-rule), which will allow you to learn from and
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
existing Python plugins, which can be used as a reference implementation.
As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
guidance.
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
### Prerequisites
@@ -98,7 +107,7 @@ RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```
These checks will run on GitHub Actions when you open your pull request, but running them locally
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
will save you time and expedite the merge process.
Note that many code changes also require updating the snapshot tests, which is done interactively
@@ -108,14 +117,7 @@ after running `cargo test` like so:
cargo insta review
```
If your pull request relates to a specific lint rule, include the category and rule code in the
title, as in the following examples:
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
prior to merging.
### Project Structure
@@ -123,8 +125,8 @@ prior to merging.
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
such that all crates are contained in a flat `crates` directory.
The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
At the time of writing, the repository includes the following crates:
@@ -197,14 +199,11 @@ and calling out to lint rule analyzer functions as it goes.
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
Once you're satisfied with your code, add tests for your rule
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
associated assets (like our JSON Schema) with `cargo dev generate-all`.
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
for more details.
Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
in:
> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
`cargo dev generate-all`.
#### Rule naming convention
@@ -814,8 +813,8 @@ To understand Ruff's import categorization system, we first need to define two c
"project root".)
- "Package root": The top-most directory defining the Python package that includes a given Python
file. To find the package root for a given Python file, traverse up its parent directories until
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
just before that, i.e., the first directory in the package.
For example, given:
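As an illustrative aside (not part of this diff), here is a minimal Rust sketch of the package-root traversal described above; `find_package_root` is a hypothetical helper, and the namespace-package exclusion is omitted:

```rust
use std::path::{Path, PathBuf};

/// Hypothetical helper, for illustration only: starting from a Python file's
/// directory, climb upward while each parent directory also contains an
/// `__init__.py`; the last directory that did is the package root.
fn find_package_root(python_file: &Path) -> Option<PathBuf> {
    let mut current = python_file.parent()?;
    // The file's own directory must itself be a package.
    if !current.join("__init__.py").is_file() {
        return None;
    }
    // Keep climbing while the parent is also a package.
    while let Some(parent) = current.parent() {
        if parent.join("__init__.py").is_file() {
            current = parent;
        } else {
            break;
        }
    }
    Some(current.to_path_buf())
}
```

The real traversal additionally treats directories configured as [namespace packages](https://docs.astral.sh/ruff/settings/#namespace-packages) as package directories even though they lack an `__init__.py`.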

Cargo.lock (generated, 453 changes): file diff suppressed because it is too large.


@@ -12,20 +12,22 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
[workspace.dependencies]
aho-corasick = { version = "1.1.3" }
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
argfile = { version = "0.2.0" }
argfile = { version = "0.1.6" }
assert_cmd = { version = "2.0.13" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bitflags = { version = "2.4.1" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap = { version = "4.5.2", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "3.0.0" }
clearscreen = { version = "2.0.0" }
codspeed-criterion-compat = { version = "2.4.0", default-features = false }
colored = { version = "2.1.0" }
configparser = { version = "3.0.3" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
@@ -33,7 +35,7 @@ criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
env_logger = { version = "0.10.1" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
fs-err = { version = "2.11.0" }
@@ -46,7 +48,7 @@ imperative = { version = "1.0.4" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1", feature = ["filters", "glob"] }
insta-cmd = { version = "0.6.0" }
insta-cmd = { version = "0.4.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.1" }
@@ -63,18 +65,17 @@ memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
num_cpus = { version = "1.16.0" }
once_cell = { version = "1.19.0" }
path-absolutize = { version = "3.1.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
proc-macro2 = { version = "1.0.78" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.3.5" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
rayon = { version = "1.8.1" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
rustc-hash = { version = "1.1.0" }
@@ -88,32 +89,31 @@ serde_with = { version = "3.6.0", default-features = false, features = ["macros"
shellexpand = { version = "3.0.0" }
shlex = { version = "1.3.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
smallvec = { version = "1.13.1" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.51" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
thiserror = { version = "1.0.57" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.11" }
toml = { version = "0.8.9" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
tracing-tree = { version = "0.2.4" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wasm-bindgen-test = { version = "0.3.40" }
wild = { version = "2" }
[workspace.lints.rust]


@@ -32,7 +32,7 @@ An extremely fast Python linter and code formatter, written in Rust.
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
@@ -151,7 +151,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.3.7
rev: v0.3.2
hooks:
# Run the linter.
- id: ruff
@@ -272,7 +272,7 @@ for more on the linting and formatting commands, respectively.
<!-- Begin section: Rules -->
**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
**Ruff supports over 700 lint rules**, many of which are inspired by popular tools like Flake8,
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.
@@ -429,7 +429,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Mypy](https://github.com/python/mypy)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)


@@ -3,11 +3,9 @@
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.


@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.3.7"
version = "0.3.2"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -41,7 +41,6 @@ is-macro = { workspace = true }
itertools = { workspace = true }
log = { workspace = true }
notify = { workspace = true }
num_cpus = { workspace = true }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
rayon = { workspace = true }
regex = { workspace = true }
@@ -54,7 +53,7 @@ tempfile = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["registry"] }
tracing-subscriber = { workspace = true, features = ["registry"]}
tracing-tree = { workspace = true }
walkdir = { workspace = true }
wild = { workspace = true }
@@ -62,8 +61,9 @@ wild = { workspace = true }
[dev-dependencies]
# Enable test rules during development
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
assert_cmd = { workspace = true }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
colored = { workspace = true, features = ["no-color"]}
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }


@@ -496,7 +496,7 @@ pub struct FormatCommand {
pub range: Option<FormatRange>,
}
#[derive(Copy, Clone, Debug, clap::Parser)]
#[derive(Clone, Debug, clap::Parser)]
pub struct ServerCommand {
/// Enable preview mode; required for regular operation
#[arg(long)]


@@ -375,17 +375,15 @@ pub(crate) fn init(path: &Path) -> Result<()> {
fs::create_dir_all(path.join(VERSION))?;
// Add the CACHEDIR.TAG.
cachedir::ensure_tag(path)?;
if !cachedir::is_tagged(path)? {
cachedir::add_tag(path)?;
}
// Add the .gitignore.
match fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(path.join(".gitignore"))
{
Ok(mut file) => file.write_all(b"# Automatically created by ruff.\n*\n")?,
Err(err) if err.kind() == io::ErrorKind::AlreadyExists => (),
Err(err) => return Err(err.into()),
let gitignore_path = path.join(".gitignore");
if !gitignore_path.exists() {
let mut file = fs::File::create(gitignore_path)?;
file.write_all(b"# Automatically created by ruff.\n*\n")?;
}
Ok(())


@@ -252,7 +252,6 @@ mod test {
for file in [&pyproject_toml, &python_file, &notebook] {
fs::OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.mode(0o000)
.open(file)?;


@@ -1,5 +1,3 @@
use std::num::NonZeroUsize;
use crate::ExitStatus;
use anyhow::Result;
use ruff_linter::logging::LogLevel;
@@ -11,11 +9,7 @@ use tracing_subscriber::{
};
use tracing_tree::time::Uptime;
pub(crate) fn run_server(
preview: bool,
worker_threads: NonZeroUsize,
log_level: LogLevel,
) -> Result<ExitStatus> {
pub(crate) fn run_server(preview: bool, log_level: LogLevel) -> Result<ExitStatus> {
if !preview {
tracing::error!("--preview needs to be provided as a command line argument while the server is still unstable.\nFor example: `ruff server --preview`");
return Ok(ExitStatus::Error);
@@ -39,7 +33,7 @@ pub(crate) fn run_server(
tracing::subscriber::set_global_default(subscriber)?;
let server = Server::new(worker_threads)?;
let server = Server::new()?;
server.run().map(|()| ExitStatus::Success)
}


@@ -2,7 +2,6 @@
use std::fs::File;
use std::io::{self, stdout, BufWriter, Write};
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use std::sync::mpsc::channel;
@@ -149,13 +148,6 @@ pub fn run(
#[cfg(windows)]
assert!(colored::control::set_virtual_terminal(true).is_ok());
// support FORCE_COLOR env var
if let Some(force_color) = std::env::var_os("FORCE_COLOR") {
if force_color.len() > 0 {
colored::control::set_override(true);
}
}
set_up_logging(global_options.log_level())?;
if let Some(deprecated_alias_warning) = deprecated_alias_warning {
@@ -212,15 +204,10 @@ fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitS
}
}
#[allow(clippy::needless_pass_by_value)] // TODO: remove once we start taking arguments from here
fn server(args: ServerCommand, log_level: LogLevel) -> Result<ExitStatus> {
let ServerCommand { preview } = args;
// by default, we set the number of worker threads to `num_cpus`, with a maximum of 4.
let worker_threads = num_cpus::get().max(4);
commands::server::run_server(
preview,
NonZeroUsize::try_from(worker_threads).expect("a non-zero worker thread count"),
log_level,
)
commands::server::run_server(preview, log_level)
}
pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {


@@ -16,7 +16,7 @@ impl std::fmt::Display for PanicError {
}
thread_local! {
static LAST_PANIC: std::cell::Cell<Option<PanicError>> = const { std::cell::Cell::new(None) };
static LAST_PANIC: std::cell::Cell<Option<PanicError>> = std::cell::Cell::new(None);
}
/// [`catch_unwind`](std::panic::catch_unwind) wrapper that sets a custom [`set_hook`](std::panic::set_hook)


@@ -70,7 +70,7 @@ pub(crate) fn version() -> VersionInfo {
#[cfg(test)]
mod tests {
use insta::{assert_json_snapshot, assert_snapshot};
use insta::{assert_display_snapshot, assert_json_snapshot};
use super::{CommitInfo, VersionInfo};
@@ -80,7 +80,7 @@ mod tests {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}
#[test]
@@ -95,7 +95,7 @@ mod tests {
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}
#[test]
@@ -110,7 +110,7 @@ mod tests {
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}
#[test]


@@ -1353,7 +1353,6 @@ fn unreadable_pyproject_toml() -> Result<()> {
// Create an empty file with 000 permissions
fs::OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.mode(0o000)
.open(pyproject_toml)?;


@@ -1168,119 +1168,3 @@ def func():
Ok(())
}
/// Per-file selects via ! negation in per-file-ignores
#[test]
fn negated_per_file_ignores() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint.per-file-ignores]
"!selected.py" = ["RUF"]
"#,
)?;
let selected = tempdir.path().join("selected.py");
fs::write(selected, "")?;
let ignored = tempdir.path().join("ignored.py");
fs::write(ignored, "")?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.arg("--select")
.arg("RUF901")
.current_dir(&tempdir)
, @r###"
success: false
exit_code: 1
----- stdout -----
selected.py:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
#[test]
fn negated_per_file_ignores_absolute() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint.per-file-ignores]
"!src/**.py" = ["RUF"]
"#,
)?;
let src_dir = tempdir.path().join("src");
fs::create_dir(&src_dir)?;
let selected = src_dir.join("selected.py");
fs::write(selected, "")?;
let ignored = tempdir.path().join("ignored.py");
fs::write(ignored, "")?;
insta::with_settings!({filters => vec![
// Replace windows paths
(r"\\", "/"),
]}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.arg("--select")
.arg("RUF901")
.current_dir(&tempdir)
, @r###"
success: false
exit_code: 1
----- stdout -----
src/selected.py:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
"###);
});
Ok(())
}
/// patterns are additive, can't use negative patterns to "un-ignore"
#[test]
fn negated_per_file_ignores_overlap() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint.per-file-ignores]
"*.py" = ["RUF"]
"!foo.py" = ["RUF"]
"#,
)?;
let foo_file = tempdir.path().join("foo.py");
fs::write(foo_file, "")?;
let bar_file = tempdir.path().join("bar.py");
fs::write(bar_file, "")?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.arg("--select")
.arg("RUF901")
.current_dir(&tempdir)
, @r###"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
"###);
Ok(())
}


@@ -50,7 +50,6 @@ file_resolver.exclude = [
"venv",
]
file_resolver.extend_exclude = [
"crates/ruff/resources/",
"crates/ruff_linter/resources/",
"crates/ruff_python_formatter/resources/",
]
@@ -232,7 +231,7 @@ linter.flake8_bandit.check_typed_exception = false
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.builtins_ignorelist = []
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|,\s)\d{4})*
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}(-\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0


@@ -65,7 +65,7 @@ use seahash::SeaHasher;
/// The main reason is that hashes and cache keys have different constraints:
///
/// * Cache keys are less performance sensitive: Hashes must be super fast to compute for performant hashed-collections. That's
/// why some standard types don't implement [`Hash`] where it would be safe to implement [`CacheKey`], e.g. `HashSet`
/// why some standard types don't implement [`Hash`] where it would be safe to to implement [`CacheKey`], e.g. `HashSet`
/// * Cache keys must be deterministic where hash keys do not have this constraint. That's why pointers don't implement [`CacheKey`] but they implement [`Hash`].
/// * Ideally, cache keys are portable
///


@@ -22,7 +22,7 @@ ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"] }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}
anyhow = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
@@ -31,6 +31,7 @@ imara-diff = { workspace = true }
indicatif = { workspace = true }
itertools = { workspace = true }
libcst = { workspace = true }
once_cell = { workspace = true }
pretty_assertions = { workspace = true }
rayon = { workspace = true }
regex = { workspace = true }


@@ -134,7 +134,7 @@ impl Statistics {
}
}
/// We currently prefer the similarity index, but i'd like to keep this around
/// We currently prefer the the similarity index, but i'd like to keep this around
#[allow(clippy::cast_precision_loss, unused)]
pub(crate) fn jaccard_index(&self) -> f32 {
self.intersection as f32 / (self.black_input + self.ruff_output + self.intersection) as f32


@@ -1,7 +1,6 @@
//! Generate Markdown documentation for applicable rules.
#![allow(clippy::print_stdout, clippy::print_stderr)]
use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;
@@ -98,13 +97,12 @@ pub(crate) fn main(args: &Args) -> Result<()> {
fn process_documentation(documentation: &str, out: &mut String, rule_name: &str) {
let mut in_options = false;
let mut after = String::new();
let mut referenced_options = HashSet::new();
// HACK: This is an ugly regex hack that's necessary because mkdocs uses
// a non-CommonMark-compliant Markdown parser, which doesn't support code
// tags in link definitions
// (see https://github.com/Python-Markdown/markdown/issues/280).
let documentation = Regex::new(r"\[`([^`]*?)`]($|[^\[(])").unwrap().replace_all(
let documentation = Regex::new(r"\[`([^`]*?)`]($|[^\[])").unwrap().replace_all(
documentation,
|caps: &Captures| {
format!(
@@ -137,7 +135,6 @@ fn process_documentation(documentation: &str, out: &mut String, rule_name: &str)
let anchor = option.replace('.', "_");
out.push_str(&format!("- [`{option}`][{option}]\n"));
after.push_str(&format!("[{option}]: ../settings.md#{anchor}\n"));
referenced_options.insert(option);
continue;
}
@@ -145,20 +142,6 @@ fn process_documentation(documentation: &str, out: &mut String, rule_name: &str)
out.push_str(line);
}
let re = Regex::new(r"\[`([^`]*?)`]\[(.*?)]").unwrap();
for (_, [option, _]) in re.captures_iter(&documentation).map(|c| c.extract()) {
if let Some(OptionEntry::Field(field)) = Options::metadata().find(option) {
if referenced_options.insert(option) {
let anchor = option.replace('.', "_");
after.push_str(&format!("[{option}]: ../settings.md#{anchor}\n"));
}
if field.deprecated.is_some() {
eprintln!("Rule {rule_name} references deprecated option {option}.");
}
}
}
if !after.is_empty() {
out.push('\n');
out.push('\n');
@@ -176,7 +159,7 @@ mod tests {
process_documentation(
"
See also [`lint.mccabe.max-complexity`] and [`lint.task-tags`].
Something [`else`][other]. Some [link](https://example.com).
Something [`else`][other].
## Options
@@ -191,7 +174,7 @@ Something [`else`][other]. Some [link](https://example.com).
output,
"
See also [`lint.mccabe.max-complexity`][lint.mccabe.max-complexity] and [`lint.task-tags`][lint.task-tags].
Something [`else`][other]. Some [link](https://example.com).
Something [`else`][other].
## Options


@@ -180,22 +180,8 @@ pub(crate) fn generate() -> String {
.map(|rule| (rule.upstream_category(&linter), rule))
.into_group_map();
let mut rules_by_upstream_category: Vec<_> = rules_by_upstream_category.iter().collect();
// Sort the upstream categories alphabetically by prefix.
rules_by_upstream_category.sort_by(|(a, _), (b, _)| {
a.as_ref()
.map(|category| category.prefix)
.unwrap_or_default()
.cmp(
b.as_ref()
.map(|category| category.prefix)
.unwrap_or_default(),
)
});
if rules_by_upstream_category.len() > 1 {
for (opt, rules) in rules_by_upstream_category {
for (opt, rules) in &rules_by_upstream_category {
if opt.is_some() {
let UpstreamCategoryAndPrefix { category, prefix } = opt.unwrap();
table_out.push_str(&format!("#### {category} ({prefix})"));


@@ -71,14 +71,6 @@ impl Diagnostic {
}
}
/// Consumes `self` and returns a new `Diagnostic` with the given parent node.
#[inline]
#[must_use]
pub fn with_parent(mut self, parent: TextSize) -> Self {
self.set_parent(parent);
self
}
/// Set the location of the diagnostic's parent node.
#[inline]
pub fn set_parent(&mut self, parent: TextSize) {


@@ -25,6 +25,11 @@ pub trait Violation: Debug + PartialEq + Eq {
/// The message used to describe the violation.
fn message(&self) -> String;
/// The explanation used in documentation and elsewhere.
fn explanation() -> Option<&'static str> {
None
}
// TODO(micha): Move `fix_title` to `Fix`, add new `advice` method that is shown as an advice.
// Change the `Diagnostic` renderer to show the advice, and render the fix message after the `Suggested fix: <here>`
@@ -45,6 +50,11 @@ pub trait AlwaysFixableViolation: Debug + PartialEq + Eq {
/// The message used to describe the violation.
fn message(&self) -> String;
/// The explanation used in documentation and elsewhere.
fn explanation() -> Option<&'static str> {
None
}
/// The title displayed for the available fix.
fn fix_title(&self) -> String;
@@ -61,6 +71,10 @@ impl<V: AlwaysFixableViolation> Violation for V {
<Self as AlwaysFixableViolation>::message(self)
}
fn explanation() -> Option<&'static str> {
<Self as AlwaysFixableViolation>::explanation()
}
fn fix_title(&self) -> Option<String> {
Some(<Self as AlwaysFixableViolation>::fix_title(self))
}
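As a usage illustration (not part of this diff), here is a minimal sketch of the pattern this hunk adds: a defaulted `explanation()` on both traits, forwarded through the blanket impl, with a hypothetical `ExampleRule` overriding it. The trait definitions below are simplified stand-ins, not ruff's real ones:

```rust
// Simplified stand-ins for the real traits, for illustration only.
trait Violation {
    fn message(&self) -> String;
    /// Defaults to `None`; individual rules may override it.
    fn explanation() -> Option<&'static str> {
        None
    }
}

trait AlwaysFixableViolation {
    fn message(&self) -> String;
    fn explanation() -> Option<&'static str> {
        None
    }
    fn fix_title(&self) -> String;
}

// The blanket impl forwards `explanation()`, so an override written against
// `AlwaysFixableViolation` is still visible through the `Violation` interface.
impl<V: AlwaysFixableViolation> Violation for V {
    fn message(&self) -> String {
        <Self as AlwaysFixableViolation>::message(self)
    }
    fn explanation() -> Option<&'static str> {
        <Self as AlwaysFixableViolation>::explanation()
    }
}

// A hypothetical rule that opts in to providing an explanation.
struct ExampleRule;

impl AlwaysFixableViolation for ExampleRule {
    fn message(&self) -> String {
        "example violation".to_string()
    }
    fn explanation() -> Option<&'static str> {
        Some("Longer prose used in documentation and elsewhere.")
    }
    fn fix_title(&self) -> String {
        "Apply the example fix".to_string()
    }
}
```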


@@ -24,6 +24,7 @@ tracing = { workspace = true }
unicode-width = { workspace = true }
[dev-dependencies]
insta = { workspace = true }
[features]
serde = ["dep:serde", "ruff_text_size/serde"]


@@ -138,7 +138,7 @@ pub const fn empty_line() -> Line {
///
/// # Examples
///
/// The line breaks are emitted as spaces if the enclosing `Group` fits on a single line:
/// The line breaks are emitted as spaces if the enclosing `Group` fits on a a single line:
/// ```
/// use ruff_formatter::{format, format_args};
/// use ruff_formatter::prelude::*;


@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.3.7"
version = "0.3.2"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,6 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics", features = ["serde"] }
ruff_index = { path = "../ruff_index" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast", features = ["serde"] }
@@ -74,9 +75,11 @@ url = { workspace = true }
[dev-dependencies]
insta = { workspace = true }
pretty_assertions = { workspace = true }
test-case = { workspace = true }
# Disable colored output in tests
colored = { workspace = true, features = ["no-color"] }
tempfile = { workspace = true }
[features]
default = []


@@ -36,32 +36,3 @@ dictionary = {
# except:
# except Foo:
# except Exception as e: print(e)
# Script tag without an opening tag (Error)
# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]
# ///
# Script tag (OK)
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]
# ///
# Script tag without a closing tag (OK)
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]


@@ -17,9 +17,3 @@ urllib.request.URLopener().open(fullurl='http://www.google.com')
urllib.request.URLopener().open('http://www.google.com')
urllib.request.URLopener().open('file:///foo/bar/baz')
urllib.request.URLopener().open(url)
urllib.request.urlopen(url=urllib.request.Request('http://www.google.com'))
urllib.request.urlopen(url=urllib.request.Request('http://www.google.com'), **kwargs)
urllib.request.urlopen(urllib.request.Request('http://www.google.com'))
urllib.request.urlopen(urllib.request.Request('file:///foo/bar/baz'))
urllib.request.urlopen(urllib.request.Request(url))


@@ -31,7 +31,8 @@ def function(
kwonly_nonboolvalued_boolhint: bool = 1,
kwonly_nonboolvalued_boolstrhint: "bool" = 1,
**kw,
): ...
):
...
def used(do):
@@ -130,27 +131,4 @@ class Fit:
def __post_init__(self, force: bool) -> None:
print(force)
Fit(force=True)
# https://github.com/astral-sh/ruff/issues/10356
from django.db.models import Case, Q, Value, When
qs.annotate(
is_foo_or_bar=Case(
When(Q(is_foo=True) | Q(is_bar=True)),
then=Value(True),
),
default=Value(False),
)
# https://github.com/astral-sh/ruff/issues/10485
from pydantic import Field
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
foo: bool = Field(True, exclude=True)


@@ -9,69 +9,62 @@ B030:
try:
pass
except 1: # Error
except 1: # error
pass
try:
pass
except (1, ValueError): # Error
except (1, ValueError): # error
pass
try:
pass
except (ValueError, (RuntimeError, (KeyError, TypeError))): # Error
except (ValueError, (RuntimeError, (KeyError, TypeError))): # error
pass
try:
pass
except (ValueError, *(RuntimeError, (KeyError, TypeError))): # Error
except (ValueError, *(RuntimeError, (KeyError, TypeError))): # error
pass
try:
pass
except (*a, *(RuntimeError, (KeyError, TypeError))): # Error
except (*a, *(RuntimeError, (KeyError, TypeError))): # error
pass
try:
pass
except (ValueError, *(RuntimeError, TypeError)): # ok
pass
try:
pass
except (ValueError, *[RuntimeError, *(TypeError,)]): # ok
pass
try:
pass
except* a + (RuntimeError, (KeyError, TypeError)): # Error
except (*a, *b): # ok
pass
try:
pass
except (ValueError, *(RuntimeError, TypeError)): # OK
pass
try:
pass
except (ValueError, *[RuntimeError, *(TypeError,)]): # OK
except (*a, *(RuntimeError, TypeError)): # ok
pass
try:
pass
except (*a, *b): # OK
except (*a, *(b, c)): # ok
pass
try:
pass
except (*a, *(RuntimeError, TypeError)): # OK
pass
try:
pass
except (*a, *(b, c)): # OK
pass
try:
pass
except (*a, *(*b, *c)): # OK
except (*a, *(*b, *c)): # ok
pass
@@ -81,52 +74,5 @@ def what_to_catch():
try:
pass
except what_to_catch(): # OK
pass
try:
pass
except (a, b) + (c, d): # OK
pass
try:
pass
except* (a, b) + (c, d): # OK
pass
try:
pass
except* (a, (b) + (c)): # OK
pass
try:
pass
except (a, b) + (c, d) + (e, f): # OK
pass
try:
pass
except a + (b, c): # OK
pass
try:
pass
except (ValueError, *(RuntimeError, TypeError), *((ArithmeticError,) + (EOFError,))):
pass
try:
pass
except ((a, b) + (c, d)) + ((e, f) + (g)): # OK
pass
try:
pass
except (a, b) * (c, d): # B030
except what_to_catch(): # ok
pass


@@ -174,49 +174,6 @@ for (_key1, _key2), (_value1, _value2) in groupby(
collect_shop_items("Jane", group[1])
collect_shop_items("Joe", group[1])
# Shouldn't trigger the warning when there is a continue, break statement.
for _section, section_items in groupby(items, key=lambda p: p[1]):
if _section == "greens":
collect_shop_items(shopper, section_items)
continue
elif _section == "frozen items":
collect_shop_items(shopper, section_items)
break
collect_shop_items(shopper, section_items)
# Shouldn't trigger the warning when there is a return statement.
for _section, section_items in groupby(items, key=lambda p: p[1]):
if _section == "greens":
collect_shop_items(shopper, section_items)
return
elif _section == "frozen items":
return section_items
collect_shop_items(shopper, section_items)
# Should trigger the warning for duplicate access, even if there is a return statement after.
for _section, section_items in groupby(items, key=lambda p: p[1]):
if _section == "greens":
collect_shop_items(shopper, section_items)
collect_shop_items(shopper, section_items)
return
# Should trigger the warning for duplicate access, even if there is a return in another branch.
for _section, section_items in groupby(items, key=lambda p: p[1]):
if _section == "greens":
collect_shop_items(shopper, section_items)
return
elif _section == "frozen items":
collect_shop_items(shopper, section_items)
collect_shop_items(shopper, section_items)
# Should trigger, since only one branch has a return statement.
for _section, section_items in groupby(items, key=lambda p: p[1]):
if _section == "greens":
collect_shop_items(shopper, section_items)
return
elif _section == "frozen items":
collect_shop_items(shopper, section_items)
collect_shop_items(shopper, section_items) # B031
# Let's redefine the `groupby` function to make sure we pick up the correct one.
# NOTE: This should always be at the end of the file.

View File

@@ -1,160 +0,0 @@
"""
Should emit:
B909 - on lines 11, 25, 26, 40, 46
"""
# lists
some_list = [1, 2, 3]
some_other_list = [1, 2, 3]
for elem in some_list:
# errors
some_list.remove(0)
del some_list[2]
some_list.append(elem)
some_list.sort()
some_list.reverse()
some_list.clear()
some_list.extend([1, 2])
some_list.insert(1, 1)
some_list.pop(1)
some_list.pop()
# conditional break should error
if elem == 2:
some_list.remove(0)
if elem == 3:
break
# non-errors
some_other_list.remove(elem)
del some_list
del some_other_list
found_idx = some_list.index(elem)
some_list = 3
# unconditional break should not error
if elem == 2:
some_list.remove(elem)
break
# dicts
mydicts = {"a": {"foo": 1, "bar": 2}}
for elem in mydicts:
# errors
mydicts.popitem()
mydicts.setdefault("foo", 1)
mydicts.update({"foo": "bar"})
# no errors
elem.popitem()
elem.setdefault("foo", 1)
elem.update({"foo": "bar"})
# sets
myset = {1, 2, 3}
for _ in myset:
# errors
myset.update({4, 5})
myset.intersection_update({4, 5})
myset.difference_update({4, 5})
myset.symmetric_difference_update({4, 5})
myset.add(4)
myset.discard(3)
# no errors
del myset
# members
class A:
some_list: list
def __init__(self, ls):
self.some_list = list(ls)
a = A((1, 2, 3))
# ensure member accesses are handled as errors
for elem in a.some_list:
a.some_list.remove(0)
del a.some_list[2]
# Augassign should error
foo = [1, 2, 3]
bar = [4, 5, 6]
for _ in foo:
foo *= 2
foo += bar
foo[1] = 9
foo[1:2] = bar
foo[1:2:3] = bar
foo = {1, 2, 3}
bar = {4, 5, 6}
for _ in foo: # should error
foo |= bar
foo &= bar
foo -= bar
foo ^= bar
# more tests for unconditional breaks - should not error
for _ in foo:
foo.remove(1)
for _ in bar:
bar.remove(1)
break
break
# should not error
for _ in foo:
foo.remove(1)
for _ in bar:
...
break
# should error (?)
for _ in foo:
foo.remove(1)
if bar:
bar.remove(1)
break
break
# should error
for _ in foo:
if bar:
pass
else:
foo.remove(1)
# should error
for elem in some_list:
if some_list.pop() == 2:
pass
# should not error
for elem in some_list:
if some_list.pop() == 2:
break
# should error
for elem in some_list:
if some_list.pop() == 2:
pass
else:
break
# should not error
for elem in some_list:
del some_list[elem]
some_list[elem] = 1
some_list.remove(elem)
some_list.discard(elem)

View File

@@ -1,20 +1,11 @@
# Cannot combine with C416. Should use list comprehension here.
even_nums = list(2 * x for x in range(3))
odd_nums = list(
2 * x + 1 for x in range(3)
)
# Short-circuit case, combine with C416 and should produce x = list(range(3))
x = list(x for x in range(3))
x = list(
x for x in range(3)
)
# Not built-in list.
def list(*args, **kwargs):
return None
list(2 * x for x in range(3))
list(x for x in range(3))

View File

@@ -1,30 +1,20 @@
# Cannot combine with C416. Should use set comprehension here.
even_nums = set(2 * x for x in range(3))
odd_nums = set(
2 * x + 1 for x in range(3)
)
small_nums = f"{set(a if a < 6 else 0 for a in range(3))}"
x = set(x for x in range(3))
x = set(x for x in range(3))
y = f"{set(a if a < 6 else 0 for a in range(3))}"
_ = "{}".format(set(a if a < 6 else 0 for a in range(3)))
print(f"Hello {set(a for a in range(3))} World")
def f(x):
return x
print(f'Hello {set(a for a in "abc")} World')
print(f"Hello {set(a for a in 'abc')} World")
print(f"Hello {set(f(a) for a in 'abc')} World")
print(f"Hello { set(f(a) for a in 'abc') } World")
# Short-circuit case, combine with C416 and should produce x = set(range(3))
x = set(x for x in range(3))
x = set(
x for x in range(3)
)
print(f"Hello {set(a for a in range(3))} World")
print(f"{set(a for a in 'abc') - set(a for a in 'ab')}")
print(f"{ set(a for a in 'abc') - set(a for a in 'ab') }")
# Not built-in set.
def set(*args, **kwargs):
return None
set(2 * x for x in range(3))
set(x for x in range(3))
# The fix generated for this diagnostic is incorrect, as we add additional space
# around the set comprehension.
print(f"{ {set(a for a in 'abc')} }")

View File

@@ -16,11 +16,3 @@ tuple( # comment
tuple([ # comment
1, 2
])
tuple((
1,
))
t6 = tuple([1])
t7 = tuple((1,))
t8 = tuple([1,])

View File

@@ -13,10 +13,6 @@ all(x.id for x in bar)
all(x.id for x in bar)
any(x.id for x in bar)
all((x.id for x in bar))
# we don't lint on these in stable yet
sum([x.val for x in bar])
min([x.val for x in bar])
max([x.val for x in bar])
async def f() -> bool:

View File

@@ -1,8 +0,0 @@
sum([x.val for x in bar])
min([x.val for x in bar])
max([x.val for x in bar])
# Ok
sum(x.val for x in bar)
min(x.val for x in bar)
max(x.val for x in bar)

View File

@@ -1,3 +0,0 @@
# no lint if shadowed
def all(x): pass
all([x.id for x in bar])

View File

@@ -33,9 +33,3 @@ from datetime import datetime
# no replace or astimezone unqualified
datetime.strptime("something", "something")
# F-strings
datetime.strptime("something", f"%Y-%m-%dT%H:%M:%S{('.%f' if millis else '')}%z")
datetime.strptime("something", f"%Y-%m-%d %H:%M:%S%z")
# F-string is implicitly concatenated to another string
datetime.strptime("something", f"%Y-%m-%dT%H:%M:%S{('.%f' if millis else '')}" "%z")

View File

@@ -21,7 +21,6 @@ def unconventional_aliases():
import tkinter as tkr
import networkx as nxy
def conventional_aliases():
import altair as alt
import matplotlib.pyplot as plt

View File

@@ -1,10 +0,0 @@
def no_alias():
from django.conf import settings
def conventional_alias():
from django.conf import settings as settings
def unconventional_alias():
from django.conf import settings as s

View File

@@ -1,12 +1,9 @@
def func():
import logging
import logging
logging.WARN # LOG009
logging.WARNING # OK
logging.WARN # LOG009
logging.WARNING # OK
from logging import WARN, WARNING
def func():
from logging import WARN, WARNING
WARN # LOG009
WARNING # OK
WARN # LOG009
WARNING # OK

View File

@@ -227,11 +227,3 @@ class Repro[int](Protocol):
def impl(self) -> str:
"""Docstring"""
return self.func()
import typing
if typing.TYPE_CHECKING:
def contains_meaningful_ellipsis() -> list[int]:
"""Allow this in a TYPE_CHECKING block."""
...

View File

@@ -1,14 +0,0 @@
"""Tests to ensure we correctly rename references inside `__all__`"""
from collections.abc import Set
__all__ = ["Set"]
if True:
__all__ += [r'''Set''']
if 1:
__all__ += ["S" "e" "t"]
if not False:
__all__ += ["Se" 't']

View File

@@ -1,14 +0,0 @@
"""Tests to ensure we correctly rename references inside `__all__`"""
from collections.abc import Set
__all__ = ["Set"]
if True:
__all__ += [r'''Set''']
if 1:
__all__ += ["S" "e" "t"]
if not False:
__all__ += ["Se" 't']

View File

@@ -1,6 +0,0 @@
"""
Tests for PYI025 where the import is marked as re-exported
through usage of a "redundant" `import Set as Set` alias
"""
from collections.abc import Set as Set # PYI025 triggered but fix is not marked as safe

View File

@@ -1,6 +0,0 @@
"""
Tests for PYI025 where the import is marked as re-exported
through usage of a "redundant" `import Set as Set` alias
"""
from collections.abc import Set as Set # PYI025 triggered but fix is not marked as safe

View File

@@ -195,13 +195,6 @@ class BadAsyncIterator(collections.abc.AsyncIterator[str]):
def __aiter__(self) -> typing.AsyncIterator[str]:
... # Y034 "__aiter__" methods in classes like "BadAsyncIterator" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadAsyncIterator.__aiter__", e.g. "def __aiter__(self) -> Self: ..." # Y022 Use "collections.abc.AsyncIterator[T]" instead of "typing.AsyncIterator[T]" (PEP 585 syntax)
class SubclassOfBadIterator3(BadIterator3):
def __iter__(self) -> Iterator[int]: # Y034
...
class SubclassOfBadAsyncIterator(BadAsyncIterator):
def __aiter__(self) -> collections.abc.AsyncIterator[str]: # Y034
...
class AsyncIteratorReturningAsyncIterable:
def __aiter__(self) -> AsyncIterable[str]:
@@ -232,11 +225,6 @@ class MetaclassInWhichSelfCannotBeUsed4(ABCMeta):
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed4) -> MetaclassInWhichSelfCannotBeUsed4: ...
class SubclassOfMetaclassInWhichSelfCannotBeUsed(MetaclassInWhichSelfCannotBeUsed4):
def __new__(cls) -> SubclassOfMetaclassInWhichSelfCannotBeUsed: ...
def __enter__(self) -> SubclassOfMetaclassInWhichSelfCannotBeUsed: ...
async def __aenter__(self) -> SubclassOfMetaclassInWhichSelfCannotBeUsed: ...
def __isub__(self, other: SubclassOfMetaclassInWhichSelfCannotBeUsed) -> SubclassOfMetaclassInWhichSelfCannotBeUsed: ...
class Abstract(Iterator[str]):
@abstractmethod

View File

@@ -79,14 +79,5 @@ def test_single_list_of_lists(param):
@pytest.mark.parametrize("a", [1, 2])
@pytest.mark.parametrize(("b", "c"), ((3, 4), (5, 6)))
@pytest.mark.parametrize("d", [3,])
@pytest.mark.parametrize(
"d",
[("3", "4")],
)
@pytest.mark.parametrize(
"e",
[("3", "4"),],
)
def test_multiple_decorators(a, b, c, d, e):
def test_multiple_decorators(a, b, c):
pass

View File

@@ -51,8 +51,3 @@ def test_error_parentheses_trailing_comma(x):
@pytest.mark.parametrize("x", [1, 2])
def test_ok(x):
...
@pytest.mark.parametrize('data, spec', [(1.0, 1.0), (1.0, 1.0)])
def test_numbers(data, spec):
...

View File

@@ -1,9 +0,0 @@
class SingleLineDocstrings():
""'Start with empty string' ' and lint docstring safely'
""" Not a docstring """
def foo(self, bar="""not a docstring"""):
""'Start with empty string' ' and lint docstring safely'
pass
class Nested(foo()[:]): ""'Start with empty string' ' and lint docstring safely'; pass

View File

@@ -1,9 +0,0 @@
class SingleLineDocstrings():
"Do not"' start with empty string' ' and lint docstring safely'
""" Not a docstring """
def foo(self, bar="""not a docstring"""):
"Do not"' start with empty string' ' and lint docstring safely'
pass
class Nested(foo()[:]): "Do not"' start with empty string' ' and lint docstring safely'; pass

View File

@@ -1,5 +0,0 @@
""'Start with empty string' ' and lint docstring safely'
def foo():
pass
""" this is not a docstring """

View File

@@ -1,5 +0,0 @@
"Do not"' start with empty string' ' and lint docstring safely'
def foo():
pass
""" this is not a docstring """

View File

@@ -1,9 +0,0 @@
class SingleLineDocstrings():
''"Start with empty string" ' and lint docstring safely'
''' Not a docstring '''
def foo(self, bar='''not a docstring'''):
''"Start with empty string" ' and lint docstring safely'
pass
class Nested(foo()[:]): ''"Start with empty string" ' and lint docstring safely'; pass

View File

@@ -1,9 +0,0 @@
class SingleLineDocstrings():
'Do not'" start with empty string" ' and lint docstring safely'
''' Not a docstring '''
def foo(self, bar='''not a docstring'''):
'Do not'" start with empty string" ' and lint docstring safely'
pass
class Nested(foo()[:]): 'Do not'" start with empty string" ' and lint docstring safely'; pass

View File

@@ -1,5 +0,0 @@
''"Start with empty string" ' and lint docstring safely'
def foo():
pass
""" this is not a docstring """

View File

@@ -1,5 +0,0 @@
'Do not'" start with empty string" ' and lint docstring safely'
def foo():
pass
""" this is not a docstring """

View File

@@ -1,7 +0,0 @@
"""This is a docstring."""
this_is_an_inline_string = "double quote string"
this_is_a_multiline_string = """
double quote string
"""

View File

@@ -1,2 +0,0 @@
s = ""'Start with empty string' ' and lint docstring safely'
s = "Do not"' start with empty string' ' and lint docstring safely'

View File

@@ -2,8 +2,3 @@ this_should_be_linted = 'single quote string'
this_should_be_linted = u'double quote string'
this_should_be_linted = f'double quote string'
this_should_be_linted = f'double {"quote"} string'
# https://github.com/astral-sh/ruff/issues/10546
x: "Literal['foo', 'bar']"
# https://github.com/astral-sh/ruff/issues/10761
f"Before {f'x {x}' if y else f'foo {z}'} after"

View File

@@ -1,2 +0,0 @@
s = ''"Start with empty string" ' and lint docstring safely'
s = 'Do not'" start with empty string" ' and lint docstring safely'

View File

@@ -406,18 +406,3 @@ def foo():
with contextlib.suppress(Exception):
y = 2
return y
# See: https://github.com/astral-sh/ruff/issues/10732
def func(a: dict[str, int]) -> list[dict[str, int]]:
services: list[dict[str, int]]
if "services" in a:
services = a["services"]
return services
# See: https://github.com/astral-sh/ruff/issues/10732
def func(a: dict[str, int]) -> list[dict[str, int]]:
if "services" in a:
services = a["services"]
return services

View File

@@ -52,62 +52,35 @@ def f():
return False
def f():
# SIM103
if a:
return False
else:
return True
def f():
# OK
if a:
return False
else:
return False
def f():
# OK
if a:
return True
else:
return True
def f():
# SIM103 (but not fixable)
if a:
return False
else:
return True
def f():
# OK
if a:
return False
else:
return False
def f():
# OK
if a:
return True
else:
return True
def f():
# OK
def bool():
return False
if a:
return True
else:
return False
def f():
# SIM103
if keys is not None and notice.key not in keys:
return False
else:
return True
###
# Positive cases (preview)
###
def f():
# SIM103
if a:
return True
return False
def f():
# SIM103
if a:
return False
return True

View File

@@ -6,14 +6,6 @@ class Bad(namedtuple("foo", ["str", "int"])): # SLOT002
pass
class UnusualButStillBad(NamedTuple("foo", [("x", int, "y", int)])): # SLOT002
pass
class UnusualButOkay(NamedTuple("foo", [("x", int, "y", int)])):
__slots__ = ()
class Good(namedtuple("foo", ["str", "int"])): # OK
__slots__ = ("foo",)

View File

@@ -3,5 +3,3 @@ import ruff
import leading_prefix
import os
from . import leading_prefix
from .. import trailing_prefix
from ruff import check

View File

@@ -1,6 +0,0 @@
from astropy.constants import hbar as ℏ
from numpy import pi as π
import numpy as ℂℇℊℋℌℍℎℐℑℒℓℕℤΩℨKÅℬℭℯℰℱℹℴ
import numpy as CƐgHHHhIILlNZΩZKÅBCeEFio
h = 2 * π * ℏ

View File

@@ -104,5 +104,3 @@ def func():
np.unicode_("asf")
np.who()
np.row_stack(([1,2], [3,4]))

View File

@@ -21,10 +21,3 @@ class D(TypedDict):
mixedCase: bool
_mixedCase: list
mixed_Case: set
class E(D):
lower: int
CONSTANT: str
mixedCase: bool
_mixedCase: list
mixed_Case: set

View File

@@ -47,60 +47,4 @@ snapshot.file_uri[len(f's3://{self.s3_bucket_name}/'):]
{len(f's3://{self.s3_bucket_name}/'):1}
#: Okay
a = (1,)
# https://github.com/astral-sh/ruff/issues/10113
"""Minimal repo."""
def main() -> None:
"""Primary function."""
results = {
"k1": [1],
"k2":[2],
}
results_in_tuple = (
{
"k1": [1],
"k2":[2],
},
)
results_in_list = [
{
"k1": [1],
"k2":[2],
}
]
results_in_list_first = [
{
"k2":[2],
}
]
x = [
{
"k1":[2], # E231
"k2": [2:4],
"k3":[2], # E231
"k4": [2],
"k5": [2],
"k6": [1, 2, 3, 4,5,6,7] # E231
},
{
"k1": [
{
"ka":[2,3], # E231
},
{
"kb": [2,3], # E231
},
{
"ka":[2, 3], # E231
"kb": [2, 3], # Ok
"kc": [2, 3], # Ok
"kd": [2,3], # E231
"ke":[2,3], # E231
},
]
}
]
a = (1,

View File

@@ -1,228 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "palRUQyD-U6u"
},
"outputs": [],
"source": [
"some_string = \"123123\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "UWdDLRyf-Zz0"
},
"outputs": [],
"source": [
"some_computation = 1 + 1"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "YreT1sTr-c32"
},
"outputs": [],
"source": [
"some_computation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "V48ppml7-h0f"
},
"outputs": [],
"source": [
"def fn():\n",
" print(\"Hey!\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "cscw_8Xv-lYQ"
},
"outputs": [],
"source": [
"fn()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E301\n",
"class Class:\n",
" \"\"\"Class for minimal repo.\"\"\"\n",
"\n",
" def method(cls) -> None:\n",
" pass\n",
" @classmethod\n",
" def cls_method(cls) -> None:\n",
" pass\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E302\n",
"def a():\n",
" pass\n",
"\n",
"def b():\n",
" pass\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E303\n",
"def fn():\n",
" _ = None\n",
"\n",
"\n",
" # arbitrary comment\n",
"\n",
" def inner(): # E306 not expected (pycodestyle detects E306)\n",
" pass\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E303"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n",
"\n",
"\n",
"def fn():\n",
"\tpass\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E304\n",
"@decorator\n",
"\n",
"def function():\n",
" pass\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E305:7:1\n",
"def fn():\n",
" print()\n",
"\n",
" # comment\n",
"\n",
" # another comment\n",
"fn()\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# E306:3:5\n",
"def a():\n",
" x = 1\n",
" def b():\n",
" pass\n",
"# end"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Ok\n",
"def function1():\n",
"\tpass\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"def function2():\n",
"\tpass\n",
"# end"
]
}
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.7"
}
},
"nbformat": 4,
"nbformat_minor": 4
}

View File

@@ -445,39 +445,6 @@ def test():
# end
# no error
class Foo:
"""Demo."""
@overload
def bar(self, x: int) -> int: ...
@overload
def bar(self, x: str) -> str: ...
def bar(self, x: int | str) -> int | str:
return x
# end
# no error
@overload
def foo(x: int) -> int: ...
@overload
def foo(x: str) -> str: ...
def foo(x: int | str) -> int | str:
if not isinstance(x, (int, str)):
raise TypeError
return x
# end
# no error
def foo(self, x: int) -> int: ...
def bar(self, x: str) -> str: ...
def baz(self, x: int | str) -> int | str:
return x
# end
# E301
class Class(object):
@@ -522,20 +489,6 @@ class Class:
# end
# E301
class Foo:
"""Demo."""
@overload
def bar(self, x: int) -> int: ...
@overload
def bar(self, x: str) -> str:
...
def bar(self, x: int | str) -> int | str:
return x
# end
# E302
"""Main module."""
def fn():
@@ -627,23 +580,6 @@ class Test:
# end
# E302
class A:...
class B: ...
# end
# E302
@overload
def fn(a: int) -> int: ...
@overload
def fn(a: str) -> str: ...
def fn(a: int | str) -> int | str:
...
# end
# E303
def fn():
_ = None

View File

@@ -87,37 +87,6 @@
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "a51463ee-091c-44b4-9069-c03bf7e3bf83",
"metadata": {},
"outputs": [],
"source": [
"%%time\n",
"import pathlib"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0ddc937e-6c19-475f-b108-9405aa1af4f1",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "285041d2-a76c-4ff3-8ff2-0131bbf66016",
"metadata": {},
"outputs": [],
"source": [
"%%time\n",
"%%time\n",
"import pathlib"
]
}
],
"metadata": {

View File

@@ -82,8 +82,3 @@ class Bar:
"""
This is a long sentence that ends with a shortened URL and, therefore, could easily be broken across multiple lines ([source](https://ruff.rs))
"""
# OK
# SPDX-FileCopyrightText: Copyright 2012-2015 Charlie Marsh <very-long-email-address@fake.com>
# SPDX-License-Identifier: a very long license identifier that exceeds the line length limit

View File

@@ -60,7 +60,7 @@ class Scope:
class Scope:
from typing import Callable
# OK
# E731
f: Callable[[int], int] = lambda x: 2 * x
@@ -147,12 +147,3 @@ def scope():
f = lambda: (
i := 1,
)
from dataclasses import dataclass
from typing import Callable
@dataclass
class FilterDataclass:
# OK
filter: Callable[[str], bool] = lambda _: True

View File

@@ -52,8 +52,3 @@ value = rf'\{{1}}'
value = rf'\{1}'
value = rf'{1:\}'
value = f"{rf"\{1}"}"
# Regression tests for https://github.com/astral-sh/ruff/issues/10434
f"{{}}+-\d"
f"\n{{}}+-\d+"
f"\n{{}}<EFBFBD>+-\d+"

View File

@@ -25,9 +25,3 @@ def non_ascii():
def all_caps():
"""th•s is not capitalized."""
def single_word():
"""singleword."""
def single_word_no_dot():
"""singleword"""

View File

@@ -1,8 +0,0 @@
"""Regression test for: https://github.com/astral-sh/ruff/issues/10509"""
from foo import Bar as Bar
class Eggs:
Bar: int # OK
Bar = 1 # F811

View File

@@ -1,6 +1,6 @@
"""Tests for constructs allowed in `.pyi` stub files but not at runtime"""
from typing import Generic, NewType, Optional, TypeAlias, TypeVar, Union
from typing import Optional, TypeAlias, Union
__version__: str
__author__: str
@@ -33,19 +33,6 @@ class Leaf: ...
class Tree(list[Tree | Leaf]): ... # valid in a `.pyi` stub file, not in a `.py` runtime file
class Tree2(list["Tree | Leaf"]): ... # always okay
# Generic bases can have forward references in stubs
class Foo(Generic[T]): ...
T = TypeVar("T")
class Bar(Foo[Baz]): ...
class Baz: ...
# bases in general can be forward references in stubs
class Eggs(Spam): ...
class Spam: ...
# NewType can have forward references
MyNew = NewType("MyNew", MyClass)
# Annotations are treated as assignments in .pyi files, but not in .py files
class MyClass:
foo: int
@@ -55,6 +42,3 @@ class MyClass:
baz: MyClass
eggs = baz # valid in a `.pyi` stub file, not in a `.py` runtime file
eggs = "baz" # always okay
class Blah:
class Blah2(Blah): ...

View File

@@ -1,9 +0,0 @@
"""Test that unicode identifiers are NFKC-normalised"""
𝒞 = 500
print(𝒞)
print(C + 𝒞) # 2 references to the same variable due to NFKC normalization
print(C / 𝒞)
print(C == 𝑪 == 𝒞 == 𝓒 == 𝕮)
print(𝒟) # F821

View File

@@ -1,23 +0,0 @@
"""Regression test for #10451.
Annotations in a class are allowed to be forward references
if `from __future__ import annotations` is active,
even if they're in a class included in
`lint.flake8-type-checking.runtime-evaluated-base-classes`.
They're not allowed to refer to symbols that cannot be *resolved*
at runtime, however.
"""
from __future__ import annotations
from sqlalchemy.orm import DeclarativeBase, Mapped
class Base(DeclarativeBase):
some_mapping: Mapped[list[Bar]] | None = None # Should not trigger F821 (resolvable forward reference)
simplified: list[Bar] | None = None # Should not trigger F821 (resolvable forward reference)
class Bar:
pass

View File

@@ -1,13 +0,0 @@
"""Respect `# noqa` directives on `__all__` definitions."""
__all__ = [ # noqa: F822
"Bernoulli",
"Beta",
"Binomial",
]
__all__ += [
"ContinuousBernoulli", # noqa: F822
"ExponentialFamily",
]

View File

@@ -9,22 +9,3 @@ x = 1
x = 1 # noqa: F401, W203
# noqa: F401
# noqa: F401, W203
# OK
x = 2 # noqa: X100
x = 2 # noqa:X100
# PGH004
x = 2 # noqa X100
# PGH004
x = 2 # noqa X100, X200
# PGH004
x = 2 # noqa : X300
# PGH004
x = 2 # noqa : X400
# PGH004
x = 2 # noqa :X500

View File

@@ -1,44 +0,0 @@
class Wolf:
@staticmethod
def eat(self): # [bad-staticmethod-argument]
pass
class Wolf:
@staticmethod
def eat(sheep):
pass
class Sheep:
@staticmethod
def eat(cls, x, y, z): # [bad-staticmethod-argument]
pass
@staticmethod
def sleep(self, x, y, z): # [bad-staticmethod-argument]
pass
def grow(self, x, y, z):
pass
@classmethod
def graze(cls, x, y, z):
pass
class Foo:
@staticmethod
def eat(x, self, z):
pass
@staticmethod
def sleep(x, cls, z):
pass
def grow(self, x, y, z):
pass
@classmethod
def graze(cls, x, y, z):
pass

View File

@@ -5,7 +5,6 @@ class Person: # [eq-without-hash]
def __eq__(self, other):
return isinstance(other, Person) and other.name == self.name
# OK
class Language:
def __init__(self):
@@ -17,24 +16,8 @@ class Language:
def __hash__(self):
return hash(self.name)
class MyClass:
def __eq__(self, other):
return True
__hash__ = None
class SingleClass:
def __eq__(self, other):
return True
def __hash__(self):
return 7
class ChildClass(SingleClass):
def __eq__(self, other):
return True
__hash__ = SingleClass.__hash__

View File

@@ -11,13 +11,6 @@ def f():
print(X)
def f():
global X
if X > 0:
del X
###
# Non-errors.
###

View File

@@ -1,161 +0,0 @@
# pylint: disable=missing-docstring, invalid-name, too-few-public-methods, redefined-outer-name
value = 10
value2 = 0
value3 = 3
# Positive
if value < 10: # [max-instead-of-if]
value = 10
if value <= 10: # [max-instead-of-if]
value = 10
if value < value2: # [max-instead-of-if]
value = value2
if value > 10: # [min-instead-of-if]
value = 10
if value >= 10: # [min-instead-of-if]
value = 10
if value > value2: # [min-instead-of-if]
value = value2
class A:
def __init__(self):
self.value = 13
A1 = A()
if A1.value < 10: # [max-instead-of-if]
A1.value = 10
if A1.value > 10: # [min-instead-of-if]
A1.value = 10
class AA:
def __init__(self, value):
self.value = value
def __gt__(self, b):
return self.value > b
def __ge__(self, b):
return self.value >= b
def __lt__(self, b):
return self.value < b
def __le__(self, b):
return self.value <= b
A1 = AA(0)
A2 = AA(3)
if A2 < A1: # [max-instead-of-if]
A2 = A1
if A2 <= A1: # [max-instead-of-if]
A2 = A1
if A2 > A1: # [min-instead-of-if]
A2 = A1
if A2 >= A1: # [min-instead-of-if]
A2 = A1
# Negative
if value < 10:
value = 2
if value <= 3:
value = 5
if value < 10:
value = 2
value2 = 3
if value < value2:
value = value3
if value < 5:
value = value3
if 2 < value <= 3:
value = 1
if value < 10:
value = 10
else:
value = 3
if value <= 3:
value = 5
elif value == 3:
value = 2
if value > 10:
value = 2
if value >= 3:
value = 5
if value > 10:
value = 2
value2 = 3
if value > value2:
value = value3
if value > 5:
value = value3
if 2 > value >= 3:
value = 1
if value > 10:
value = 10
else:
value = 3
if value >= 3:
value = 5
elif value == 3:
value = 2
# Parenthesized expressions
if value.attr > 3:
(
value.
attr
) = 3
class Foo:
_min = 0
_max = 0
def foo(self, value) -> None:
if value < self._min:
self._min = value
if value > self._max:
self._max = value
if self._min < value:
self._min = value
if self._max > value:
self._max = value
if value <= self._min:
self._min = value
if value >= self._max:
self._max = value
if self._min <= value:
self._min = value
if self._max >= value:
self._max = value

View File

@@ -1,60 +0,0 @@
# Errors
nums = {1, 2, 3}
for num in nums:
nums.add(num + 1)
animals = {"dog", "cat", "cow"}
for animal in animals:
animals.pop("cow")
fruits = {"apple", "orange", "grape"}
for fruit in fruits:
fruits.clear()
planets = {"mercury", "venus", "earth"}
for planet in planets:
planets.discard("mercury")
colors = {"red", "green", "blue"}
for color in colors:
colors.remove("red")
odds = {1, 3, 5}
for num in odds:
if num > 1:
odds.add(num + 1)
# OK
nums = {1, 2, 3}
for num in nums.copy():
nums.add(nums + 3)
animals = {"dog", "cat", "cow"}
for animal in animals:
print(animals - {animal})
fruits = {"apple", "orange", "grape"}
temp_fruits = set()
for fruit in fruits:
temp_fruits.add(fruit)
temp_fruits.remove(fruit)
temp_fruits.clear(fruit)
colors = {"red", "green", "blue"}
def add_colors():
colors = {"cyan", "magenta", "yellow"}
for color in colors:
def add_color():
global colors
colors.add(color)
add_color()
add_colors()
print(colors)

View File

@@ -1,76 +0,0 @@
import math
from math import nan as bad_val
import numpy as np
from numpy import nan as npy_nan
x = float("nan")
y = np.NaN
# PLW0117
if x == float("nan"):
pass
# PLW0117
if x == float("NaN"):
pass
# PLW0117
if x == float("NAN"):
pass
# PLW0117
if x == float("Nan"):
pass
# PLW0117
if x == math.nan:
pass
# PLW0117
if x == bad_val:
pass
# PLW0117
if y == np.NaN:
pass
# PLW0117
if y == np.NAN:
pass
# PLW0117
if y == np.nan:
pass
# PLW0117
if y == npy_nan:
pass
# OK
if math.isnan(x):
pass
# OK
if np.isnan(y):
pass
# OK
if x == 0:
pass
# OK
if x == float("32"):
pass
# OK
if x == float(42):
pass
# OK
if y == np.inf:
pass
# OK
if x == "nan":
pass

View File

@@ -1,58 +0,0 @@
# Errors
some_string = "some string"
index, a_number, to_multiply, to_divide, to_cube, timeDiffSeconds, flags = (
0,
1,
2,
3,
4,
5,
0x3,
)
a_list = [1, 2]
some_set = {"elem"}
mat1, mat2 = None, None
some_string = some_string + "a very long end of string"
index = index - 1
a_list = a_list + ["to concat"]
some_set = some_set | {"to concat"}
to_multiply = to_multiply * 5
to_multiply = 5 * to_multiply
to_multiply = to_multiply * to_multiply
to_divide = to_divide / 5
to_divide = to_divide // 5
to_cube = to_cube**3
to_cube = 3**to_cube
to_cube = to_cube**to_cube
timeDiffSeconds = timeDiffSeconds % 60
flags = flags & 0x1
flags = flags | 0x1
flags = flags ^ 0x1
flags = flags << 1
flags = flags >> 1
mat1 = mat1 @ mat2
a_list[1] = a_list[1] + 1
a_list[0:2] = a_list[0:2] * 3
a_list[:2] = a_list[:2] * 3
a_list[1:] = a_list[1:] * 3
a_list[:] = a_list[:] * 3
index = index * (index + 10)
class T:
def t(self):
self.a = self.a + 1
obj = T()
obj.a = obj.a + 1
# OK
a_list[0] = a_list[:] * 3
index = a_number = a_number + 1
a_number = index = a_number + 1
index = index * index + 10
some_string = "a very long start to the string" + some_string

View File

@@ -1,67 +0,0 @@
# Positive cases
counter = 0
def count():
global counter
nonlocal counter
counter += 1
def count():
counter = 0
def count(counter_type):
if counter_type == "nonlocal":
nonlocal counter
counter += 1
else:
global counter
counter += 1
def count():
counter = 0
def count_twice():
for i in range(2):
nonlocal counter
counter += 1
global counter
def count():
nonlocal counter
global counter
counter += 1
# Negative cases
counter = 0
def count():
global counter
counter += 1
def count():
counter = 0
def count_local():
nonlocal counter
counter += 1
def count():
counter = 0
def count_local():
nonlocal counter
counter += 1
def count_global():
global counter
counter += 1

Some files were not shown because too many files have changed in this diff.