Compare commits
1 commit
brent/cach ... perf-node-

| Author | SHA1 | Date |
|---|---|---|
|  | 82f33db5e6 |  |

@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']

@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false

status-level = "skip"

# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }

# Show slow jobs in the final summary
final-status-level = "slow"

@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"

@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
max_line_length = 100

14 .gitattributes vendored
@@ -5,9 +5,6 @@ crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/f-string-carriage-return-newline.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@f-string-carriage-return-newline.py.snap text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

@@ -15,16 +12,7 @@ crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr

crates/ruff_linter/resources/test/fixtures/ruff/RUF046_CR.py text eol=cr
crates/ruff_linter/resources/test/fixtures/ruff/RUF046_LF.py text eol=lf

crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_CR.py text eol=cr
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_LF.py text eol=lf

crates/ruff_python_parser/resources/inline linguist-generated=true

ruff.schema.json -diff linguist-generated=true text=auto eol=lf
ty.schema.json -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_ast/src/generated.rs -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_formatter/src/generated.rs -diff linguist-generated=true text=auto eol=lf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown

13 .github/CODEOWNERS vendored
@@ -9,16 +9,13 @@
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
/crates/ruff_annotate_snippets/ @BurntSushi

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser/ty etc.
/python/py-fuzzer/ @AlexWaygood
# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood

# ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood

12 .github/ISSUE_TEMPLATE.md vendored Normal file
@@ -0,0 +1,12 @@
<!--
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
-->

31 .github/ISSUE_TEMPLATE/1_bug_report.yaml vendored
@@ -1,31 +0,0 @@
name: Bug report
description: Report an error or unexpected behavior
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

**Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**

- type: textarea
attributes:
label: Summary
description: |
A clear and concise description of the bug, including a minimal reproducible example.

Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).

If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.

validations:
required: true

- type: input
attributes:
label: Version
description: What version of ruff are you using? (see `ruff version`)
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
validations:
required: false

10 .github/ISSUE_TEMPLATE/2_rule_request.yaml vendored
@@ -1,10 +0,0 @@
name: Rule request
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
body:
- type: textarea
attributes:
label: Summary
description: |
A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
validations:
required: true

18 .github/ISSUE_TEMPLATE/3_question.yaml vendored
@@ -1,18 +0,0 @@
name: Question
description: Ask a question about Ruff
labels: ["question"]
body:
- type: textarea
attributes:
label: Question
description: Describe your question in detail.
validations:
required: true

- type: input
attributes:
label: Version
description: What version of ruff are you using? (see `ruff version`)
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
validations:
required: false

11 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,11 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Report an issue with ty
url: https://github.com/astral-sh/ty/issues/new/choose
about: Please report issues for our type checker ty in the ty repository.
- name: Documentation
url: https://docs.astral.sh/ruff
about: Please consult the documentation before creating an issue.
- name: Community
url: https://discord.com/invite/astral-sh
about: Join our Discord community to ask questions and collaborate.

5 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,9 +1,8 @@
<!--
Thank you for contributing to Ruff/ty! To help us out with reviewing, please consider the following:
Thank you for contributing to Ruff! To help us out with reviewing, please consider the following:

- Does this pull request include a summary of the change? (See below.)
- Does this pull request include a descriptive title? (Please prefix with `[ty]` for ty pull
requests.)
- Does this pull request include a descriptive title?
- Does this pull request include references to any relevant issues?
-->

13 .github/actionlint.yaml vendored
@@ -1,13 +0,0 @@
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.

self-hosted-runner:
# Various runners we use that aren't recognized out-of-the-box by actionlint:
labels:
- depot-ubuntu-latest-8
- depot-ubuntu-22.04-16
- depot-ubuntu-22.04-32
- depot-windows-2022-16
- github-windows-2025-x86_64-8
- github-windows-2025-x86_64-16
- codspeed-macro

8 .github/mypy-primer-ty.toml vendored
@@ -1,8 +0,0 @@
#:schema ../ty.schema.json
# Configuration overrides for the mypy primer run

# Enable off-by-default rules.
[rules]
possibly-unresolved-reference = "warn"
unused-ignore-comment = "warn"
division-by-zero = "warn"

30 .github/renovate.json5 vendored
@@ -40,23 +40,12 @@
enabled: true,
},
packageRules: [
// Pin GitHub Actions to immutable SHAs.
{
matchDepTypes: ["action"],
pinDigests: true,
},
// Annotate GitHub Actions SHAs with a SemVer version.
{
extends: ["helpers:pinGitHubActionDigests"],
extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
},
{
// Group upload/download artifact updates, the versions are dependent
groupName: "Artifact GitHub Actions dependencies",
matchManagers: ["github-actions"],
matchDatasources: ["gitea-tags", "github-tags"],
matchPackageNames: ["actions/.*-artifact"],
matchPackagePatterns: ["actions/.*-artifact"],
description: "Weekly update of artifact-related GitHub Actions dependencies",
},
{
@@ -72,7 +61,7 @@
{
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
// See: https://github.com/astral-sh/uv/issues/3642
matchPackageNames: ["zip"],
matchPackagePatterns: ["zip"],
matchManagers: ["cargo"],
enabled: false,
},
@@ -81,7 +70,7 @@
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackageNames: ["mkdocs-material"],
matchPackagePatterns: ["mkdocs-material"],
enabled: false,
},
{
@@ -98,15 +87,22 @@
{
groupName: "Monaco",
matchManagers: ["npm"],
matchPackageNames: ["monaco"],
matchPackagePatterns: ["monaco"],
description: "Weekly update of the Monaco editor",
},
{
groupName: "strum",
matchManagers: ["cargo"],
matchPackageNames: ["strum"],
matchPackagePatterns: ["strum"],
description: "Weekly update of strum dependencies",
}
},
{
groupName: "ESLint",
matchManagers: ["npm"],
matchPackageNames: ["eslint"],
allowedVersions: "<9",
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
},
],
vulnerabilityAlerts: {
commitMessageSuffix: "",

120 .github/workflows/build-binaries.yml vendored
@@ -23,12 +23,10 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff
PYTHON_VERSION: "3.13"
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
@@ -39,52 +37,50 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build sdist"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
command: sdist
args: --out dist
- name: "Test sdist"
run: |
pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload sdist"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-sdist
path: dist

macos-x86_64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
runs-on: macos-12
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-macos-x86_64
path: dist
@@ -99,7 +95,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-macos-x86_64
path: |
@@ -110,28 +106,27 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: arm64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: aarch64
args: --release --locked --out dist
- name: "Test wheel - aarch64"
run: |
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-aarch64-apple-darwin
path: dist
@@ -146,7 +141,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-aarch64-apple-darwin
path: |
@@ -166,18 +161,17 @@ jobs:
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
@@ -188,11 +182,11 @@ jobs:
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -203,7 +197,7 @@ jobs:
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -219,18 +213,17 @@ jobs:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
@@ -238,11 +231,11 @@ jobs:
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -260,7 +253,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -294,24 +287,23 @@ jobs:
arch: arm

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
- uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
if: ${{ matrix.platform.arch != 'ppc64' && matrix.platform.arch != 'ppc64le'}}
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
with:
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
@@ -325,7 +317,7 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -343,7 +335,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -359,25 +351,24 @@ jobs:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
@@ -387,7 +378,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -405,7 +396,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -425,23 +416,22 @@ jobs:
arch: armv7

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
@@ -454,7 +444,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -472,7 +462,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |

273 .github/workflows/build-docker.yml vendored
@@ -17,283 +17,52 @@ on:
paths:
- .github/workflows/build-docker.yml

env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff

jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
docker-publish:
name: Build Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false

- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/setup-buildx-action@v3

- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff

- name: Check tag consistency
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
env:
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
run: |
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${TAG}" != "${version}" ]; then
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${TAG}" >&2
echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
tags: |
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

- name: Normalize Platform Pair (replace / with -)
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"

# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

- name: Export digests
env:
digest: ${{ steps.build.outputs.digest }}
run: |
mkdir -p /tmp/digests
touch "/tmp/digests/${digest#sha256:}"

- name: Upload digests
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1

docker-publish:
name: Publish Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-build
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
# shellcheck disable=SC2046
docker buildx imagetools create \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)

docker-publish-extra:
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
strategy:
fail-fast: false
matrix:
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
image-mapping:
- alpine:3.21,alpine3.21,alpine
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Generate Dynamic Dockerfile Tags
shell: bash
env:
TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
run: |
set -euo pipefail

# Extract the image and tags from the matrix variable
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"

# Generate Dockerfile content
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
ENTRYPOINT []
CMD ["/usr/local/bin/ruff"]
EOF

# Initialize a variable to store all tag docker metadata patterns
TAG_PATTERNS=""

# Loop through all base tags and append its docker metadata pattern to the list
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
IFS=','; for TAG in ${BASE_TAGS}; do
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
done

# Remove the trailing newline from the pattern list
TAG_PATTERNS="${TAG_PATTERNS%\\n}"

# Export image cache name
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"

# Export tag patterns using the multiline env var syntax
{
echo "TAG_PATTERNS<<EOF"
echo -e "${TAG_PATTERNS}"
echo EOF
} >> "$GITHUB_ENV"

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
flavor: |
latest=false
tags: |
${{ env.TAG_PATTERNS }}

- name: Build and push
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
# We do not really need to cache here as the Dockerfile is tiny
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
push: true
tags: ${{ steps.meta.outputs.tags }}
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
annotations: ${{ steps.meta.outputs.annotations }}

# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
# show the ruff base image first since GitHub always shows the last updated image digests
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
docker-republish:
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish-extra
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done

# shellcheck disable=SC2046
docker buildx imagetools create \
"${annotations[@]}" \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)

708 .github/workflows/ci.yaml vendored
File diff suppressed because it is too large

30 .github/workflows/daily_fuzz.yaml vendored
@@ -31,31 +31,25 @@ jobs:
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build ruff
# A debug build means the script runs slower once it gets started,
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: |
# shellcheck disable=SC2046
(
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
$(shuf -i 0-9999999999999999999 -n 1000)
)
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff

create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
@@ -65,7 +59,7 @@ jobs:
permissions:
issues: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -73,6 +67,6 @@ jobs:
owner: "astral-sh",
repo: "ruff",
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
labels: ["bug", "parser", "fuzzer"],
})

108 .github/workflows/mypy_primer.yaml vendored
@@ -1,108 +0,0 @@
name: Run mypy_primer

permissions: {}

on:
pull_request:
paths:
- "crates/ty*/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"
- ".github/workflows/mypy_primer.yaml"
- ".github/workflows/mypy_primer_comment.yaml"
- "Cargo.lock"
- "!**.md"

concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1

jobs:
mypy_primer:
name: Run mypy_primer
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"

- name: Install Rust toolchain
run: rustup show

- name: Run mypy_primer
shell: bash
env:
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
DIFF_FILE: mypy_primer.diff
run: |
cd ruff
scripts/mypy_primer.sh
echo ${{ github.event.number }} > ../pr-number

- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: mypy_primer_diff
path: mypy_primer.diff

- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number

memory_usage:
name: Run memory statistics
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "ruff"

- name: Install Rust toolchain
run: rustup show

- name: Run mypy_primer
shell: bash
env:
TY_MAX_PARALLELISM: 1 # for deterministic memory numbers
TY_MEMORY_REPORT: mypy_primer
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/memory.txt
DIFF_FILE: mypy_primer_memory.diff
run: |
cd ruff
scripts/mypy_primer.sh

- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: mypy_primer_memory_diff
path: mypy_primer_memory.diff

122 .github/workflows/mypy_primer_comment.yaml vendored
@@ -1,122 +0,0 @@
name: PR comment (mypy_primer)

on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run mypy_primer]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The mypy_primer workflow that triggers the workflow run
required: true

jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer results"
id: download-mypy_primer_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer memory results"
id: download-mypy_primer_memory_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_memory_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_memory_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
run: |
# Guard against malicious mypy_primer results that symlink to a secret
# file on this runner
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
then
echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
exit 1
fi

# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment mypy_primer -->' >> comment.txt

echo '## `mypy_primer` results' >> comment.txt
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No ecosystem changes detected ✅' >> comment.txt
fi

if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No memory usage changes detected ✅' >> comment.txt
fi

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment mypy_primer -->"

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace

2 .github/workflows/notify-dependents.yml vendored
@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
uses: actions/github-script@v7
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |

21 .github/workflows/pr-comment.yaml vendored
@@ -10,13 +10,14 @@ on:
description: The ecosystem workflow that triggers the workflow run
required: true

permissions:
pull-requests: write

jobs:
comment:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v6
name: Download pull request number
with:
name: pr-number
@@ -29,10 +30,10 @@ jobs:
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v6
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -65,12 +66,12 @@ jobs:
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
echo 'comment<<EOF' >> $GITHUB_OUTPUT
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
uses: peter-evans/find-comment@v3
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
@@ -80,7 +81,7 @@ jobs:

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
uses: peter-evans/create-or-update-comment@v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}

61 .github/workflows/publish-docs.yml vendored
@@ -23,52 +23,52 @@ jobs:
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
persist-credentials: true

- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- uses: actions/setup-python@v5
with:
python-version: 3.12

- name: "Set docs version"
env:
version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
run: |
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, exit with error
if [[ -z "$version" ]]; then
echo "Can't build docs without a version."
exit 1
fi

# Use version as display name for now
display_name="$version"

echo "version=$version" >> "$GITHUB_ENV"
echo "display_name=$display_name" >> "$GITHUB_ENV"
echo "version=$version" >> $GITHUB_ENV
echo "display_name=$display_name" >> $GITHUB_ENV

- name: "Set branch name"
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
timestamp="$(date +%s)"

# create branch_display_name from display_name by replacing all
# characters disallowed in git branch names with hyphens
branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"

echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
echo "timestamp=$timestamp" >> "$GITHUB_ENV"
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
echo "timestamp=$timestamp" >> $GITHUB_ENV

- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}

- name: "Install Rust toolchain"
run: rustup show

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@v2

- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@@ -92,7 +92,9 @@ jobs:
run: mkdocs build --strict -f mkdocs.public.yml

- name: "Clone docs repo"
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
run: |
version="${{ env.version }}"
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs

- name: "Copy docs"
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
@@ -100,10 +102,12 @@ jobs:
- name: "Commit docs"
working-directory: astral-docs
run: |
branch_name="${{ env.branch_name }}"

git config user.name "astral-docs-bot"
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"

git checkout -b "${branch_name}"
git checkout -b $branch_name
git add site/ruff
git commit -m "Update ruff documentation for $version"

@@ -112,8 +116,12 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
branch_name="${{ env.branch_name }}"

# set the PR title
pull_request_title="Update ruff documentation for ${display_name}"
pull_request_title="Update ruff documentation for $display_name"

# Delete any existing pull requests that are open for this version
# by checking against pull_request_title because the new PR will
@@ -122,15 +130,13 @@ jobs:
xargs -I {} gh pr close {}

# push the branch to GitHub
git push origin "${branch_name}"
git push origin $branch_name

# create the PR
gh pr create \
--base=main \
--head="${branch_name}" \
--title="${pull_request_title}" \
--body="Automated documentation update for ${display_name}" \
--label="documentation"
gh pr create --base main --head $branch_name \
--title "$pull_request_title" \
--body "Automated documentation update for $display_name" \
--label "documentation"

- name: "Merge Pull Request"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
@@ -138,7 +144,8 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10
gh pr merge --squash "${branch_name}"
gh pr merge --squash $branch_name
|
||||
|
||||
21 .github/workflows/publish-playground.yml vendored
@@ -24,31 +24,32 @@ jobs:
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
node-version: 18
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
- name: "Run wasm-pack"
|
||||
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Build Ruff playground"
|
||||
run: npm run build --workspace ruff-playground
|
||||
- name: "Build JavaScript bundle"
|
||||
run: npm run build
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
uses: cloudflare/wrangler-action@v3.7.0
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy playground/ruff/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
|
||||
10 .github/workflows/publish-pypi.yml vendored
@@ -21,12 +21,14 @@ jobs:
|
||||
# For PyPI's trusted publishing.
|
||||
id-token: write
|
||||
steps:
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: wheels-*
|
||||
path: wheels
|
||||
merge-multiple: true
|
||||
- name: Publish to PyPi
|
||||
run: uv publish -v wheels/*
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
skip-existing: true
|
||||
packages-dir: wheels
|
||||
verbose: true
|
||||
|
||||
58 .github/workflows/publish-ty-playground.yml vendored
@@ -1,58 +0,0 @@
|
||||
# Publish the ty playground.
|
||||
name: "[ty Playground] Release"
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "crates/ty*/**"
|
||||
- "crates/ruff_db/**"
|
||||
- "crates/ruff_python_ast/**"
|
||||
- "crates/ruff_python_parser/**"
|
||||
- "playground/**"
|
||||
- ".github/workflows/publish-ty-playground.yml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: 22
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Build ty playground"
|
||||
run: npm run build --workspace ty-playground
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy playground/ty/dist --project-name=ty-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
14 .github/workflows/publish-wasm.yml vendored
@@ -29,15 +29,11 @@ jobs:
|
||||
target: [web, bundler, nodejs]
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
|
||||
with:
|
||||
version: v0.13.1
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
- name: "Run wasm-pack build"
|
||||
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
|
||||
- name: "Rename generated package"
|
||||
@@ -45,9 +41,9 @@ jobs:
|
||||
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
|
||||
mv /tmp/package.json crates/ruff_wasm/pkg
|
||||
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
node-version: 18
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- name: "Publish (dry-run)"
|
||||
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
76 .github/workflows/release.yml vendored
@@ -1,13 +1,10 @@
|
||||
# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
|
||||
#
|
||||
# Copyright 2022-2024, axodotdev
|
||||
# Copyright 2025 Astral Software Inc.
|
||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
||||
#
|
||||
# CI that:
|
||||
#
|
||||
# * checks for a Git Tag that looks like a release
|
||||
# * builds artifacts with dist (archives, installers, hashes)
|
||||
# * builds artifacts with cargo-dist (archives, installers, hashes)
|
||||
# * uploads those artifacts to temporary workflow zip
|
||||
# * on success, uploads the artifacts to a GitHub Release
|
||||
#
|
||||
@@ -25,10 +22,10 @@ permissions:
|
||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't dist-able).
|
||||
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (dist-able) packages in the workspace with that version (this mode is
|
||||
# (cargo-dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
@@ -40,7 +37,6 @@ permissions:
|
||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
||||
# will be marked as a prerelease.
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
@@ -50,9 +46,9 @@ on:
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||
@@ -61,20 +57,19 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install dist
|
||||
- name: Install cargo-dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.1/cargo-dist-installer.sh | sh"
|
||||
- name: Cache dist
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
|
||||
- name: Cache cargo-dist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/dist
|
||||
path: ~/.cargo/bin/cargo-dist
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
@@ -82,12 +77,12 @@ jobs:
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
@@ -119,24 +114,23 @@ jobs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
- name: Install cached cargo-dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
@@ -144,8 +138,8 @@ jobs:
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
@@ -154,7 +148,7 @@ jobs:
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
path: |
|
||||
@@ -171,23 +165,22 @@ jobs:
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
- name: Install cached cargo-dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
@@ -196,12 +189,12 @@ jobs:
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
@@ -247,17 +240,16 @@ jobs:
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: artifacts
|
||||
|
||||
179 .github/workflows/sync_typeshed.yaml vendored
@@ -1,25 +1,5 @@
|
||||
name: Sync typeshed
|
||||
|
||||
# How this works:
|
||||
#
|
||||
# 1. A Linux worker:
|
||||
# a. Checks out Ruff and typeshed
|
||||
# b. Deletes the vendored typeshed stdlib stubs from Ruff
|
||||
# c. Copies the latest versions of the stubs from typeshed
|
||||
# d. Uses docstring-adder to sync all docstrings available on Linux
|
||||
# e. Creates a new branch on the upstream astral-sh/ruff repository
|
||||
# f. Commits the changes it's made and pushes them to the new upstream branch
|
||||
# 2. Once the Linux worker is done, a Windows worker:
|
||||
# a. Checks out the branch created by the Linux worker
|
||||
# b. Syncs all docstrings available on Windows that are not available on Linux
|
||||
# c. Commits the changes and pushes them to the same upstream branch
|
||||
# 3. Once the Windows worker is done, a MacOS worker:
|
||||
# a. Checks out the branch created by the Linux worker
|
||||
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
|
||||
# c. Commits the changes and pushes them to the same upstream branch
|
||||
# d. Creates a PR against the `main` branch using the branch all three workers have pushed to
|
||||
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
@@ -30,17 +10,7 @@ env:
|
||||
FORCE_COLOR: 1
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
# The name of the upstream branch that the first worker creates,
|
||||
# and which all three workers push to.
|
||||
UPSTREAM_BRANCH: typeshedbot/sync-typeshed
|
||||
|
||||
# The path to the directory that contains the vendored typeshed stubs,
|
||||
# relative to the root of the Ruff repository.
|
||||
VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed
|
||||
|
||||
jobs:
|
||||
# Sync typeshed stubs, and sync all docstrings available on Linux.
|
||||
# Push the changes to a new branch on the upstream repository.
|
||||
sync:
|
||||
name: Sync typeshed
|
||||
runs-on: ubuntu-latest
|
||||
@@ -49,150 +19,56 @@ jobs:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
path: ruff
|
||||
persist-credentials: true
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout typeshed
|
||||
with:
|
||||
repository: python/typeshed
|
||||
path: typeshed
|
||||
persist-credentials: false
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- name: Sync typeshed stubs
|
||||
- name: Sync typeshed
|
||||
id: sync
|
||||
run: |
|
||||
rm -rf "ruff/${VENDORED_TYPESHED}"
|
||||
mkdir "ruff/${VENDORED_TYPESHED}"
|
||||
cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
|
||||
cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"
|
||||
|
||||
# The pyproject.toml file is needed by a later job for the black configuration.
|
||||
# It's deleted before creating the PR.
|
||||
cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"
|
||||
|
||||
cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
|
||||
rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
|
||||
git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
|
||||
cd ruff
|
||||
git checkout -b "${UPSTREAM_BRANCH}"
|
||||
git add .
|
||||
git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
|
||||
- name: Sync Linux docstrings
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
cd ruff
|
||||
./scripts/codemod_docstrings.sh
|
||||
git commit -am "Sync Linux docstrings" --allow-empty
|
||||
- name: Push the changes
|
||||
id: commit
|
||||
if: ${{ success() }}
|
||||
run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"
|
||||
|
||||
# Checkout the branch created by the sync job,
|
||||
# and sync all docstrings available on Windows that are not available on Linux.
|
||||
# Commit the changes and push them to the same branch.
|
||||
docstrings-windows:
|
||||
runs-on: windows-latest
|
||||
timeout-minutes: 20
|
||||
needs: [sync]
|
||||
|
||||
# Don't run the cron job on forks.
|
||||
# The job will also be skipped if the sync job failed, because it's specified in `needs` above,
|
||||
# and we haven't used `always()` in the `if` condition here
|
||||
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: ${{ env.UPSTREAM_BRANCH}}
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- name: Sync Windows docstrings
|
||||
id: docstrings
|
||||
shell: bash
|
||||
run: ./scripts/codemod_docstrings.sh
|
||||
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
|
||||
- name: Commit the changes
|
||||
if: ${{ steps.docstrings.outcome == 'success' }}
|
||||
id: commit
|
||||
if: ${{ steps.sync.outcome == 'success' }}
|
||||
run: |
|
||||
git commit -am "Sync Windows docstrings" --allow-empty
|
||||
git push
|
||||
|
||||
# Checkout the branch created by the sync job,
|
||||
# and sync all docstrings available on macOS that are not available on Linux or Windows.
|
||||
# Push the changes to the same branch and create a PR against the `main` branch using that branch.
|
||||
docstrings-macos-and-pr:
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 20
|
||||
needs: [sync, docstrings-windows]
|
||||
|
||||
# Don't run the cron job on forks.
|
||||
# The job will also be skipped if the sync or docstrings-windows jobs failed,
|
||||
# because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
|
||||
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: ${{ env.UPSTREAM_BRANCH}}
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- name: Sync macOS docstrings
|
||||
run: ./scripts/codemod_docstrings.sh
|
||||
- name: Commit and push the changes
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
git commit -am "Sync macOS docstrings" --allow-empty
|
||||
|
||||
# Here we just reformat the codemodded stubs so that they are
|
||||
# consistent with the other typeshed stubs around them.
|
||||
# Typeshed formats code using black in their CI, so we just invoke
|
||||
# black on the stubs the same way that typeshed does.
|
||||
uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
|
||||
git commit -am "Format codemodded docstrings" --allow-empty
|
||||
|
||||
rm "${VENDORED_TYPESHED}/pyproject.toml"
|
||||
git commit -am "Remove pyproject.toml file"
|
||||
|
||||
git push
|
||||
cd ruff
|
||||
git checkout -b typeshedbot/sync-typeshed
|
||||
git add .
|
||||
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
|
||||
- name: Create a PR
|
||||
if: ${{ success() }}
|
||||
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
|
||||
run: |
|
||||
gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
||||
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"
|
||||
cd ruff
|
||||
git push --force origin typeshedbot/sync-typeshed
|
||||
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
||||
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the typeshed sync failed
|
||||
runs-on: ubuntu-latest
|
||||
needs: [sync, docstrings-windows, docstrings-macos-and-pr]
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
|
||||
needs: [sync]
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
@@ -200,6 +76,5 @@ jobs:
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
|
||||
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
labels: ["bug", "ty"],
|
||||
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
|
||||
})
|
||||
|
||||
138 .github/workflows/ty-ecosystem-analyzer.yaml vendored
@@ -1,138 +0,0 @@
|
||||
name: ty ecosystem-analyzer
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
|
||||
jobs:
|
||||
ty-ecosystem-analyzer:
|
||||
name: Compute diagnostic diff
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
timeout-minutes: 20
|
||||
if: contains(github.event.label.name, 'ecosystem-analyzer')
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
|
||||
- name: Compute diagnostic diff
|
||||
shell: bash
|
||||
run: |
|
||||
cd ruff
|
||||
|
||||
echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
|
||||
mkdir -p ~/.config/ty
|
||||
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
|
||||
|
||||
echo "new commit"
|
||||
git checkout -b new_commit "$GITHUB_SHA"
|
||||
git rev-list --format=%s --max-count=1 new_commit
|
||||
cp crates/ty_python_semantic/resources/primer/good.txt projects_new.txt
|
||||
|
||||
echo "old commit (merge base)"
|
||||
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
|
||||
git checkout -b old_commit "$MERGE_BASE"
|
||||
git rev-list --format=%s --max-count=1 old_commit
|
||||
cp crates/ty_python_semantic/resources/primer/good.txt projects_old.txt
|
||||
|
||||
cd ..
|
||||
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
|
||||
|
||||
ecosystem-analyzer \
|
||||
--repository ruff \
|
||||
diff \
|
||||
--projects-old ruff/projects_old.txt \
|
||||
--projects-new ruff/projects_new.txt \
|
||||
--old old_commit \
|
||||
--new new_commit \
|
||||
--output-old diagnostics-old.json \
|
||||
--output-new diagnostics-new.json
|
||||
|
||||
mkdir dist
|
||||
|
||||
ecosystem-analyzer \
|
||||
generate-diff \
|
||||
diagnostics-old.json \
|
||||
diagnostics-new.json \
|
||||
--old-name "main (merge base)" \
|
||||
--new-name "$REF_NAME" \
|
||||
--output-html dist/diff.html
|
||||
|
||||
ecosystem-analyzer \
|
||||
generate-diff-statistics \
|
||||
diagnostics-old.json \
|
||||
diagnostics-new.json \
|
||||
--old-name "main (merge base)" \
|
||||
--new-name "$REF_NAME" \
|
||||
--output diff-statistics.md
|
||||
|
||||
echo '## `ecosystem-analyzer` results' > comment.md
|
||||
echo >> comment.md
|
||||
cat diff-statistics.md >> comment.md
|
||||
|
||||
cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
id: deploy
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
|
||||
|
||||
- name: "Append deployment URL"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
env:
|
||||
DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
|
||||
run: |
|
||||
echo >> comment.md
|
||||
echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)**" >> comment.md
|
||||
|
||||
- name: Upload comment
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: comment.md
|
||||
path: comment.md
|
||||
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
|
||||
- name: Upload diagnostics diff
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: diff.html
|
||||
path: dist/diff.html
|
||||
@@ -1,85 +0,0 @@
|
||||
name: PR comment (ty ecosystem-analyzer)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [ty ecosystem-analyzer]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The ty ecosystem-analyzer workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: "Download comment.md"
|
||||
id: download-comment
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: comment.md
|
||||
workflow: ty-ecosystem-analyzer.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/comment
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
|
||||
run: |
|
||||
# Guard against malicious ty ecosystem-analyzer results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/comment/comment.md ]]
|
||||
then
|
||||
echo "Error: comment.md cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note: this identifier is used to find the comment to update on subsequent runs
|
||||
echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
|
||||
echo >> comment.md
|
||||
cat pr/comment/comment.md >> comment.md
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.md >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.md
|
||||
edit-mode: replace
|
||||
76 .github/workflows/ty-ecosystem-report.yaml vendored
@@ -1,76 +0,0 @@
|
||||
name: ty ecosystem-report
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# Run every Wednesday at 5:00 UTC:
|
||||
- cron: 0 5 * * 3
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
|
||||
jobs:
|
||||
ty-ecosystem-report:
|
||||
name: Create ecosystem report
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
|
||||
- name: Create report
|
||||
shell: bash
|
||||
run: |
|
||||
cd ruff
|
||||
|
||||
echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
|
||||
mkdir -p ~/.config/ty
|
||||
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
|
||||
|
||||
cd ..
|
||||
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
|
||||
|
||||
ecosystem-analyzer \
|
||||
--verbose \
|
||||
--repository ruff \
|
||||
analyze \
|
||||
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
|
||||
--output ecosystem-diagnostics.json
|
||||
|
||||
mkdir dist
|
||||
|
||||
ecosystem-analyzer \
|
||||
generate-report \
|
||||
--max-diagnostics-per-project=1200 \
|
||||
ecosystem-diagnostics.json \
|
||||
--output dist/index.html
|
||||
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
id: deploy
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}
|
||||
21 .github/zizmor.yml vendored
@@ -1,21 +0,0 @@
|
||||
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
|
||||
# https://woodruffw.github.io/zizmor/configuration/
|
||||
#
|
||||
# TODO: can we remove the ignores here so that our workflows are more secure?
|
||||
rules:
|
||||
dangerous-triggers:
|
||||
ignore:
|
||||
- pr-comment.yaml
|
||||
cache-poisoning:
|
||||
ignore:
|
||||
- build-docker.yml
|
||||
- publish-playground.yml
|
||||
- ty-ecosystem-analyzer.yaml
|
||||
- ty-ecosystem-report.yaml
|
||||
excessive-permissions:
|
||||
# it's hard to test what the impact of removing these ignores would be
|
||||
# without actually running the release workflow...
|
||||
ignore:
|
||||
- build-docker.yml
|
||||
- publish-playground.yml
|
||||
- publish-docs.yml
|
||||
4 .gitignore vendored
@@ -29,10 +29,6 @@ tracing.folded
|
||||
tracing-flamechart.svg
|
||||
tracing-flamegraph.svg
|
||||
|
||||
# insta
|
||||
*.rs.pending-snap
|
||||
|
||||
|
||||
###
|
||||
# Rust.gitignore
|
||||
###
|
||||
|
||||
@@ -21,15 +21,3 @@ MD014: false
|
||||
MD024:
|
||||
# Allow when nested under different parents e.g. CHANGELOG.md
|
||||
siblings_only: true
|
||||
|
||||
# MD046/code-block-style
|
||||
#
|
||||
# Ignore this because it conflicts with the code block style used in content
|
||||
# tabs of mkdocs-material which is to add a blank line after the content title.
|
||||
#
|
||||
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
|
||||
MD046: false
|
||||
|
||||
# Link text should be descriptive
|
||||
# Disallows link text like *here* which is annoying.
|
||||
MD059: false
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
fail_fast: false
|
||||
fail_fast: true
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.github/workflows/release.yml|
|
||||
crates/ty_vendored/vendor/.*|
|
||||
crates/ty_project/resources/.*|
|
||||
crates/ty_python_semantic/resources/corpus/.*|
|
||||
crates/ty/docs/(configuration|rules|cli|environment).md|
|
||||
crates/ruff_benchmark/resources/.*|
|
||||
crates/red_knot_python_semantic/vendor/.*|
|
||||
crates/red_knot_workspace/resources/.*|
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||
crates/ruff_notebook/resources/.*|
|
||||
@@ -20,23 +16,18 @@ exclude: |
|
||||
)$
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.24.1
|
||||
rev: v0.19
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 0.7.22
|
||||
rev: 0.7.17
|
||||
hooks:
|
||||
- id: mdformat
|
||||
additional_dependencies:
|
||||
- mdformat-mkdocs==4.0.0
|
||||
- mdformat-footnote==0.1.1
|
||||
- mdformat-mkdocs
|
||||
- mdformat-admon
|
||||
exclude: |
|
||||
(?x)^(
|
||||
docs/formatter/black\.md
|
||||
@@ -44,7 +35,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.45.0
|
||||
rev: v0.41.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -53,21 +44,8 @@ repos:
|
||||
| docs/\w+\.md
|
||||
)$
|
||||
|
||||
- repo: https://github.com/adamchainz/blacken-docs
|
||||
rev: 1.19.1
|
||||
hooks:
|
||||
- id: blacken-docs
|
||||
args: ["--pyi", "--line-length", "130"]
|
||||
files: '^crates/.*/resources/mdtest/.*\.md'
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.*?invalid(_.+)*_syntax\.md
|
||||
)$
|
||||
additional_dependencies:
|
||||
- black==25.1.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.34.0
|
||||
rev: v1.23.6
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -81,7 +59,7 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.12.4
|
||||
rev: v0.6.1
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
@@ -90,48 +68,11 @@ repos:
|
||||
require_serial: true
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.6.2
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.1.0
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.11.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.33.2
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
|
||||
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
|
||||
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
|
||||
- repo: https://github.com/rhysd/actionlint
|
||||
rev: v1.7.7
|
||||
hooks:
|
||||
- id: actionlint
|
||||
stages:
|
||||
# This hook is disabled by default, since it's quite slow.
|
||||
# To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
|
||||
# To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
|
||||
- manual
|
||||
args:
|
||||
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
|
||||
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
|
||||
additional_dependencies:
|
||||
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
|
||||
# and checks these with shellcheck. This is arguably its most useful feature,
|
||||
# but the integration only works if shellcheck is installed
|
||||
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
|
||||
|
||||
- repo: https://github.com/shellcheck-py/shellcheck-py
|
||||
rev: v0.10.0.1
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, dev-generate-all]
|
||||
|
||||
@@ -1,131 +1,5 @@
|
||||
# Breaking Changes
|
||||
|
||||
## 0.12.0
|
||||
|
||||
- **Detection of more syntax errors**
|
||||
|
||||
Ruff now detects version-related syntax errors, such as the use of the `match`
|
||||
statement on Python versions before 3.10, and syntax errors emitted by
|
||||
CPython's compiler, such as irrefutable `match` patterns before the final
|
||||
`case` arm.
|
||||
|
||||
- **New default Python version handling for syntax errors**
|
||||
|
||||
Ruff will default to the _latest_ supported Python version (3.13) when
|
||||
checking for the version-related syntax errors mentioned above to prevent
|
||||
false positives in projects without a Python version configured. The default
|
||||
in all other cases, like applying lint rules, is unchanged and remains at the
|
||||
minimum supported Python version (3.9).
|
||||
|
||||
- **Updated f-string formatting**
|
||||
|
||||
Ruff now formats multi-line f-strings with format specifiers to avoid adding a
|
||||
line break after the format specifier. This addresses a change to the Python
|
||||
grammar in version 3.13.4 that made such a line break a syntax error.
|
||||
|
||||
- **`rust-toolchain.toml` is no longer included in source distributions**
|
||||
|
||||
The `rust-toolchain.toml` is used to specify a higher Rust version than Ruff's
|
||||
minimum supported Rust version (MSRV) for development and building release
|
||||
artifacts. However, when present in source distributions, it would also cause
|
||||
downstream package maintainers to pull in the same Rust toolchain, even if
|
||||
their available toolchain was MSRV-compatible.
|
||||
|
||||
- **[`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/)
|
||||
(`S320`) has been removed**
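To make the "Detection of more syntax errors" item above concrete, here is a small, hypothetical module (not taken from the Ruff test suite): it is valid on Python 3.10+, but Ruff now reports version-related syntax errors for it when the resolved Python version is older.

```python
# Hypothetical example, assuming a project whose resolved Python version is 3.9.
def describe(value: object) -> str:
    match value:  # `match` statements are a syntax error before Python 3.10
        case int():
            return "an integer"
        case _:  # the irrefutable wildcard arm must stay last, or CPython
            return "something else"  # (and now Ruff) rejects the statement
```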
|
||||
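And for the "Updated f-string formatting" item, a loose, hypothetical sketch of the kind of code affected: when a replacement field carries a format specifier such as `:.2f`, the formatter no longer breaks the line immediately after the specifier, since CPython 3.13.4 made that newline a syntax error.

```python
total = 12345.6789

# A long f-string that the formatter may need to split across lines; the
# split now happens between the string fragments, never right after `:.2f`.
message = (
    f"grand total for this very long and descriptive report label: "
    f"{total:.2f}"
)
print(message)
```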
|
||||
## 0.11.0
|
||||
|
||||
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
|
||||
|
||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
||||
|
||||
In previous versions of Ruff, you could specify your Python version with:
|
||||
|
||||
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
|
||||
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
|
||||
|
||||
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
|
||||
|
||||
In v0.10, config discovery has been updated to address this issue:
|
||||
|
||||
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
|
||||
for a `pyproject.toml` file in the same directory and respect its
|
||||
`requires-python` version, even if it does not contain a `[tool.ruff]`
|
||||
section.
|
||||
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
|
||||
- If there is no config file (`ruff.toml` or `pyproject.toml` with a
|
||||
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
|
||||
search for the closest `pyproject.toml` in the parent directories and use its
|
||||
`requires-python` setting.
|
||||
|
||||
## 0.10.0
|
||||
|
||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
||||
|
||||
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
|
||||
You can find a description of this change in the 0.11.0 section.
|
||||
|
||||
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
|
||||
|
||||
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
|
||||
|
||||
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
|
||||
|
||||
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
|
||||
|
||||
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
|
||||
|
||||
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
|
||||
|
||||
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
|
||||
|
||||
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
|
||||
|
||||
- **\[`unsafe-markup-use`\]: `RUF035` has been recoded to `S704`** ([#15957](https://github.com/astral-sh/ruff/pull/15957))
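A minimal, hypothetical sketch of the `TYPE_CHECKING` change described above:

```python
# Any local variable named TYPE_CHECKING now marks a type-checking block,
# not just the symbol imported from `typing`.
TYPE_CHECKING = False

if TYPE_CHECKING:
    from collections.abc import Sequence  # treated as type-checking-only

# The legacy idioms are no longer special-cased:
if False:
    import json  # now analyzed as ordinary (and likely unused) code
```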
|
||||
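For the suppression-comment change, the two forms covered by the unified parser look roughly like this (see the linked _Error suppression_ documentation for the exact rules):

```python
# ruff: noqa: E501
# ^ file-level suppression: disables E501 for the entire file

import os  # noqa: F401
# ^ in-line suppression: disables F401 on this line only

print(os.name)
```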
|
||||
## 0.9.0
|
||||
|
||||
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
|
||||
|
||||
## 0.8.0
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
## 0.7.0
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
|
||||
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
|
||||
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
|
||||
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
|
||||
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
|
||||
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
|
||||
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
|
||||
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
|
||||
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
|
||||
instead.
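A hypothetical pytest fixture pair illustrating the new `PT001`/`PT023` default:

```python
import pytest


@pytest.fixture  # preferred: no parentheses when the decorator has no arguments
def sample_config() -> dict[str, int]:
    return {"retries": 3}


@pytest.fixture()  # flagged by PT001 under the new default
def legacy_config() -> dict[str, int]:
    return {"retries": 3}
```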
|
||||
|
||||
## 0.6.0
|
||||
|
||||
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
|
||||
@@ -280,7 +154,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
|
||||
|
||||
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
|
||||
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
|
||||
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
|
||||
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
|
||||
@@ -293,8 +167,8 @@ This change only affects those using Ruff under its default rule set. Users that
|
||||
|
||||
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
|
||||
|
||||
Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
|
||||
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
|
||||
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
|
||||
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
|
||||
|
||||
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))
|
||||
|
||||
|
||||
CHANGELOG.md (2639 lines changed) — file diff suppressed because it is too large.
@@ -71,7 +71,8 @@ representative at an online or offline event.
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at <hey@astral.sh>.
|
||||
reported to the community leaders responsible for enforcement at
|
||||
<charlie.r.marsh@gmail.com>.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
|
||||
@@ -2,11 +2,6 @@
|
||||
|
||||
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> This guide is for Ruff. If you're looking to contribute to ty, please see [the ty contributing
|
||||
> guide](https://github.com/astral-sh/ruff/blob/main/crates/ty/CONTRIBUTING.md).
|
||||
|
||||
## The Basics
|
||||
|
||||
Ruff welcomes contributions in the form of pull requests.
|
||||
@@ -34,14 +29,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
||||
cargo install cargo-insta
|
||||
```
|
||||
|
||||
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
|
||||
run Python utility commands.
|
||||
And you'll need pre-commit to run some validation checks:
|
||||
|
||||
```shell
|
||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||
```
|
||||
|
||||
You can optionally install pre-commit hooks to automatically run the validation checks
|
||||
when making a commit:
|
||||
|
||||
```shell
|
||||
uv tool install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
@@ -69,7 +66,7 @@ and that it passes both the lint and test validation checks:
|
||||
```shell
|
||||
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
|
||||
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
|
||||
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
```
|
||||
|
||||
These checks will run on GitHub Actions when you open your pull request, but running them locally
|
||||
@@ -144,7 +141,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
|
||||
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
|
||||
|
||||
1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
|
||||
`#[derive(ViolationMetadata)]` to see examples.
|
||||
`#[violation]` to see examples.
|
||||
|
||||
1. In that file, define a function that adds the violation to the diagnostic list as appropriate
|
||||
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
|
||||
@@ -266,31 +263,30 @@ Finally, regenerate the documentation and generated code with `cargo dev generat
|
||||
|
||||
## MkDocs
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
|
||||
> This means only members of the Astral organization can preview the documentation exactly as it
|
||||
> will appear in production.
|
||||
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.
|
||||
|
||||
To preview any changes to the documentation locally:
|
||||
|
||||
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
|
||||
|
||||
1. Install MkDocs and Material for MkDocs with:
|
||||
|
||||
```shell
|
||||
pip install -r docs/requirements.txt
|
||||
```
|
||||
|
||||
1. Generate the MkDocs site with:
|
||||
|
||||
```shell
|
||||
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
|
||||
python scripts/generate_mkdocs.py
|
||||
```
|
||||
|
||||
1. Run the development server with:
|
||||
|
||||
```shell
|
||||
# For contributors.
|
||||
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
|
||||
mkdocs serve -f mkdocs.public.yml
|
||||
|
||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
|
||||
mkdocs serve -f mkdocs.insiders.yml
|
||||
```
|
||||
|
||||
The documentation should then be available locally at
|
||||
@@ -322,17 +318,6 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
- Often, labels will be missing from pull requests; these will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
|
||||
Additionally, for minor releases:
|
||||
|
||||
- Move the existing contents of `CHANGELOG.md` to `changelogs/0.MINOR.x.md`,
|
||||
where `MINOR` is the previous minor release (e.g. `11` when preparing
|
||||
the 0.12.0 release)
|
||||
- Reverse the entries to put the oldest version first (`0.MINOR.0` instead
|
||||
of `0.MINOR.LATEST` as in the main changelog)
|
||||
- Use the
|
||||
[`reverse-changelog.py`](https://github.com/astral-sh/uv/blob/main/scripts/reverse-changelog.py)
|
||||
script from the uv repo to do this automatically
|
||||
|
||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||
|
||||
1. Run `cargo check`. This should update the lock file with new versions.
|
||||
@@ -383,21 +368,13 @@ GitHub Actions will run your changes against a number of real-world projects fro
|
||||
report on any linter or formatter differences. You can also run those checks locally via:
|
||||
|
||||
```shell
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
pip install -e ./python/ruff-ecosystem
|
||||
ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
```
|
||||
|
||||
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
|
||||
|
||||
## Upgrading Rust
|
||||
|
||||
1. Change the `channel` in `./rust-toolchain.toml` to the new Rust version (`<latest>`)
|
||||
1. Change the `rust-version` in the `./Cargo.toml` to `<latest> - 2` (e.g. 1.84 if the latest is 1.86)
|
||||
1. Run `cargo clippy --fix --allow-dirty --allow-staged` to fix new clippy warnings
|
||||
1. Create and merge the PR
|
||||
1. Bump the Rust version in Ruff's conda forge recipe. See [this PR](https://github.com/conda-forge/ruff-feedstock/pull/266) for an example.
|
||||
1. Enjoy the new Rust version!
|
||||
|
||||
## Benchmarking and Profiling
|
||||
|
||||
We have several ways of benchmarking and profiling Ruff:
|
||||
@@ -420,18 +397,12 @@ which makes it a good target for benchmarking.
|
||||
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
|
||||
```
|
||||
|
||||
Install `hyperfine`:
|
||||
|
||||
```shell
|
||||
cargo install hyperfine
|
||||
```
|
||||
|
||||
To benchmark the release build:
|
||||
|
||||
```shell
|
||||
cargo build --release --bin ruff && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
|
||||
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
|
||||
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
|
||||
@@ -450,7 +421,7 @@ To benchmark against the ecosystem's existing tools:
|
||||
|
||||
```shell
|
||||
hyperfine --ignore-failure --warmup 5 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"pyflakes crates/ruff_linter/resources/test/cpython" \
|
||||
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
|
||||
"pycodestyle crates/ruff_linter/resources/test/cpython" \
|
||||
@@ -496,10 +467,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
|
||||
|
||||
```shell
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
```
|
||||
|
||||
You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
|
||||
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
|
||||
above. All reported benchmarks were computed using the versions specified by
|
||||
`./scripts/benchmarks/pyproject.toml` on Python 3.11.
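Spelled out, the setup described above looks roughly like this (the `.venv` location is uv's default and is an assumption here):

```shell
uv venv --project ./scripts/benchmarks
source ./scripts/benchmarks/.venv/bin/activate   # assumed default venv path
uv sync --project ./scripts/benchmarks
```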
|
||||
|
||||
@@ -553,12 +524,10 @@ You can run the benchmarks with
|
||||
cargo benchmark
|
||||
```
|
||||
|
||||
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
|
||||
|
||||
#### Benchmark-driven Development
|
||||
|
||||
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
|
||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
|
||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
|
||||
`--baseline=<name>` to compare against that benchmark. Criterion will print a message telling you
|
||||
if the benchmark improved/regressed compared to that baseline.
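A minimal sketch of that workflow, using the `cargo benchmark` alias defined above and Criterion's standard baseline flags (branch names are illustrative):

```shell
git checkout main
cargo benchmark --save-baseline=main       # record reference numbers on main
git checkout my-feature-branch
cargo benchmark --baseline=main            # compare this branch against the saved baseline
```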
|
||||
|
||||
@@ -593,7 +562,7 @@ cargo install critcmp
|
||||
|
||||
#### Tips
|
||||
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
||||
to only run the lexer benchmarks.
|
||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
||||
@@ -628,7 +597,8 @@ Then convert the recorded profile
|
||||
perf script -F +pid > /tmp/test.perf
|
||||
```
|
||||
|
||||
You can now view the converted file with [firefox profiler](https://profiler.firefox.com/). To learn more about Firefox profiler, read the [Firefox profiler profiling-guide](https://profiler.firefox.com/docs/#/./guide-perf-profiling).
|
||||
You can now view the converted file with [firefox profiler](https://profiler.firefox.com/), with a
|
||||
more in-depth guide [here](https://profiler.firefox.com/docs/#/./guide-perf-profiling)
|
||||
|
||||
An alternative is to convert the perf data to `flamegraph.svg` using
|
||||
[flamegraph](https://github.com/flamegraph-rs/flamegraph) (`cargo install flamegraph`):
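A hedged sketch of that conversion, assuming `flamegraph`'s option for reusing previously recorded perf data and the default `perf.data` file name:

```shell
# Hedged sketch: reuse the perf data recorded above instead of profiling again.
flamegraph --perfdata perf.data
```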
|
||||
@@ -709,9 +679,9 @@ utils with it:
|
||||
23 Newline 24
|
||||
```
|
||||
|
||||
- `cargo dev print-cst <file>`: Print the CST of a Python file using
|
||||
- `cargo dev print-cst <file>`: Print the CST of a python file using
|
||||
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
|
||||
in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
|
||||
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
|
||||
|
||||
```text
|
||||
Module {
|
||||
@@ -894,7 +864,7 @@ each configuration file.
|
||||
|
||||
The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
|
||||
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
(inclusive of the root itself). The module path can be thought of as "the path you would use to
|
||||
import the module" (e.g., `import foo.bar.baz`).
|
||||
|
||||
|
||||
Cargo.lock (3268 lines changed, generated) — file diff suppressed because it is too large.
Cargo.toml (208 lines changed)
@@ -3,9 +3,8 @@ members = ["crates/*"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
# Please update rustfmt.toml when bumping the Rust edition
|
||||
edition = "2024"
|
||||
rust-version = "1.86"
|
||||
edition = "2021"
|
||||
rust-version = "1.76"
|
||||
homepage = "https://docs.astral.sh/ruff"
|
||||
documentation = "https://docs.astral.sh/ruff"
|
||||
repository = "https://github.com/astral-sh/ruff"
|
||||
@@ -14,17 +13,14 @@ license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
ruff = { path = "crates/ruff" }
|
||||
ruff_annotate_snippets = { path = "crates/ruff_annotate_snippets" }
|
||||
ruff_cache = { path = "crates/ruff_cache" }
|
||||
ruff_db = { path = "crates/ruff_db", default-features = false }
|
||||
ruff_db = { path = "crates/ruff_db" }
|
||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
||||
ruff_graph = { path = "crates/ruff_graph" }
|
||||
ruff_index = { path = "crates/ruff_index" }
|
||||
ruff_linter = { path = "crates/ruff_linter" }
|
||||
ruff_macros = { path = "crates/ruff_macros" }
|
||||
ruff_notebook = { path = "crates/ruff_notebook" }
|
||||
ruff_options_metadata = { path = "crates/ruff_options_metadata" }
|
||||
ruff_python_ast = { path = "crates/ruff_python_ast" }
|
||||
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
|
||||
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
|
||||
@@ -39,106 +35,80 @@ ruff_source_file = { path = "crates/ruff_source_file" }
|
||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
||||
|
||||
ty = { path = "crates/ty" }
|
||||
ty_ide = { path = "crates/ty_ide" }
|
||||
ty_project = { path = "crates/ty_project", default-features = false }
|
||||
ty_python_semantic = { path = "crates/ty_python_semantic" }
|
||||
ty_server = { path = "crates/ty_server" }
|
||||
ty_static = { path = "crates/ty_static" }
|
||||
ty_test = { path = "crates/ty_test" }
|
||||
ty_vendored = { path = "crates/ty_vendored" }
|
||||
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
|
||||
red_knot_server = { path = "crates/red_knot_server" }
|
||||
red_knot_workspace = { path = "crates/red_knot_workspace" }
|
||||
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
anstream = { version = "0.6.18" }
|
||||
anstyle = { version = "1.0.10" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.80" }
|
||||
arc-swap = { version = "1.7.1" }
|
||||
assert_fs = { version = "1.1.0" }
|
||||
argfile = { version = "0.2.0" }
|
||||
bincode = { version = "2.0.0" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.5.0" }
|
||||
bstr = { version = "1.9.1" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
camino = { version = "1.1.7" }
|
||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.6.0" }
|
||||
clearscreen = { version = "4.0.0" }
|
||||
divan = { package = "codspeed-divan-compat", version = "3.0.2" }
|
||||
codspeed-criterion-compat = { version = "3.0.2", default-features = false }
|
||||
colored = { version = "3.0.0" }
|
||||
clearscreen = { version = "3.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
||||
colored = { version = "2.1.0" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
compact_str = "0.9.0"
|
||||
criterion = { version = "0.6.0", default-features = false }
|
||||
compact_str = "0.8.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
dir-test = { version = "0.4.0" }
|
||||
dunce = { version = "1.0.5" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
etcetera = { version = "0.10.0" }
|
||||
fern = { version = "0.7.0" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.8.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
filetime = { version = "0.2.23" }
|
||||
getrandom = { version = "0.3.1" }
|
||||
get-size2 = { version = "0.5.0", features = [
|
||||
"derive",
|
||||
"smallvec",
|
||||
"hashbrown",
|
||||
"compact-str",
|
||||
] }
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
globwalk = { version = "0.9.1" }
|
||||
hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||
"raw-entry",
|
||||
"equivalent",
|
||||
"inline-more",
|
||||
] }
|
||||
heck = "0.5.0"
|
||||
hashbrown = "0.14.3"
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = { version = "2.6.0" }
|
||||
indicatif = { version = "0.18.0" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1" }
|
||||
insta-cmd = { version = "0.6.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.14.0" }
|
||||
jiff = { version = "0.2.0" }
|
||||
itertools = { version = "0.13.0" }
|
||||
js-sys = { version = "0.3.69" }
|
||||
jod-thread = { version = "1.0.0" }
|
||||
jod-thread = { version = "0.1.2" }
|
||||
libc = { version = "0.2.153" }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
lsp-server = { version = "0.7.6" }
|
||||
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
|
||||
"proposed",
|
||||
"proposed",
|
||||
] }
|
||||
matchit = { version = "0.8.1" }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
natord = { version = "1.0.9" }
|
||||
notify = { version = "8.0.0" }
|
||||
notify = { version = "6.1.1" }
|
||||
once_cell = { version = "1.19.0" }
|
||||
ordermap = { version = "0.5.0" }
|
||||
path-absolutize = { version = "3.1.1" }
|
||||
path-slash = { version = "0.2.1" }
|
||||
pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.7.1" }
|
||||
pep440_rs = { version = "0.6.0", features = ["serde"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
pyproject-toml = { version = "0.13.4" }
|
||||
quick-junit = { version = "0.5.0" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
quick-junit = { version = "0.4.0" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.9.0" }
|
||||
rand = { version = "0.8.5" }
|
||||
rayon = { version = "1.10.0" }
|
||||
regex = { version = "1.10.2" }
|
||||
regex-automata = { version = "0.4.9" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
rustc-stable-hash = { version = "0.1.2" }
|
||||
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
@@ -146,62 +116,49 @@ serde-wasm-bindgen = { version = "0.6.4" }
|
||||
serde_json = { version = "1.0.113" }
|
||||
serde_test = { version = "1.0.152" }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = [
|
||||
"macros",
|
||||
"macros",
|
||||
] }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
similar = { version = "2.4.0", features = ["inline"] }
|
||||
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
|
||||
snapbox = { version = "0.6.0", features = [
|
||||
"diff",
|
||||
"term-svg",
|
||||
"cmd",
|
||||
"examples",
|
||||
] }
|
||||
smallvec = { version = "1.13.2" }
|
||||
static_assertions = "1.1.0"
|
||||
strum = { version = "0.27.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.27.0" }
|
||||
strum = { version = "0.26.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.26.0" }
|
||||
syn = { version = "2.0.55" }
|
||||
tempfile = { version = "3.9.0" }
|
||||
test-case = { version = "3.3.1" }
|
||||
thiserror = { version = "2.0.0" }
|
||||
thiserror = { version = "1.0.58" }
|
||||
tikv-jemallocator = { version = "0.6.0" }
|
||||
toml = { version = "0.9.0" }
|
||||
toml = { version = "0.8.11" }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-flame = { version = "0.2.0" }
|
||||
tracing-indicatif = { version = "0.3.11" }
|
||||
tracing-log = { version = "0.2.0" }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
|
||||
"env-filter",
|
||||
"fmt",
|
||||
"ansi",
|
||||
"smallvec",
|
||||
] }
|
||||
tryfn = { version = "0.2.1" }
|
||||
tracing-indicatif = { version = "0.3.6" }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
|
||||
tracing-tree = { version = "0.4.0" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode-width = { version = "0.2.0" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode_names2 = { version = "1.2.2" }
|
||||
unicode-normalization = { version = "0.1.23" }
|
||||
ureq = { version = "2.9.6" }
|
||||
url = { version = "2.5.0" }
|
||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
|
||||
uuid = { version = "1.6.1", features = [
|
||||
"v4",
|
||||
"fast-rng",
|
||||
"macro-diagnostics",
|
||||
"js",
|
||||
] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.92" }
|
||||
wasm-bindgen-test = { version = "0.3.42" }
|
||||
wild = { version = "2" }
|
||||
zip = { version = "0.6.6", default-features = false }
|
||||
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = ["getrandom", "ruff_options_metadata", "uuid"]
|
||||
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_code = "warn"
|
||||
unreachable_pub = "warn"
|
||||
unexpected_cfgs = { level = "warn", check-cfg = [
|
||||
"cfg(fuzzing)",
|
||||
"cfg(codspeed)",
|
||||
] }
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }
|
||||
|
||||
[workspace.lints.clippy]
|
||||
pedantic = { level = "warn", priority = -2 }
|
||||
@@ -217,14 +174,10 @@ missing_panics_doc = "allow"
|
||||
module_name_repetitions = "allow"
|
||||
must_use_candidate = "allow"
|
||||
similar_names = "allow"
|
||||
single_match_else = "allow"
|
||||
too_many_lines = "allow"
|
||||
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
|
||||
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
|
||||
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
|
||||
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
|
||||
needless_raw_string_hashes = "allow"
|
||||
# Disallowed restriction lints
|
||||
ignore_without_reason = "allow" # Too many exsisting instances, and there's no auto fix.
|
||||
print_stdout = "warn"
|
||||
print_stderr = "warn"
|
||||
dbg_macro = "warn"
|
||||
@@ -235,13 +188,6 @@ get_unwrap = "warn"
|
||||
rc_buffer = "warn"
|
||||
rc_mutex = "warn"
|
||||
rest_pat_in_fully_bound_structs = "warn"
|
||||
# nursery rules
|
||||
redundant_clone = "warn"
|
||||
debug_assert_with_mut_call = "warn"
|
||||
unused_peekable = "warn"
|
||||
|
||||
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
|
||||
large_stack_arrays = "allow"
|
||||
|
||||
[profile.release]
|
||||
# Note that we set these explicitly, and these values
|
||||
@@ -266,9 +212,6 @@ opt-level = 3
|
||||
[profile.dev.package.similar]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev.package.salsa]
|
||||
opt-level = 3
|
||||
|
||||
# Reduce complexity of a parser function that would trigger a locals limit in a wasm tool.
|
||||
# https://github.com/bytecodealliance/wasm-tools/blob/b5c3d98e40590512a3b12470ef358d5c7b983b15/crates/wasmparser/src/limits.rs#L29
|
||||
[profile.dev.package.ruff_python_parser]
|
||||
@@ -283,3 +226,58 @@ debug = 1
|
||||
# The profile that 'cargo dist' will build with.
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
# Config for 'cargo dist'
|
||||
[workspace.metadata.dist]
|
||||
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.18.0"
|
||||
# CI backends to support
|
||||
ci = ["github"]
|
||||
# The installers to generate for each app
|
||||
installers = ["shell", "powershell"]
|
||||
# The archive format to use for windows builds (defaults .zip)
|
||||
windows-archive = ".zip"
|
||||
# The archive format to use for non-windows builds (defaults .tar.xz)
|
||||
unix-archive = ".tar.gz"
|
||||
# Target platforms to build apps for (Rust target-triple syntax)
|
||||
targets = [
|
||||
"aarch64-apple-darwin",
|
||||
"aarch64-pc-windows-msvc",
|
||||
"aarch64-unknown-linux-gnu",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"arm-unknown-linux-musleabihf",
|
||||
"armv7-unknown-linux-gnueabihf",
|
||||
"armv7-unknown-linux-musleabihf",
|
||||
"i686-pc-windows-msvc",
|
||||
"i686-unknown-linux-gnu",
|
||||
"i686-unknown-linux-musl",
|
||||
"powerpc64-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-gnu",
|
||||
"s390x-unknown-linux-gnu",
|
||||
"x86_64-apple-darwin",
|
||||
"x86_64-pc-windows-msvc",
|
||||
"x86_64-unknown-linux-gnu",
|
||||
"x86_64-unknown-linux-musl",
|
||||
]
|
||||
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
|
||||
auto-includes = false
|
||||
# Whether cargo-dist should create a GitHub Release or use an existing draft
|
||||
create-release = true
|
||||
# Publish jobs to run in CI
|
||||
pr-run-mode = "skip"
|
||||
# Whether CI should trigger releases with dispatches instead of tag pushes
|
||||
dispatch-releases = true
|
||||
# The stage during which the GitHub Release should be created
|
||||
github-release = "announce"
|
||||
# Whether CI should include auto-generated code to build local artifacts
|
||||
build-local-artifacts = false
|
||||
# Local artifacts jobs to run in CI
|
||||
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
||||
# Publish jobs to run in CI
|
||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
||||
# Announcement jobs to run in CI
|
||||
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
|
||||
# Custom permissions for GitHub Jobs
|
||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
||||
# Whether to install an updater program
|
||||
install-updater = false
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM --platform=$BUILDPLATFORM ubuntu AS build
|
||||
FROM --platform=$BUILDPLATFORM ubuntu as build
|
||||
ENV HOME="/root"
|
||||
WORKDIR $HOME
|
||||
|
||||
|
||||
README.md (57 lines changed)
@@ -34,7 +34,8 @@ An extremely fast Python linter and code formatter, written in Rust.
|
||||
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||
of popular Flake8 plugins, like flake8-bugbear
|
||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editors) for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
|
||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
@@ -109,28 +110,15 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
1. [Who's Using Ruff?](#whos-using-ruff)
|
||||
1. [License](#license)
|
||||
|
||||
## Getting Started<a id="getting-started"></a>
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.
|
||||
|
||||
Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
|
||||
```shell
|
||||
uvx ruff check # Lint all files in the current directory.
|
||||
uvx ruff format # Format all files in the current directory.
|
||||
```
|
||||
|
||||
Or install Ruff with `uv` (recommended), `pip`, or `pipx`:
|
||||
|
||||
```shell
|
||||
# With uv.
|
||||
uv tool install ruff@latest # Install Ruff globally.
|
||||
uv add --dev ruff # Or add Ruff to your project.
|
||||
|
||||
# With pip.
|
||||
pip install ruff
|
||||
|
||||
@@ -148,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.12.4/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.12.4/install.ps1 | iex"
|
||||
curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -182,10 +170,10 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.12.4
|
||||
rev: v0.6.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff-check
|
||||
- id: ruff
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
@@ -194,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
|
||||
|
||||
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
|
||||
[`ruff-action`](https://github.com/astral-sh/ruff-action):
|
||||
[`ruff-action`](https://github.com/chartboost/ruff-action):
|
||||
|
||||
```yaml
|
||||
name: Ruff
|
||||
@@ -204,10 +192,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/ruff-action@v3
|
||||
- uses: chartboost/ruff-action@v1
|
||||
```
|
||||
|
||||
### Configuration<a id="configuration"></a>
|
||||
### Configuration
|
||||
|
||||
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
|
||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||
@@ -250,11 +238,11 @@ exclude = [
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
|
||||
# Assume Python 3.9
|
||||
target-version = "py39"
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
|
||||
[lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
select = ["E4", "E7", "E9", "F"]
|
||||
ignore = []
|
||||
|
||||
@@ -303,7 +291,7 @@ features that may change prior to stabilization.
|
||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||
for more on the linting and formatting commands, respectively.
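For quick reference, those commands look like this:

```shell
ruff help           # top-level commands
ruff help check     # options for the linter
ruff help format    # options for the formatter
```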
|
||||
|
||||
## Rules<a id="rules"></a>
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
@@ -379,21 +367,21 @@ quality tools, including:
|
||||
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
|
||||
|
||||
## Contributing<a id="contributing"></a>
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
|
||||
## Support<a id="support"></a>
|
||||
## Support
|
||||
|
||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
|
||||
## Acknowledgements<a id="acknowledgements"></a>
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
|
||||
@@ -417,21 +405,18 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
## Who's Using Ruff?<a id="whos-using-ruff"></a>
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
- [Albumentations](https://github.com/albumentations-team/albumentations)
|
||||
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
||||
- [Anki](https://apps.ankiweb.net/)
|
||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- Capital One ([datacompy](https://github.com/capitalone/datacompy))
|
||||
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- CERN ([Indico](https://getindico.io/))
|
||||
- [DVC](https://github.com/iterative/dvc)
|
||||
@@ -453,7 +438,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [ivy](https://github.com/unifyai/ivy)
|
||||
- [JAX](https://github.com/jax-ml/jax)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [Kraken Tech](https://kraken.tech/)
|
||||
- [LangChain](https://github.com/hwchase17/langchain)
|
||||
@@ -507,7 +491,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Streamlit](https://github.com/streamlit/streamlit)
|
||||
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
||||
- [Vega-Altair](https://github.com/altair-viz/altair)
|
||||
- [Weblate](https://weblate.org/)
|
||||
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
||||
- [ZenML](https://github.com/zenml-io/zenml)
|
||||
- [Zulip](https://github.com/zulip/zulip)
|
||||
@@ -541,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
|
||||
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
|
||||
```
|
||||
|
||||
## License<a id="license"></a>
|
||||
## License
|
||||
|
||||
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
||||
|
||||
|
||||
SECURITY.md (15 lines changed)
@@ -1,15 +0,0 @@
|
||||
# Security policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
If you have found a possible vulnerability, please email `security at astral dot sh`.
|
||||
|
||||
## Bug bounties
|
||||
|
||||
While we sincerely appreciate and encourage reports of suspected security problems, please note that
|
||||
Astral does not currently run any bug bounty programs.
|
||||
|
||||
## Vulnerability disclosures
|
||||
|
||||
Critical vulnerabilities will be disclosed via GitHub's
|
||||
[security advisory](https://github.com/astral-sh/ruff/security) system.
|
||||
_typos.toml (26 lines changed)
@@ -1,14 +1,6 @@
|
||||
[files]
|
||||
# https://github.com/crate-ci/typos/issues/868
|
||||
extend-exclude = [
|
||||
"crates/ty_vendored/vendor/**/*",
|
||||
"**/resources/**/*",
|
||||
"**/snapshots/**/*",
|
||||
# Completion tests tend to have a lot of incomplete
|
||||
# words naturally. It's annoying to have to make all
|
||||
# of them actually words. So just ignore typos here.
|
||||
"crates/ty_ide/src/completion.rs",
|
||||
]
|
||||
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
|
||||
|
||||
[default.extend-words]
|
||||
"arange" = "arange" # e.g. `numpy.arange`
|
||||
@@ -16,22 +8,14 @@ hel = "hel"
|
||||
whos = "whos"
|
||||
spawnve = "spawnve"
|
||||
ned = "ned"
|
||||
pn = "pn" # `import panel as pn` is a thing
|
||||
pn = "pn" # `import panel as pd` is a thing
|
||||
poit = "poit"
|
||||
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
||||
jod = "jod" # e.g., `jod-thread`
|
||||
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"
|
||||
|
||||
[default]
|
||||
extend-ignore-re = [
|
||||
# Line ignore with trailing "spellchecker:disable-line"
|
||||
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
||||
"LICENSEs",
|
||||
# Various third party dependencies uses `typ` as struct field names (e.g., lsp_types::LogMessageParams)
|
||||
"typ",
|
||||
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
|
||||
"TYP",
|
||||
]
|
||||
|
||||
[default.extend-identifiers]
|
||||
"FrIeNdLy" = "FrIeNdLy"
|
||||
|
||||
@@ -1,885 +0,0 @@
|
||||
# Changelog 0.1.x
|
||||
|
||||
## 0.1.0
|
||||
|
||||
This is the first release which uses the `CHANGELOG` file. See [GitHub Releases](https://github.com/astral-sh/ruff/releases) for prior changelog entries.
|
||||
|
||||
Read Ruff's new [versioning policy](https://docs.astral.sh/ruff/versioning/).
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- Unsafe fixes are no longer displayed or applied without opt-in ([#7769](https://github.com/astral-sh/ruff/pull/7769))
|
||||
- Drop formatting specific rules from the default set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
- The deprecated `format` setting has been removed ([#7984](https://github.com/astral-sh/ruff/pull/7984))
|
||||
- The `format` setting cannot be used to configure the output format, use `output-format` instead
|
||||
- The `RUFF_FORMAT` environment variable is ignored, use `RUFF_OUTPUT_FORMAT` instead
|
||||
- The `--format` option has been removed from `ruff check`, use `--output-format` instead
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Extend `reimplemented-starmap` (`FURB140`) to catch calls with a single and starred argument ([#7768](https://github.com/astral-sh/ruff/pull/7768))
|
||||
- Improve cases covered by `RUF015` ([#7848](https://github.com/astral-sh/ruff/pull/7848))
|
||||
- Update `SIM15` to allow `open` followed by `close` ([#7916](https://github.com/astral-sh/ruff/pull/7916))
|
||||
- Respect `msgspec.Struct` default-copy semantics in `RUF012` ([#7786](https://github.com/astral-sh/ruff/pull/7786))
|
||||
- Add `sqlalchemy` methods to `flake8-boolean-trap` exclusion list ([#7874](https://github.com/astral-sh/ruff/pull/7874))
|
||||
- Add fix for `PLR1714` ([#7910](https://github.com/astral-sh/ruff/pull/7910))
|
||||
- Add fix for `PIE804` ([#7884](https://github.com/astral-sh/ruff/pull/7884))
|
||||
- Add fix for `PLC0208` ([#7887](https://github.com/astral-sh/ruff/pull/7887))
|
||||
- Add fix for `PYI055` ([#7886](https://github.com/astral-sh/ruff/pull/7886))
|
||||
- Update `non-pep695-type-alias` to require `--unsafe-fixes` outside of stub files ([#7836](https://github.com/astral-sh/ruff/pull/7836))
|
||||
- Improve fix message for `UP018` ([#7913](https://github.com/astral-sh/ruff/pull/7913))
|
||||
- Update `PLW3201` to support `Enum` [sunder names](https://docs.python.org/3/library/enum.html#supported-sunder-names) ([#7987](https://github.com/astral-sh/ruff/pull/7987))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Only show warnings for empty preview selectors when enabling rules ([#7842](https://github.com/astral-sh/ruff/pull/7842))
|
||||
- Add `unnecessary-key-check` to simplify `key in dct and dct[key]` to `dct.get(key)` ([#7895](https://github.com/astral-sh/ruff/pull/7895))
|
||||
- Add `assignment-in-assert` to prevent walrus expressions in assert statements ([#7856](https://github.com/astral-sh/ruff/pull/7856))
|
||||
- \[`refurb`\] Add `single-item-membership-test` (`FURB171`) ([#7815](https://github.com/astral-sh/ruff/pull/7815))
|
||||
- \[`pylint`\] Add `and-or-ternary` (`R1706`) ([#7811](https://github.com/astral-sh/ruff/pull/7811))
|
||||
|
||||
_New rules are added in [preview](https://docs.astral.sh/ruff/preview/)._
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `unsafe-fixes` setting ([#7769](https://github.com/astral-sh/ruff/pull/7769))
|
||||
- Add `extend-safe-fixes` and `extend-unsafe-fixes` for promoting and demoting fixes ([#7841](https://github.com/astral-sh/ruff/pull/7841))
|
||||
|
||||
### CLI
|
||||
|
||||
- Added `--unsafe-fixes` option for opt-in to display and apply unsafe fixes ([#7769](https://github.com/astral-sh/ruff/pull/7769))
|
||||
- Fix use of deprecated `--format` option in warning ([#7837](https://github.com/astral-sh/ruff/pull/7837))
|
||||
- Show changed files when running under `--check` ([#7788](https://github.com/astral-sh/ruff/pull/7788))
|
||||
- Write summary messages to stderr when fixing via stdin instead of omitting them ([#7838](https://github.com/astral-sh/ruff/pull/7838))
|
||||
- Update fix summary message in `check --diff` to include unsafe fix hints ([#7790](https://github.com/astral-sh/ruff/pull/7790))
|
||||
- Add notebook `cell` field to JSON output format ([#7664](https://github.com/astral-sh/ruff/pull/7664))
|
||||
- Rename applicability levels to `Safe`, `Unsafe`, and `Display` ([#7843](https://github.com/astral-sh/ruff/pull/7843))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix bug where f-strings were allowed in match pattern literal ([#7857](https://github.com/astral-sh/ruff/pull/7857))
|
||||
- Fix `SIM110` with a yield in the condition ([#7801](https://github.com/astral-sh/ruff/pull/7801))
|
||||
- Preserve trailing comments in `C414` fixes ([#7775](https://github.com/astral-sh/ruff/pull/7775))
|
||||
- Check sequence type before triggering `unnecessary-enumerate` `len` suggestion ([#7781](https://github.com/astral-sh/ruff/pull/7781))
|
||||
- Use correct start location for class/function clause header ([#7802](https://github.com/astral-sh/ruff/pull/7802))
|
||||
- Fix incorrect fixes for `SIM101` ([#7798](https://github.com/astral-sh/ruff/pull/7798))
|
||||
- Format comment before parameter default correctly ([#7870](https://github.com/astral-sh/ruff/pull/7870))
|
||||
- Fix `E251` false positive inside f-strings ([#7894](https://github.com/astral-sh/ruff/pull/7894))
|
||||
- Allow bindings to be created and referenced within annotations ([#7885](https://github.com/astral-sh/ruff/pull/7885))
|
||||
- Show per-cell diffs when analyzing notebooks over `stdin` ([#7789](https://github.com/astral-sh/ruff/pull/7789))
|
||||
- Avoid curly brace escape in f-string format spec ([#7780](https://github.com/astral-sh/ruff/pull/7780))
|
||||
- Fix lexing single-quoted f-string with multi-line format spec ([#7787](https://github.com/astral-sh/ruff/pull/7787))
|
||||
- Consider nursery rules to be in-preview for `ruff rule` ([#7812](https://github.com/astral-sh/ruff/pull/7812))
|
||||
- Report precise location for invalid conversion flag ([#7809](https://github.com/astral-sh/ruff/pull/7809))
|
||||
- Visit pattern match guard as a boolean test ([#7911](https://github.com/astral-sh/ruff/pull/7911))
|
||||
- Respect `--unfixable` in `ISC` rules ([#7917](https://github.com/astral-sh/ruff/pull/7917))
|
||||
- Fix edge case with `PIE804` ([#7922](https://github.com/astral-sh/ruff/pull/7922))
|
||||
- Show custom message in `PTH118` for `Path.joinpath` with starred arguments ([#7852](https://github.com/astral-sh/ruff/pull/7852))
|
||||
- Fix false negative in `outdated-version-block` when using greater than comparisons ([#7920](https://github.com/astral-sh/ruff/pull/7920))
|
||||
- Avoid converting f-strings within Django `gettext` calls ([#7898](https://github.com/astral-sh/ruff/pull/7898))
|
||||
- Fix false positive in `PLR6301` ([#7933](https://github.com/astral-sh/ruff/pull/7933))
|
||||
- Treat type aliases as typing-only expressions e.g. resolves false positive in `TCH004` ([#7968](https://github.com/astral-sh/ruff/pull/7968))
|
||||
- Resolve `cache-dir` relative to project root ([#7962](https://github.com/astral-sh/ruff/pull/7962))
|
||||
- Respect subscripted base classes in type-checking rules e.g. resolves false positive in `TCH003` ([#7954](https://github.com/astral-sh/ruff/pull/7954))
|
||||
- Fix JSON schema limit for `line-length` ([#7883](https://github.com/astral-sh/ruff/pull/7883))
|
||||
- Fix commented-out `coalesce` keyword ([#7876](https://github.com/astral-sh/ruff/pull/7876))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document `reimplemented-starmap` performance effects ([#7846](https://github.com/astral-sh/ruff/pull/7846))
|
||||
- Default to following the system dark/light mode ([#7888](https://github.com/astral-sh/ruff/pull/7888))
|
||||
- Add documentation for fixes ([#7901](https://github.com/astral-sh/ruff/pull/7901))
|
||||
- Fix typo in docs of `PLR6301` ([#7831](https://github.com/astral-sh/ruff/pull/7831))
|
||||
- Update `UP038` docs to note that it results in slower code ([#7872](https://github.com/astral-sh/ruff/pull/7872))
|
||||
- crlf -> cr-lf ([#7766](https://github.com/astral-sh/ruff/pull/7766))
|
||||
- Add an example of an unsafe fix ([#7924](https://github.com/astral-sh/ruff/pull/7924))
|
||||
- Fix documented examples for `unnecessary-subscript-reversal` ([#7774](https://github.com/astral-sh/ruff/pull/7774))
|
||||
- Correct error in tuple example in ruff formatter docs ([#7822](https://github.com/astral-sh/ruff/pull/7822))
|
||||
- Add versioning policy to documentation ([#7923](https://github.com/astral-sh/ruff/pull/7923))
|
||||
- Fix invalid code in `FURB177` example ([#7832](https://github.com/astral-sh/ruff/pull/7832))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Less scary `ruff format` message ([#7867](https://github.com/astral-sh/ruff/pull/7867))
|
||||
- Remove spaces from import statements ([#7859](https://github.com/astral-sh/ruff/pull/7859))
|
||||
- Formatter quoting for f-strings with triple quotes ([#7826](https://github.com/astral-sh/ruff/pull/7826))
|
||||
- Update `ruff_python_formatter` generate.py comment ([#7850](https://github.com/astral-sh/ruff/pull/7850))
|
||||
- Document one-call chaining deviation ([#7767](https://github.com/astral-sh/ruff/pull/7767))
|
||||
- Allow f-string modifications in line-shrinking cases ([#7818](https://github.com/astral-sh/ruff/pull/7818))
|
||||
- Add trailing comment deviation to README ([#7827](https://github.com/astral-sh/ruff/pull/7827))
|
||||
- Add trailing zero between dot and exponential ([#7956](https://github.com/astral-sh/ruff/pull/7956))
|
||||
- Force parentheses for power operations in unary expressions ([#7955](https://github.com/astral-sh/ruff/pull/7955))
|
||||
|
||||
### Playground
|
||||
|
||||
- Fix playground `Quick Fix` action ([#7824](https://github.com/astral-sh/ruff/pull/7824))
|
||||
|
||||
## 0.1.1
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Add unsafe fix for `escape-sequence-in-docstring` (`D301`) ([#7970](https://github.com/astral-sh/ruff/pull/7970))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Respect `#(deprecated)` attribute in configuration options ([#8035](https://github.com/astral-sh/ruff/pull/8035))
|
||||
- Add `[format|lint].exclude` options ([#8000](https://github.com/astral-sh/ruff/pull/8000))
|
||||
- Respect `tab-size` setting in formatter ([#8006](https://github.com/astral-sh/ruff/pull/8006))
|
||||
- Add `lint.preview` ([#8002](https://github.com/astral-sh/ruff/pull/8002))
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `literal-membership` (`PLR6201`) ([#7973](https://github.com/astral-sh/ruff/pull/7973))
|
||||
- \[`pylint`\] Implement `too-many-boolean-expressions` (`PLR0916`) ([#7975](https://github.com/astral-sh/ruff/pull/7975))
|
||||
- \[`pylint`\] Implement `misplaced-bare-raise` (`E0704`) ([#7961](https://github.com/astral-sh/ruff/pull/7961))
|
||||
- \[`pylint`\] Implement `global-at-module-level` (`W0604`) ([#8058](https://github.com/astral-sh/ruff/pull/8058))
|
||||
- \[`pylint`\] Implement `unspecified-encoding` (`PLW1514`) ([#7939](https://github.com/astral-sh/ruff/pull/7939))
|
||||
- Add fix for `triple-single-quotes` (`D300`) ([#7967](https://github.com/astral-sh/ruff/pull/7967))
|
||||
|
||||
### Formatter
|
||||
|
||||
- New code style badge for `ruff format` ([#7878](https://github.com/astral-sh/ruff/pull/7878))
|
||||
- Fix comments outside expression parentheses ([#7873](https://github.com/astral-sh/ruff/pull/7873))
|
||||
- Add `--target-version` to `ruff format` ([#8055](https://github.com/astral-sh/ruff/pull/8055))
|
||||
- Skip over parentheses when detecting `in` keyword ([#8054](https://github.com/astral-sh/ruff/pull/8054))
|
||||
- Add `--diff` option to `ruff format` ([#7937](https://github.com/astral-sh/ruff/pull/7937))
|
||||
- Insert newline after nested function or class statements ([#7946](https://github.com/astral-sh/ruff/pull/7946))
|
||||
- Use `pass` over ellipsis in non-function/class contexts ([#8049](https://github.com/astral-sh/ruff/pull/8049))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Lazily evaluate all PEP 695 type alias values ([#8033](https://github.com/astral-sh/ruff/pull/8033))
|
||||
- Avoid failed assertion when showing fixes from stdin ([#8029](https://github.com/astral-sh/ruff/pull/8029))
|
||||
- Avoid flagging HTTP and HTTPS literals in urllib-open ([#8046](https://github.com/astral-sh/ruff/pull/8046))
|
||||
- Avoid flagging `bad-dunder-method-name` for `_` ([#8015](https://github.com/astral-sh/ruff/pull/8015))
|
||||
- Remove Python 2-only methods from `URLOpen` audit ([#8047](https://github.com/astral-sh/ruff/pull/8047))
|
||||
- Use set bracket replacement for `iteration-over-set` to preserve whitespace and comments ([#8001](https://github.com/astral-sh/ruff/pull/8001))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update tutorial to match revised Ruff defaults ([#8066](https://github.com/astral-sh/ruff/pull/8066))
|
||||
- Update rule `B005` docs ([#8028](https://github.com/astral-sh/ruff/pull/8028))
|
||||
- Update GitHub actions example in docs to use `--output-format` ([#8014](https://github.com/astral-sh/ruff/pull/8014))
|
||||
- Document `lint.preview` and `format.preview` ([#8032](https://github.com/astral-sh/ruff/pull/8032))
|
||||
- Clarify that new rules should be added to `RuleGroup::Preview`. ([#7989](https://github.com/astral-sh/ruff/pull/7989))
|
||||
|
||||
## 0.1.2
|
||||
|
||||
This release includes the Beta version of the Ruff formatter — an extremely fast, Black-compatible Python formatter.
|
||||
Try it today with `ruff format`! [Check out the blog post](https://astral.sh/blog/the-ruff-formatter) and [read the docs](https://docs.astral.sh/ruff/formatter/).
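For a quick taste (the `--diff` option added in 0.1.1 previews changes without writing them):

```shell
ruff format .          # format the current directory in place
ruff format --diff .   # show what would change, without modifying files
```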
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `non-ascii-module-import` (`C2403`) ([#8056](https://github.com/astral-sh/ruff/pull/8056))
|
||||
- \[`pylint`\] implement `non-ascii-name` (`C2401`) ([#8038](https://github.com/astral-sh/ruff/pull/8038))
|
||||
- \[`pylint`\] Implement unnecessary-lambda (W0108) ([#7953](https://github.com/astral-sh/ruff/pull/7953))
|
||||
- \[`refurb`\] Implement `read-whole-file` (`FURB101`) ([#7682](https://github.com/astral-sh/ruff/pull/7682))
|
||||
- Add fix for `E223`, `E224`, and `E242` ([#8143](https://github.com/astral-sh/ruff/pull/8143))
|
||||
- Add fix for `E225`, `E226`, `E227`, and `E228` ([#8136](https://github.com/astral-sh/ruff/pull/8136))
|
||||
- Add fix for `E252` ([#8142](https://github.com/astral-sh/ruff/pull/8142))
|
||||
- Add fix for `E261` ([#8114](https://github.com/astral-sh/ruff/pull/8114))
|
||||
- Add fix for `E273` and `E274` ([#8144](https://github.com/astral-sh/ruff/pull/8144))
|
||||
- Add fix for `E275` ([#8133](https://github.com/astral-sh/ruff/pull/8133))
|
||||
- Update `SIM401` to catch ternary operations ([#7415](https://github.com/astral-sh/ruff/pull/7415))
|
||||
- Update `E721` to allow `is` and `is not` for direct type comparisons ([#7905](https://github.com/astral-sh/ruff/pull/7905))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Add `backports.strenum` to `deprecated-imports` ([#8113](https://github.com/astral-sh/ruff/pull/8113))
|
||||
- Update `SIM112` to ignore `https_proxy`, `http_proxy`, and `no_proxy` ([#8140](https://github.com/astral-sh/ruff/pull/8140))
|
||||
- Update fix for `literal-membership` (`PLR6201`) to be unsafe ([#8097](https://github.com/astral-sh/ruff/pull/8097))
|
||||
- Update fix for `mutable-argument-defaults` (`B006`) to be unsafe ([#8108](https://github.com/astral-sh/ruff/pull/8108))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Change `line-ending` default to `auto` ([#8057](https://github.com/astral-sh/ruff/pull/8057))
|
||||
- Respect parenthesized generators in `has_own_parentheses` ([#8100](https://github.com/astral-sh/ruff/pull/8100))
|
||||
- Add caching to formatter ([#8089](https://github.com/astral-sh/ruff/pull/8089))
|
||||
- Remove `--line-length` option from `format` command ([#8131](https://github.com/astral-sh/ruff/pull/8131))
|
||||
- Add formatter to `line-length` documentation ([#8150](https://github.com/astral-sh/ruff/pull/8150))
|
||||
- Warn about incompatible formatter options ([#8088](https://github.com/astral-sh/ruff/pull/8088))
|
||||
- Fix range of unparenthesized tuple subject in match statement ([#8101](https://github.com/astral-sh/ruff/pull/8101))
|
||||
- Remove experimental formatter warning ([#8148](https://github.com/astral-sh/ruff/pull/8148))
|
||||
- Don't move type param opening parenthesis comment ([#8163](https://github.com/astral-sh/ruff/pull/8163))
|
||||
- Update versions in format benchmark script ([#8110](https://github.com/astral-sh/ruff/pull/8110))
|
||||
- Avoid loading files for cached format results ([#8134](https://github.com/astral-sh/ruff/pull/8134))
|
||||
|
||||
### CLI
|
||||
|
||||
- Show the `ruff format` command in help menus ([#8167](https://github.com/astral-sh/ruff/pull/8167))
|
||||
- Add `ruff version` command with long version display ([#8034](https://github.com/astral-sh/ruff/pull/8034))
|
||||
|
||||
### Configuration
|
||||
|
||||
- New `pycodestyle.max-line-length` option ([#8039](https://github.com/astral-sh/ruff/pull/8039))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Detect `sys.version_info` slices in `outdated-version-block` ([#8112](https://github.com/astral-sh/ruff/pull/8112))
|
||||
- Avoid if-else simplification for `TYPE_CHECKING` blocks ([#8072](https://github.com/astral-sh/ruff/pull/8072))
|
||||
- Avoid false-positive print separator diagnostic with starred argument ([#8079](https://github.com/astral-sh/ruff/pull/8079))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix message for `too-many-arguments` lint ([#8092](https://github.com/astral-sh/ruff/pull/8092))
|
||||
- Fix `extend-unsafe-fixes` and `extend-safe-fixes` example ([#8139](https://github.com/astral-sh/ruff/pull/8139))
|
||||
- Add links to `flake8-import-conventions` options ([#8115](https://github.com/astral-sh/ruff/pull/8115))
|
||||
- Rework the documentation to incorporate the Ruff formatter ([#7732](https://github.com/astral-sh/ruff/pull/7732))
|
||||
- Fix `Options` JSON schema description ([#8081](https://github.com/astral-sh/ruff/pull/8081))
|
||||
- Fix typo (`pytext` -> `pytest`) ([#8117](https://github.com/astral-sh/ruff/pull/8117))
|
||||
- Improve `magic-value-comparison` example in docs ([#8111](https://github.com/astral-sh/ruff/pull/8111))
|
||||
|
||||
## 0.1.3
|
||||
|
||||
This release includes a variety of improvements to the Ruff formatter, removing several known and unintentional deviations from Black.
### Formatter
|
||||
|
||||
- Avoid space around pow for `None`, `True` and `False` ([#8189](https://github.com/astral-sh/ruff/pull/8189))
|
||||
- Avoid sorting all paths in the format command ([#8181](https://github.com/astral-sh/ruff/pull/8181))
|
||||
- Insert necessary blank line between class and leading comments ([#8224](https://github.com/astral-sh/ruff/pull/8224))
|
||||
- Avoid introducing new parentheses in annotated assignments ([#8233](https://github.com/astral-sh/ruff/pull/8233))
|
||||
- Refine the warnings about incompatible linter options ([#8196](https://github.com/astral-sh/ruff/pull/8196))
|
||||
- Add test and basic implementation for formatter preview mode ([#8044](https://github.com/astral-sh/ruff/pull/8044))
|
||||
- Refine warning about incompatible `isort` settings ([#8192](https://github.com/astral-sh/ruff/pull/8192))
|
||||
- Only omit optional parentheses for starting or ending with parentheses ([#8238](https://github.com/astral-sh/ruff/pull/8238))
|
||||
- Use source type to determine parser mode for formatting ([#8205](https://github.com/astral-sh/ruff/pull/8205))
|
||||
- Don't warn about magic trailing comma when `isort.force-single-line` is true ([#8244](https://github.com/astral-sh/ruff/pull/8244))
|
||||
- Use `SourceKind::diff` for formatter ([#8240](https://github.com/astral-sh/ruff/pull/8240))
|
||||
- Fix `fmt:off` with trailing child comment ([#8234](https://github.com/astral-sh/ruff/pull/8234))
|
||||
- Formatter parentheses support for `IpyEscapeCommand` ([#8207](https://github.com/astral-sh/ruff/pull/8207))
|
||||
|
||||
### Linter
|
||||
|
||||
- \[`pylint`\] Add buffer methods to `bad-dunder-method-name` (`PLW3201`) exclusions ([#8190](https://github.com/astral-sh/ruff/pull/8190))
|
||||
- Match rule prefixes from `external` codes setting in `unused-noqa` ([#8177](https://github.com/astral-sh/ruff/pull/8177))
|
||||
- Use `line-length` setting for isort in lieu of `pycodestyle.max-line-length` ([#8235](https://github.com/astral-sh/ruff/pull/8235))
|
||||
- Update fix for `unnecessary-paren-on-raise-exception` to unsafe for unknown types ([#8231](https://github.com/astral-sh/ruff/pull/8231))
|
||||
- Correct quick fix message for `W605` ([#8255](https://github.com/astral-sh/ruff/pull/8255))
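For context, `W605` targets invalid escape sequences such as the following (illustrative snippet; the pattern is made up):

```python
import re

pattern = re.compile("\d+")   # W605: `\d` is an invalid escape sequence
pattern = re.compile(r"\d+")  # the suggested fix uses a raw string
```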
### Documentation
|
||||
|
||||
- Fix typo in max-doc-length documentation ([#8201](https://github.com/astral-sh/ruff/pull/8201))
|
||||
- Improve documentation around linter-formatter conflicts ([#8257](https://github.com/astral-sh/ruff/pull/8257))
|
||||
- Fix link to error suppression documentation in `unused-noqa` ([#8172](https://github.com/astral-sh/ruff/pull/8172))
|
||||
- Add `external` option to `unused-noqa` documentation ([#8171](https://github.com/astral-sh/ruff/pull/8171))
|
||||
- Add title attribute to icons ([#8060](https://github.com/astral-sh/ruff/pull/8060))
|
||||
- Clarify unsafe case in RSE102 ([#8256](https://github.com/astral-sh/ruff/pull/8256))
|
||||
- Fix skipping formatting examples ([#8210](https://github.com/astral-sh/ruff/pull/8210))
|
||||
- Fix the name of the `magic-trailing-comma` option in the README ([#8200](https://github.com/astral-sh/ruff/pull/8200))
|
||||
- Add note about scope of rule changing in versioning policy ([#8169](https://github.com/astral-sh/ruff/pull/8169))
|
||||
- Fix documentation of the default lint rules ([#8218](https://github.com/astral-sh/ruff/pull/8218))
|
||||
- Fix a wrong setting in configuration.md ([#8186](https://github.com/astral-sh/ruff/pull/8186))
|
||||
- Fix misspelled TOML headers in the tutorial ([#8209](https://github.com/astral-sh/ruff/pull/8209))
|
||||
|
||||
## 0.1.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-trio`\] Implement `timeout-without-await` (`TRIO001`) ([#8439](https://github.com/astral-sh/ruff/pull/8439))
|
||||
- \[`numpy`\] Implement NumPy 2.0 migration rule (`NPY200`) ([#7702](https://github.com/astral-sh/ruff/pull/7702))
|
||||
- \[`pylint`\] Implement `bad-open-mode` (`W1501`) ([#8294](https://github.com/astral-sh/ruff/pull/8294))
|
||||
- \[`pylint`\] Implement `import-outside-toplevel` (`C0415`) rule ([#5180](https://github.com/astral-sh/ruff/pull/5180))
|
||||
- \[`pylint`\] Implement `useless-with-lock` (`W2101`) ([#8321](https://github.com/astral-sh/ruff/pull/8321))
|
||||
- \[`pyupgrade`\] Implement `timeout-error-alias` (`UP041`) ([#8476](https://github.com/astral-sh/ruff/pull/8476))
|
||||
- \[`refurb`\] Implement `isinstance-type-none` (`FURB168`) ([#8308](https://github.com/astral-sh/ruff/pull/8308))
|
||||
- Detect confusable Unicode-to-Unicode units in `RUF001`, `RUF002`, and `RUF003` ([#4430](https://github.com/astral-sh/ruff/pull/4430))
|
||||
- Add newline after module docstrings in preview style ([#8283](https://github.com/astral-sh/ruff/pull/8283))
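As a hedged sketch of the new preview behaviour (module contents are made up), the formatter now leaves a blank line between a module docstring and the first statement:

```python
"""Module docstring."""

import os

print(os.name)
```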
### Formatter
|
||||
|
||||
- Add a note on line-too-long to the formatter docs ([#8314](https://github.com/astral-sh/ruff/pull/8314))
|
||||
- Preserve trailing statement semicolons when using `fmt: skip` ([#8273](https://github.com/astral-sh/ruff/pull/8273))
|
||||
- Preserve trailing semicolons when using `fmt: off` ([#8275](https://github.com/astral-sh/ruff/pull/8275))
|
||||
- Avoid duplicating linter-formatter compatibility warnings ([#8292](https://github.com/astral-sh/ruff/pull/8292))
|
||||
- Avoid inserting a newline after function docstrings ([#8375](https://github.com/astral-sh/ruff/pull/8375))
|
||||
- Insert newline between docstring and following own line comment ([#8216](https://github.com/astral-sh/ruff/pull/8216))
|
||||
- Split tuples in return positions by comma first ([#8280](https://github.com/astral-sh/ruff/pull/8280))
|
||||
- Avoid treating byte strings as docstrings ([#8350](https://github.com/astral-sh/ruff/pull/8350))
|
||||
- Add `--line-length` option to `format` command ([#8363](https://github.com/astral-sh/ruff/pull/8363))
|
||||
- Avoid parenthesizing unsplittable because of comments ([#8431](https://github.com/astral-sh/ruff/pull/8431))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add `--output-format` to `ruff rule` and `ruff linter` ([#8203](https://github.com/astral-sh/ruff/pull/8203))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Respect `--force-exclude` in `lint.exclude` and `format.exclude` ([#8393](https://github.com/astral-sh/ruff/pull/8393))
|
||||
- Respect `--extend-per-file-ignores` on the CLI ([#8329](https://github.com/astral-sh/ruff/pull/8329))
|
||||
- Extend `bad-dunder-method-name` to permit `__index__` ([#8300](https://github.com/astral-sh/ruff/pull/8300))
|
||||
- Fix panic with 8 in octal escape ([#8356](https://github.com/astral-sh/ruff/pull/8356))
|
||||
- Avoid raising `D300` when both triple quote styles are present ([#8462](https://github.com/astral-sh/ruff/pull/8462))
|
||||
- Consider unterminated f-strings in `FStringRanges` ([#8154](https://github.com/astral-sh/ruff/pull/8154))
|
||||
- Avoid including literal `shell=True` for truthy, non-`True` diagnostics ([#8359](https://github.com/astral-sh/ruff/pull/8359))
|
||||
- Avoid triggering single-element test for starred expressions ([#8433](https://github.com/astral-sh/ruff/pull/8433))
|
||||
- Detect and ignore Jupyter automagics ([#8398](https://github.com/astral-sh/ruff/pull/8398))
|
||||
- Fix invalid E231 error with f-strings ([#8369](https://github.com/astral-sh/ruff/pull/8369))
|
||||
- Avoid triggering `NamedTuple` rewrite with starred annotation ([#8434](https://github.com/astral-sh/ruff/pull/8434))
|
||||
- Avoid un-setting bracket flag in logical lines ([#8380](https://github.com/astral-sh/ruff/pull/8380))
|
||||
- Place 'r' prefix before 'f' for raw format strings ([#8464](https://github.com/astral-sh/ruff/pull/8464))
|
||||
- Remove trailing periods from NumPy 2.0 code actions ([#8475](https://github.com/astral-sh/ruff/pull/8475))
|
||||
- Fix bug where `PLE1307` was raised when formatting `%c` with characters ([#8407](https://github.com/astral-sh/ruff/pull/8407))
|
||||
- Remove unicode flag from comparable ([#8440](https://github.com/astral-sh/ruff/pull/8440))
|
||||
- Improve B015 message ([#8295](https://github.com/astral-sh/ruff/pull/8295))
|
||||
- Use `fixedOverflowWidgets` for playground popover ([#8458](https://github.com/astral-sh/ruff/pull/8458))
|
||||
- Mark `byte_bounds` as a non-backwards-compatible NumPy 2.0 change ([#8474](https://github.com/astral-sh/ruff/pull/8474))
|
||||
|
||||
### Internals
|
||||
|
||||
- Add a dedicated cache directory per Ruff version ([#8333](https://github.com/astral-sh/ruff/pull/8333))
|
||||
- Allow selective caching for `--fix` and `--diff` ([#8316](https://github.com/astral-sh/ruff/pull/8316))
|
||||
- Improve performance of comment parsing ([#8193](https://github.com/astral-sh/ruff/pull/8193))
|
||||
- Improve performance of string parsing ([#8227](https://github.com/astral-sh/ruff/pull/8227))
|
||||
- Use a dedicated sort key for isort import sorting ([#7963](https://github.com/astral-sh/ruff/pull/7963))
|
||||
|
||||
## 0.1.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bandit`\] Implement `mako-templates` (`S702`) ([#8533](https://github.com/astral-sh/ruff/pull/8533))
|
||||
- \[`flake8-trio`\] Implement `TRIO105` ([#8490](https://github.com/astral-sh/ruff/pull/8490))
|
||||
- \[`flake8-trio`\] Implement `TRIO109` ([#8534](https://github.com/astral-sh/ruff/pull/8534))
|
||||
- \[`flake8-trio`\] Implement `TRIO110` ([#8537](https://github.com/astral-sh/ruff/pull/8537))
|
||||
- \[`flake8-trio`\] Implement `TRIO115` ([#8486](https://github.com/astral-sh/ruff/pull/8486))
|
||||
- \[`refurb`\] Implement `type-none-comparison` (`FURB169`) ([#8487](https://github.com/astral-sh/ruff/pull/8487))
|
||||
- Flag all comparisons against builtin types in `E721` ([#8491](https://github.com/astral-sh/ruff/pull/8491))
|
||||
- Mark the `SIM118` fix as safe when the expression is a known dictionary ([#8525](https://github.com/astral-sh/ruff/pull/8525))
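For reference, `SIM118` covers membership tests like the following (hypothetical snippet):

```python
config = {"debug": True}

if "debug" in config.keys():  # SIM118: `.keys()` is unnecessary
    ...

if "debug" in config:  # the fix; now treated as safe because `config` is a plain dict
    ...
```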
### Formatter
|
||||
|
||||
- Fix multiline lambda expression statement formatting ([#8466](https://github.com/astral-sh/ruff/pull/8466))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add hidden `--extension` to override inference of source type from file extension ([#8373](https://github.com/astral-sh/ruff/pull/8373))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Account for selector specificity when merging `extend_unsafe_fixes` and `override extend_safe_fixes` ([#8444](https://github.com/astral-sh/ruff/pull/8444))
|
||||
- Add support for disabling cache with `RUFF_NO_CACHE` environment variable ([#8538](https://github.com/astral-sh/ruff/pull/8538))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`E721`\] Flag comparisons to `memoryview` ([#8485](https://github.com/astral-sh/ruff/pull/8485))
|
||||
- Allow collapsed-ellipsis bodies in other statements ([#8499](https://github.com/astral-sh/ruff/pull/8499))
|
||||
- Avoid `D301` autofix for `u` prefixed strings ([#8495](https://github.com/astral-sh/ruff/pull/8495))
|
||||
- Only flag `flake8-trio` rules when `trio` import is present ([#8550](https://github.com/astral-sh/ruff/pull/8550))
|
||||
- Reject more syntactically invalid Python programs ([#8524](https://github.com/astral-sh/ruff/pull/8524))
|
||||
- Avoid raising `TRIO115` violations for `trio.sleep(...)` calls with non-number values ([#8532](https://github.com/astral-sh/ruff/pull/8532))
|
||||
- Fix `F841` false negative on assignment to multiple variables ([#8489](https://github.com/astral-sh/ruff/pull/8489))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix link to isort `known-first-party` ([#8562](https://github.com/astral-sh/ruff/pull/8562))
|
||||
- Add notes on fix safety to a few rules ([#8500](https://github.com/astral-sh/ruff/pull/8500))
|
||||
- Add missing toml config tabs ([#8512](https://github.com/astral-sh/ruff/pull/8512))
|
||||
- Add instructions for configuration of Emacs ([#8488](https://github.com/astral-sh/ruff/pull/8488))
|
||||
- Improve detail link contrast in dark mode ([#8548](https://github.com/astral-sh/ruff/pull/8548))
|
||||
- Fix typo in example ([#8506](https://github.com/astral-sh/ruff/pull/8506))
|
||||
- Add tabs for configuration files in the documentation ([#8480](https://github.com/astral-sh/ruff/pull/8480))
|
||||
- Recommend `project.requires-python` over `target-version` ([#8513](https://github.com/astral-sh/ruff/pull/8513))
|
||||
- Add singleton escape hatch to `B008` documentation ([#8501](https://github.com/astral-sh/ruff/pull/8501))
|
||||
- Fix tab configuration docs ([#8502](https://github.com/astral-sh/ruff/pull/8502))
|
||||
|
||||
## 0.1.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-boolean-trap`\] Extend `boolean-type-hint-positional-argument` (`FBT001`) to include booleans in unions ([#7501](https://github.com/astral-sh/ruff/pull/7501))
|
||||
- \[`flake8-pie`\] Extend `reimplemented-list-builtin` (`PIE807`) to `dict` reimplementations ([#8608](https://github.com/astral-sh/ruff/pull/8608))
|
||||
- \[`flake8-pie`\] Extend `unnecessary-pass` (`PIE790`) to include ellipses (`...`) ([#8641](https://github.com/astral-sh/ruff/pull/8641))
|
||||
- \[`flake8-pie`\] Implement fix for `unnecessary-spread` (`PIE800`) ([#8668](https://github.com/astral-sh/ruff/pull/8668))
|
||||
- \[`flake8-quotes`\] Implement `unnecessary-escaped-quote` (`Q004`) ([#8630](https://github.com/astral-sh/ruff/pull/8630))
|
||||
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-keyword` (`E271`) and `multiple-spaces-before-keyword` (`E272`) ([#8622](https://github.com/astral-sh/ruff/pull/8622))
|
||||
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-operator` (`E222`) and `multiple-spaces-before-operator` (`E221`) ([#8623](https://github.com/astral-sh/ruff/pull/8623))
|
||||
- \[`pyflakes`\] Extend `is-literal` (`F632`) to include comparisons against mutable initializers ([#8607](https://github.com/astral-sh/ruff/pull/8607))
|
||||
- \[`pylint`\] Implement `redefined-argument-from-local` (`PLR1704`) ([#8159](https://github.com/astral-sh/ruff/pull/8159))
|
||||
- \[`pylint`\] Implement fix for `unnecessary-lambda` (`PLW0108`) ([#8621](https://github.com/astral-sh/ruff/pull/8621))
|
||||
- \[`refurb`\] Implement `if-expr-min-max` (`FURB136`) ([#8664](https://github.com/astral-sh/ruff/pull/8664))
|
||||
- \[`refurb`\] Implement `math-constant` (`FURB152`) ([#8727](https://github.com/astral-sh/ruff/pull/8727))
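Roughly, the rule flags literal approximations of well-known constants (hypothetical snippet):

```python
import math

radius = 2.0
area = 3.14 * radius ** 2     # FURB152: literal approximation of pi
area = math.pi * radius ** 2  # preferred
```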
### Rule changes
|
||||
|
||||
- \[`flake8-annotations`\] Add autotyping-like return type inference for annotation rules ([#8643](https://github.com/astral-sh/ruff/pull/8643))
|
||||
- \[`flake8-future-annotations`\] Implement fix for `future-required-type-annotation` (`FA102`) ([#8711](https://github.com/astral-sh/ruff/pull/8711))
|
||||
- \[`flake8-implicit-namespace-package`\] Avoid missing namespace violations in scripts with shebangs ([#8710](https://github.com/astral-sh/ruff/pull/8710))
|
||||
- \[`pydocstyle`\] Update `over-indentation` (`D208`) to preserve indentation offsets when fixing overindented lines ([#8699](https://github.com/astral-sh/ruff/pull/8699))
|
||||
- \[`pyupgrade`\] Refine `timeout-error-alias` (`UP041`) to remove false positives ([#8587](https://github.com/astral-sh/ruff/pull/8587))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix instability in `await` formatting with fluent style ([#8676](https://github.com/astral-sh/ruff/pull/8676))
|
||||
- Compare formatted and unformatted ASTs during formatter tests ([#8624](https://github.com/astral-sh/ruff/pull/8624))
|
||||
- Preserve trailing semicolon for Notebooks ([#8590](https://github.com/astral-sh/ruff/pull/8590))
|
||||
|
||||
### CLI
|
||||
|
||||
- Improve debug printing for resolving origin of config settings ([#8729](https://github.com/astral-sh/ruff/pull/8729))
|
||||
- Write unchanged, excluded files to stdout when read via stdin ([#8596](https://github.com/astral-sh/ruff/pull/8596))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`isort`\] Support disabling sections with `no-sections = true` ([#8657](https://github.com/astral-sh/ruff/pull/8657))
|
||||
- \[`pep8-naming`\] Support local and dynamic class- and static-method decorators ([#8592](https://github.com/astral-sh/ruff/pull/8592))
|
||||
- \[`pydocstyle`\] Allow overriding pydocstyle convention rules ([#8586](https://github.com/astral-sh/ruff/pull/8586))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid syntax error via importing `trio.lowlevel` ([#8730](https://github.com/astral-sh/ruff/pull/8730))
|
||||
- Omit unrolled augmented assignments in `PIE794` ([#8634](https://github.com/astral-sh/ruff/pull/8634))
|
||||
- Slice source code instead of generating it for `EM` fixes ([#7746](https://github.com/astral-sh/ruff/pull/7746))
|
||||
- Allow whitespace around colon in slices for `whitespace-before-punctuation` (`E203`) ([#8654](https://github.com/astral-sh/ruff/pull/8654))
|
||||
- Use function range for `no-self-use` ([#8637](https://github.com/astral-sh/ruff/pull/8637))
|
||||
- Don't treat f-strings as containing bytes literals for `PLW0129` ([#8675](https://github.com/astral-sh/ruff/pull/8675))
|
||||
- Improve detection of `TYPE_CHECKING` blocks imported from `typing_extensions` or `_typeshed` ([#8429](https://github.com/astral-sh/ruff/pull/8429))
|
||||
- Treat display as a builtin in IPython ([#8707](https://github.com/astral-sh/ruff/pull/8707))
|
||||
- Avoid `FURB113` autofix if comments are present ([#8494](https://github.com/astral-sh/ruff/pull/8494))
|
||||
- Consider the new f-string tokens for `flake8-commas` ([#8582](https://github.com/astral-sh/ruff/pull/8582))
|
||||
- Remove erroneous bad-dunder-name reference ([#8742](https://github.com/astral-sh/ruff/pull/8742))
|
||||
- Avoid recommending Self usages in metaclasses ([#8639](https://github.com/astral-sh/ruff/pull/8639))
|
||||
- Detect runtime-evaluated base classes defined in the current file ([#8572](https://github.com/astral-sh/ruff/pull/8572))
|
||||
- Avoid inserting trailing commas within f-strings ([#8574](https://github.com/astral-sh/ruff/pull/8574))
|
||||
- Remove incorrect deprecation label for stdout and stderr ([#8743](https://github.com/astral-sh/ruff/pull/8743))
|
||||
- Fix unnecessary parentheses in UP007 fix ([#8610](https://github.com/astral-sh/ruff/pull/8610))
|
||||
- Remove repeated and erroneous scoped settings headers in docs ([#8670](https://github.com/astral-sh/ruff/pull/8670))
|
||||
- Trim trailing empty strings when converting to f-strings ([#8712](https://github.com/astral-sh/ruff/pull/8712))
|
||||
- Fix ordering for `force-sort-within-sections` ([#8665](https://github.com/astral-sh/ruff/pull/8665))
|
||||
- Run unicode prefix rule over tokens ([#8709](https://github.com/astral-sh/ruff/pull/8709))
|
||||
- Update UP032 to unescape curly braces in literal parts of converted strings ([#8697](https://github.com/astral-sh/ruff/pull/8697))
|
||||
- List all ipython builtins ([#8719](https://github.com/astral-sh/ruff/pull/8719))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document conventions in the FAQ ([#8638](https://github.com/astral-sh/ruff/pull/8638))
|
||||
- Redirect from rule codes to rule pages in docs ([#8636](https://github.com/astral-sh/ruff/pull/8636))
|
||||
- Fix permalink to convention setting ([#8575](https://github.com/astral-sh/ruff/pull/8575))
|
||||
|
||||
## 0.1.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293))
|
||||
- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947))
|
||||
- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919))
|
||||
- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885))
|
||||
- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651))
|
||||
- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829))
|
||||
- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934))
|
||||
- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762))
|
||||
- \[`pylint`\] Add `unnecessary-list-index-lookup` (`PLR1736`) with autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999))
|
||||
- \[`pylint`\] Implement `R0202` and `R0203` with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335))
|
||||
- \[`pylint`\] Implement `repeated-keyword` (`PLE1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706))
|
||||
- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995))
|
||||
- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036))
|
||||
- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842))
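Roughly, the rule rewrites logarithms with a redundant base (hypothetical snippet):

```python
import math

x = 8.0
y = math.log(x, 2)  # FURB163: redundant base argument
y = math.log2(x)    # preferred; base 10 and `math.e` are handled similarly
```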
### Rule changes
|
||||
|
||||
- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods ([#8882](https://github.com/astral-sh/ruff/pull/8882))
|
||||
- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815))
|
||||
- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825))
|
||||
- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966))
|
||||
- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948))
|
||||
- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801))
|
||||
- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917))
|
||||
- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821))
|
||||
- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872))
|
||||
- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816))
|
||||
- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928))
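A sketch of the kind of call the new fix updates (the file name is made up, and the exact argument inserted by the fix is an assumption here):

```python
with open("out.txt", "w") as f:  # PLW1514: encoding unspecified
    f.write("hi")

with open("out.txt", "w", encoding="utf-8") as f:  # after the fix (assumed form)
    f.write("hi")
```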
### Formatter
|
||||
|
||||
- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984))
|
||||
- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941))
|
||||
- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921))
|
||||
|
||||
### CLI
|
||||
|
||||
- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791))
|
||||
- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
|
||||
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
|
||||
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
|
||||
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934))
|
||||
- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979))
|
||||
- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915))
|
||||
- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971))
|
||||
- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965))
|
||||
- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970))
|
||||
- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983))
|
||||
- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952))
|
||||
- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982))
|
||||
- Ignore `@overload` and `@override` methods for too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954))
|
||||
- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880))
|
||||
- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793))
|
||||
- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810))
|
||||
- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768))
|
||||
- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916))
|
||||
- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881))
|
||||
- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769))
|
||||
- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767))
|
||||
- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933))
|
||||
- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936))
|
||||
- \[`isort`\] Mark `__main__` as a first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805))
|
||||
- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785))
|
||||
- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809))
|
||||
- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838))
|
||||
- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866))
|
||||
- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929))
|
||||
- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839))
|
||||
- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853))
|
||||
- Add the command to run Ruff via pkgx to `installation.md` ([#8955](https://github.com/astral-sh/ruff/pull/8955))
|
||||
- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918))
|
||||
- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937))
|
||||
- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930))
|
||||
- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806))
|
||||
- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802))
|
||||
- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963))
|
||||
- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766))
|
||||
|
||||
## 0.1.8
|
||||
|
||||
This release includes opt-in support for formatting Python snippets within docstrings via the `docstring-code-format` setting.

[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details!
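A minimal sketch of what the setting covers, assuming `docstring-code-format` is enabled in the formatter configuration (the function below is hypothetical): `ruff format` will now also reformat the doctest embedded in the docstring.

```python
def add(a: int, b: int) -> int:
    """Add two integers.

    Example:
        >>> add( 1,2 )
        3
    """
    return a + b
```

With the setting enabled, the spacing inside the doctest call is normalized (for example `add( 1,2 )` becomes `add(1, 2)`).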
### Preview features
|
||||
|
||||
- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
|
||||
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
|
||||
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
|
||||
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
|
||||
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
|
||||
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
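Roughly, the rule simplifies digest-to-hex chains (hypothetical snippet):

```python
import hashlib

checksum = hashlib.sha256(b"data").digest().hex()  # FURB181
checksum = hashlib.sha256(b"data").hexdigest()     # preferred
```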
### Rule changes
|
||||
|
||||
- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
|
||||
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
|
||||
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
|
||||
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
|
||||
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
|
||||
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
|
||||
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
|
||||
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))
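A sketch of the new fix (the command is a stand-in, and the exact keyword the fix inserts is an assumption here):

```python
import subprocess
import sys

subprocess.run([sys.executable, "--version"])               # PLW1510: `check` not set
subprocess.run([sys.executable, "--version"], check=False)  # assumed fixed form
```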
### Formatter
|
||||
|
||||
- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
|
||||
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
|
||||
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
|
||||
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
|
||||
- Add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
|
||||
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
|
||||
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))
|
||||
|
||||
### CLI
|
||||
|
||||
- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
|
||||
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
|
||||
- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
|
||||
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
|
||||
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
|
||||
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
|
||||
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
|
||||
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
|
||||
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
|
||||
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
|
||||
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
|
||||
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
|
||||
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
|
||||
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))
|
||||
|
||||
## 0.1.9
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- Add site-packages to default exclusions ([#9188](https://github.com/astral-sh/ruff/pull/9188))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix: Avoid parenthesizing subscript targets and values ([#9209](https://github.com/astral-sh/ruff/pull/9209))
|
||||
- \[`pylint`\] Implement `too-many-locals` (`PLR0914`) ([#9163](https://github.com/astral-sh/ruff/pull/9163))
|
||||
- Implement `reimplemented-operator` (`FURB118`) ([#9171](https://github.com/astral-sh/ruff/pull/9171))
|
||||
- Add a rule to detect string members in runtime-evaluated unions ([#9143](https://github.com/astral-sh/ruff/pull/9143))
|
||||
- Implement `no_blank_line_before_class_docstring` preview style ([#9154](https://github.com/astral-sh/ruff/pull/9154))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Avoid improperly flagging `CONSTANT_CASE` variables for Yoda violations (`SIM300`) ([#9164](https://github.com/astral-sh/ruff/pull/9164))
|
||||
- \[`flake8-pyi`\] Cover ParamSpecs and TypeVarTuples (`PYI018`) ([#9198](https://github.com/astral-sh/ruff/pull/9198))
|
||||
- \[`flake8-bugbear`\] Add fix for `zip-without-explicit-strict` (`B905`) ([#9176](https://github.com/astral-sh/ruff/pull/9176))
|
||||
- Add fix to automatically remove `print` and `pprint` statements (`T201`, `T203`) ([#9208](https://github.com/astral-sh/ruff/pull/9208))
|
||||
- Prefer `Never` to `NoReturn` in auto-typing in Python >= 3.11 (`ANN201`) ([#9213](https://github.com/astral-sh/ruff/pull/9213))
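As a hedged sketch of the annotation the rule now suggests on Python 3.11+ (the function is hypothetical):

```python
from typing import Never  # available from Python 3.11


def fail(message: str) -> Never:  # previously `NoReturn` would have been suggested
    raise RuntimeError(message)
```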
### Formatter
|
||||
|
||||
- `can_omit_optional_parentheses`: Exit early for unparenthesized expressions ([#9125](https://github.com/astral-sh/ruff/pull/9125))
|
||||
- Fix `dynamic` mode with doctests so that it doesn't exceed configured line width ([#9129](https://github.com/astral-sh/ruff/pull/9129))
|
||||
- Fix `can_omit_optional_parentheses` for expressions with a right most fstring ([#9124](https://github.com/astral-sh/ruff/pull/9124))
|
||||
- Add `target_version` to formatter options ([#9220](https://github.com/astral-sh/ruff/pull/9220))
|
||||
|
||||
### CLI
|
||||
|
||||
- Update `ruff format --check` to display message for already formatted files ([#9153](https://github.com/astral-sh/ruff/pull/9153))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Reverse order of arguments for `operator.contains` ([#9192](https://github.com/astral-sh/ruff/pull/9192))
|
||||
- Iterate over lambdas in deferred type annotations ([#9175](https://github.com/astral-sh/ruff/pull/9175))
|
||||
- Fix panic in `D208` with multibyte indent ([#9147](https://github.com/astral-sh/ruff/pull/9147))
|
||||
- Add support for `NoReturn` in auto-return-typing ([#9206](https://github.com/astral-sh/ruff/pull/9206))
|
||||
- Allow removal of `typing` from `exempt-modules` ([#9214](https://github.com/astral-sh/ruff/pull/9214))
|
||||
- Avoid `mutable-class-default` violations for Pydantic subclasses ([#9187](https://github.com/astral-sh/ruff/pull/9187))
|
||||
- Fix dropped union expressions for piped non-types in `PYI055` autofix ([#9161](https://github.com/astral-sh/ruff/pull/9161))
|
||||
- Enable annotation quoting for multi-line expressions ([#9142](https://github.com/astral-sh/ruff/pull/9142))
|
||||
- Deduplicate edits when quoting annotations ([#9140](https://github.com/astral-sh/ruff/pull/9140))
|
||||
- Prevent invalid utf8 indexing in cell magic detection ([#9146](https://github.com/astral-sh/ruff/pull/9146))
|
||||
- Avoid nested quotations in auto-quoting fix ([#9168](https://github.com/astral-sh/ruff/pull/9168))
|
||||
- Add base-class inheritance detection to flake8-django rules ([#9151](https://github.com/astral-sh/ruff/pull/9151))
|
||||
- Avoid `asyncio-dangling-task` violations on shadowed bindings ([#9215](https://github.com/astral-sh/ruff/pull/9215))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix blog post URL in changelog ([#9119](https://github.com/astral-sh/ruff/pull/9119))
|
||||
- Add error suppression hint for multi-line strings ([#9205](https://github.com/astral-sh/ruff/pull/9205))
|
||||
- Fix typo in SemanticModel.parent_expression docstring ([#9167](https://github.com/astral-sh/ruff/pull/9167))
|
||||
- Document link between import sorting and formatter ([#9117](https://github.com/astral-sh/ruff/pull/9117))
|
||||
|
||||
## 0.1.10
|
||||
|
||||
### Preview features
|
||||
|
||||
- Improve `dummy_implementations` preview style formatting ([#9240](https://github.com/astral-sh/ruff/pull/9240))
|
||||
- Normalize hex and Unicode escape sequences in strings ([#9280](https://github.com/astral-sh/ruff/pull/9280))
|
||||
- Parenthesize long type annotations in annotated assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
|
||||
- Parenthesize multi-context managers in `with` statements ([#9222](https://github.com/astral-sh/ruff/pull/9222))
|
||||
- \[`flake8-pyi`\] Implement `generator-return-from-iter-method` (`PYI058`) ([#9313](https://github.com/astral-sh/ruff/pull/9313))
|
||||
- \[`pylint`\] Implement `empty-comment` (`PLR2044`) ([#9174](https://github.com/astral-sh/ruff/pull/9174))
|
||||
- \[`refurb`\] Implement `bit-count` (`FURB161`) ([#9265](https://github.com/astral-sh/ruff/pull/9265))
|
||||
- \[`ruff`\] Add `never-union` rule to detect redundant `typing.NoReturn` and `typing.Never` ([#9217](https://github.com/astral-sh/ruff/pull/9217))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add paths to TOML parse errors ([#9358](https://github.com/astral-sh/ruff/pull/9358))
|
||||
- Add row and column numbers to formatter parse errors ([#9321](https://github.com/astral-sh/ruff/pull/9321))
|
||||
- Improve responsiveness when invoked via Python ([#9315](https://github.com/astral-sh/ruff/pull/9315))
|
||||
- Short rule messages should not end with a period ([#9345](https://github.com/astral-sh/ruff/pull/9345))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Respect runtime-required decorators on functions ([#9317](https://github.com/astral-sh/ruff/pull/9317))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid `asyncio-dangling-task` for nonlocal and global bindings ([#9263](https://github.com/astral-sh/ruff/pull/9263))
|
||||
- Escape trailing placeholders in rule documentation ([#9301](https://github.com/astral-sh/ruff/pull/9301))
|
||||
- Fix continuation detection following multi-line strings ([#9332](https://github.com/astral-sh/ruff/pull/9332))
|
||||
- Fix scoping for generators in named expressions in classes ([#9248](https://github.com/astral-sh/ruff/pull/9248))
|
||||
- Port from obsolete wsl crate to is-wsl ([#9356](https://github.com/astral-sh/ruff/pull/9356))
|
||||
- Remove special pre-visit for module docstrings ([#9261](https://github.com/astral-sh/ruff/pull/9261))
|
||||
- Respect `__str__` definitions from super classes ([#9338](https://github.com/astral-sh/ruff/pull/9338))
|
||||
- Respect `unused-noqa` via `per-file-ignores` ([#9300](https://github.com/astral-sh/ruff/pull/9300))
|
||||
- Respect attribute chains when resolving builtin call paths ([#9309](https://github.com/astral-sh/ruff/pull/9309))
|
||||
- Treat all `typing_extensions` members as typing aliases ([#9335](https://github.com/astral-sh/ruff/pull/9335))
|
||||
- Use `Display` for formatter parse errors ([#9316](https://github.com/astral-sh/ruff/pull/9316))
|
||||
- Wrap subscripted dicts in parens for f-string conversion ([#9238](https://github.com/astral-sh/ruff/pull/9238))
|
||||
- \[`flake8-annotations`\] Avoid adding return types to stub methods ([#9277](https://github.com/astral-sh/ruff/pull/9277))
|
||||
- \[`flake8-annotations`\] Respect mixed `return` and `raise` cases in return-type analysis ([#9310](https://github.com/astral-sh/ruff/pull/9310))
|
||||
- \[`flake8-bandit`\] Don't report violations when `SafeLoader` is imported from `yaml.loader` (`S506`) ([#9299](https://github.com/astral-sh/ruff/pull/9299))
|
||||
- \[`pylint`\] Avoid panic when comment is preceded by Unicode ([#9331](https://github.com/astral-sh/ruff/pull/9331))
|
||||
- \[`pylint`\] Change `PLR0917` error message to match other `PLR09XX` messages ([#9308](https://github.com/astral-sh/ruff/pull/9308))
|
||||
- \[`refurb`\] Avoid false positives for `math-constant` (`FURB152`) ([#9290](https://github.com/astral-sh/ruff/pull/9290))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Expand target name for better rule documentation ([#9302](https://github.com/astral-sh/ruff/pull/9302))
|
||||
- Fix typos found by codespell ([#9346](https://github.com/astral-sh/ruff/pull/9346))
|
||||
- \[`perflint`\] Document `PERF102` fix un-safety ([#9351](https://github.com/astral-sh/ruff/pull/9351))
|
||||
- \[`pyupgrade`\] Document `UP007` fix un-safety ([#9306](https://github.com/astral-sh/ruff/pull/9306))
|
||||
|
||||
## 0.1.11
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `super-without-brackets` (`W0245`) ([#9257](https://github.com/astral-sh/ruff/pull/9257))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Check path string properly in `python -m ruff` invocations ([#9367](https://github.com/astral-sh/ruff/pull/9367))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Tweak `relative-imports` message ([#9365](https://github.com/astral-sh/ruff/pull/9365))
|
||||
- Add fix safety note for `yield-in-for-loop` ([#9364](https://github.com/astral-sh/ruff/pull/9364))
|
||||
|
||||
## 0.1.12
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Hug multiline-strings in preview style ([#9243](https://github.com/astral-sh/ruff/pull/9243))
|
||||
- \[`flake8-bandit`\] Add `ssl-with-no-version` (`S504`) ([#9384](https://github.com/astral-sh/ruff/pull/9384))
|
||||
- \[`flake8-bandit`\] Implement `ssl-insecure-version` (`S502`) ([#9390](https://github.com/astral-sh/ruff/pull/9390))
|
||||
- \[`flake8-bandit`\] Implement `ssl-with-bad-defaults` (`S503`) ([#9391](https://github.com/astral-sh/ruff/pull/9391))
|
||||
- \[`flake8-bandit`\] Implement suspicious import rules (`S4XX`) ([#8831](https://github.com/astral-sh/ruff/pull/8831))
|
||||
- \[`flake8-simplify`\] Implement `zip-dict-keys-and-values` (`SIM911`) ([#9460](https://github.com/astral-sh/ruff/pull/9460))
|
||||
- \[`pyflakes`\] Add a fix for `redefined-while-unused` (`F811`) ([#9419](https://github.com/astral-sh/ruff/pull/9419))
|
||||
- \[`pylint`\] Implement `unnecessary-dunder-call` (`C2801`) ([#9166](https://github.com/astral-sh/ruff/pull/9166))
|
||||
- \[`ruff`\] Add `parenthesize-chained-operators` (`RUF021`) to enforce parentheses in `a or b and c` ([#9440](https://github.com/astral-sh/ruff/pull/9440))
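Illustratively (hypothetical snippet):

```python
a, b, c = True, False, True

result = a or b and c    # RUF021: precedence of `and` over `or` is implicit
result = a or (b and c)  # the fix makes the grouping explicit
```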
### Rule changes
|
||||
|
||||
- \[`flake8-boolean-trap`\] Allow Boolean positional arguments in setters ([#9429](https://github.com/astral-sh/ruff/pull/9429))
|
||||
- \[`flake8-builtins`\] Restrict `builtin-attribute-shadowing` (`A003`) to actual shadowed references ([#9462](https://github.com/astral-sh/ruff/pull/9462))
|
||||
- \[`flake8-pyi`\] Add fix for `generator-return-from-iter-method` (`PYI058`) ([#9355](https://github.com/astral-sh/ruff/pull/9355))
|
||||
- \[`pyflakes`\] Don't flag `redefined-while-unused` (`F811`) in `if` branches ([#9418](https://github.com/astral-sh/ruff/pull/9418))
|
||||
- \[`pyupgrade`\] Add some additional Python 3.12 typing members to `deprecated-import` ([#9445](https://github.com/astral-sh/ruff/pull/9445))
|
||||
- \[`ruff`\] Add fix for `parenthesize-chained-operators` (`RUF021`) ([#9449](https://github.com/astral-sh/ruff/pull/9449))
|
||||
- \[`ruff`\] Include subscripts and attributes in static key rule (`RUF011`) ([#9416](https://github.com/astral-sh/ruff/pull/9416))
|
||||
- \[`ruff`\] Support variable keys in static dictionary key rule (`RUF011`) ([#9411](https://github.com/astral-sh/ruff/pull/9411))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Generate deterministic IDs when formatting notebooks ([#9359](https://github.com/astral-sh/ruff/pull/9359))
|
||||
- Allow `# fmt: skip` with interspersed same-line comments ([#9395](https://github.com/astral-sh/ruff/pull/9395))
|
||||
- Parenthesize breaking named expressions in match guards ([#9396](https://github.com/astral-sh/ruff/pull/9396))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Add cell indexes to all diagnostics ([#9387](https://github.com/astral-sh/ruff/pull/9387))
|
||||
- Avoid infinite loop in constant vs. `None` comparisons ([#9376](https://github.com/astral-sh/ruff/pull/9376))
|
||||
- Handle raises with implicit alternate branches ([#9377](https://github.com/astral-sh/ruff/pull/9377))
|
||||
- Ignore trailing quotes for unclosed l-brace errors ([#9388](https://github.com/astral-sh/ruff/pull/9388))
|
||||
- Respect multi-segment submodule imports when resolving qualified names ([#9382](https://github.com/astral-sh/ruff/pull/9382))
|
||||
- Use `DisplayParseError` for stdin parser errors ([#9409](https://github.com/astral-sh/ruff/pull/9409))
|
||||
- Use `comment_ranges` for isort directive extraction ([#9414](https://github.com/astral-sh/ruff/pull/9414))
|
||||
- Use transformed source code for diagnostic locations ([#9408](https://github.com/astral-sh/ruff/pull/9408))
|
||||
- \[`flake8-pyi`\] Exclude `warnings.deprecated` and `typing_extensions.deprecated` arguments ([#9423](https://github.com/astral-sh/ruff/pull/9423))
|
||||
- \[`flake8-pyi`\] Fix false negative for `unused-private-protocol` (`PYI046`) with unused generic protocols ([#9405](https://github.com/astral-sh/ruff/pull/9405))
|
||||
- \[`pydocstyle`\] Disambiguate argument descriptors from section headers ([#9427](https://github.com/astral-sh/ruff/pull/9427))
|
||||
- \[`pylint`\] Homogenize `PLR0914` message to match other `PLR09XX` rules ([#9399](https://github.com/astral-sh/ruff/pull/9399))
|
||||
- \[`ruff`\] Allow `Hashable = None` in type annotations (`RUF013`) ([#9442](https://github.com/astral-sh/ruff/pull/9442))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix admonition hyperlink colouring ([#9385](https://github.com/astral-sh/ruff/pull/9385))
|
||||
- Add missing preview link ([#9386](https://github.com/astral-sh/ruff/pull/9386))
|
||||
|
||||
## 0.1.13
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Include base pyproject when initializing cache settings ([#9480](https://github.com/astral-sh/ruff/pull/9480))
|
||||
- \[`flake8-simplify`\] Account for possibly-empty f-string values in truthiness logic ([#9484](https://github.com/astral-sh/ruff/pull/9484))
|
||||
- \[`pylint`\] Add the missing period in `unnecessary-dunder-call` ([#9485](https://github.com/astral-sh/ruff/pull/9485))
|
||||
- \[`pylint`\] Fix `__aenter__` message in `unnecessary-dunder-call` ([#9492](https://github.com/astral-sh/ruff/pull/9492))
|
||||
|
||||
## 0.1.14
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Add fix for `duplicate-value` (`B033`) ([#9510](https://github.com/astral-sh/ruff/pull/9510))
|
||||
- \[`flake8-simplify`\] Implement `enumerate-for-loop` (`SIM113`) ([#7777](https://github.com/astral-sh/ruff/pull/7777))
|
||||
- \[`pygrep_hooks`\] Add fix for `deprecated-log-warn` (`PGH002`) ([#9519](https://github.com/astral-sh/ruff/pull/9519))
|
||||
- \[`pylint`\] Implement `import-private-name` (`C2701`) ([#5920](https://github.com/astral-sh/ruff/pull/5920))
|
||||
- \[`refurb`\] Implement `regex-flag-alias` with fix (`FURB167`) ([#9516](https://github.com/astral-sh/ruff/pull/9516))
|
||||
- \[`ruff`\] Add rule and fix to sort contents of `__all__` (`RUF022`) ([#9474](https://github.com/astral-sh/ruff/pull/9474))
|
||||
- \[`tryceratops`\] Add fix for `error-instead-of-exception` (`TRY400`) ([#9520](https://github.com/astral-sh/ruff/pull/9520))
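A sketch of the pattern and the new fix (the handler below is hypothetical):

```python
import logging

try:
    1 / 0
except ZeroDivisionError:
    logging.error("division failed")      # TRY400: the traceback is dropped
    logging.exception("division failed")  # fixed form keeps the traceback
```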
### Rule changes
|
||||
|
||||
- \[`flake8-pyi`\] Fix `PYI047` false negatives on PEP-695 type aliases ([#9566](https://github.com/astral-sh/ruff/pull/9566))
|
||||
- \[`flake8-pyi`\] Fix `PYI049` false negatives on call-based `TypedDict`s ([#9567](https://github.com/astral-sh/ruff/pull/9567))
|
||||
- \[`pylint`\] Exclude `self` and `cls` when counting method arguments (`PLR0917`) ([#9563](https://github.com/astral-sh/ruff/pull/9563))
|
||||
|
||||
### CLI
|
||||
|
||||
- `--show-settings` displays active settings in a far more readable format ([#9464](https://github.com/astral-sh/ruff/pull/9464))
|
||||
- Add `--extension` support to the formatter ([#9483](https://github.com/astral-sh/ruff/pull/9483))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Ignore preview status for fixable and unfixable selectors ([#9538](https://github.com/astral-sh/ruff/pull/9538))
|
||||
- \[`pycodestyle`\] Use the configured tab size when expanding indents ([#9506](https://github.com/astral-sh/ruff/pull/9506))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Recursively visit deferred AST nodes ([#9541](https://github.com/astral-sh/ruff/pull/9541))
|
||||
- Visit deferred lambdas before type definitions ([#9540](https://github.com/astral-sh/ruff/pull/9540))
|
||||
- \[`flake8-simplify`\] Avoid some more `enumerate-for-loop` false positives (`SIM113`) ([#9515](https://github.com/astral-sh/ruff/pull/9515))
|
||||
- \[`pandas-vet`\] Limit inplace diagnostics to methods that accept inplace ([#9495](https://github.com/astral-sh/ruff/pull/9495))
|
||||
- \[`pylint`\] Add the `__prepare__` method to the list of recognized dunder methods ([#9529](https://github.com/astral-sh/ruff/pull/9529))
|
||||
- \[`pylint`\] Ignore unnecessary dunder calls within dunder definitions ([#9496](https://github.com/astral-sh/ruff/pull/9496))
|
||||
- \[`refurb`\] Avoid bailing when `reimplemented-operator` is called on function (`FURB118`) ([#9556](https://github.com/astral-sh/ruff/pull/9556))
|
||||
- \[`ruff`\] Avoid treating named expressions as static keys (`RUF011`) ([#9494](https://github.com/astral-sh/ruff/pull/9494))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add instructions on using `noqa` with isort rules ([#9555](https://github.com/astral-sh/ruff/pull/9555))
|
||||
- Update a documentation URL that returned 'page not found' ([#9565](https://github.com/astral-sh/ruff/pull/9565))
|
||||
- Fix admonition in dark mode ([#9502](https://github.com/astral-sh/ruff/pull/9502))
|
||||
- Update contributing docs to use `cargo bench -p ruff_benchmark` ([#9535](https://github.com/astral-sh/ruff/pull/9535))
|
||||
- Update emacs integration section to include `emacs-ruff-format` ([#9403](https://github.com/astral-sh/ruff/pull/9403))
|
||||
- \[`flake8-blind-except`\] Document exceptions to `blind-except` rule ([#9580](https://github.com/astral-sh/ruff/pull/9580))
|
||||
|
||||
## 0.1.15
|
||||
|
||||
### Preview features
|
||||
|
||||
- Error when `NURSERY` selector is used with `--preview` ([#9682](https://github.com/astral-sh/ruff/pull/9682))
|
||||
- Preserve indentation around multiline strings in formatter ([#9637](https://github.com/astral-sh/ruff/pull/9637))
|
||||
- \[`flake8-return`\] Add fixes for all rules (`RET505`, `RET506`, `RET507`, `RET508`) ([#9595](https://github.com/astral-sh/ruff/pull/9595))
|
||||
- \[`flake8-simplify`\] Add fix for `if-with-same-arms` (`SIM114`) ([#9591](https://github.com/astral-sh/ruff/pull/9591))
|
||||
- \[`pycodestyle`\] Add fix for `multiple-imports-on-one-line` (`E401`) ([#9518](https://github.com/astral-sh/ruff/pull/9518))
|
||||
- \[`pylint`\] Add fix for `collapsible-else-if` (`PLR5501`) ([#9594](https://github.com/astral-sh/ruff/pull/9594))
|
||||
- \[`pylint`\] Add fix for `useless-else-on-loop` (`PLW0120`) ([#9590](https://github.com/astral-sh/ruff/pull/9590))
|
||||
- \[`pylint`\] Implement `assigning-non-slot` (`E0237`) ([#9623](https://github.com/astral-sh/ruff/pull/9623))
|
||||
- \[`pylint`\] Implement `potential-index-error` (`PLE0643`) ([#9545](https://github.com/astral-sh/ruff/pull/9545))
|
||||
- \[`pylint`\] Implement `too-many-nested-blocks` (`PLR1702`) ([#9172](https://github.com/astral-sh/ruff/pull/9172))
|
||||
- \[`ruff`\] Add rule to sort `__slots__` and `__match_args__` ([#9564](https://github.com/astral-sh/ruff/pull/9564))
|
||||
- \[`ruff`\] Detect unnecessary `dict` comprehensions for iterables (`RUF025`) ([#9613](https://github.com/astral-sh/ruff/pull/9613))
|
||||
- \[`ruff`\] Guard against use of `default_factory` as a keyword argument (`RUF026`) ([#9651](https://github.com/astral-sh/ruff/pull/9651))
|
||||
- \[`ruff`\] Implement `mutable-fromkeys-value` (`RUF024`) ([#9597](https://github.com/astral-sh/ruff/pull/9597))
|
||||
|
||||
### CLI
|
||||
|
||||
- Enable auto-wrapping of `--help` output ([#9633](https://github.com/astral-sh/ruff/pull/9633))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid rendering display-only rules as fixable ([#9649](https://github.com/astral-sh/ruff/pull/9649))
|
||||
- Detect automagic-like assignments in notebooks ([#9653](https://github.com/astral-sh/ruff/pull/9653))
|
||||
- Generate custom JSON schema for dynamic setting ([#9632](https://github.com/astral-sh/ruff/pull/9632))
|
||||
- \[`flake8-no-pep420`\] Include global `--config` when determining namespace packages ([#9603](https://github.com/astral-sh/ruff/pull/9603))
|
||||
- \[`flake8-pie`\] Omit bound tuples passed to `.startswith` or `.endswith` ([#9661](https://github.com/astral-sh/ruff/pull/9661))
|
||||
- \[`flake8-return`\] Avoid panic when fixing inlined else blocks ([#9657](https://github.com/astral-sh/ruff/pull/9657))
|
||||
- \[`flake8-return`\] Consider exception suppression in unnecessary assignment ([#9673](https://github.com/astral-sh/ruff/pull/9673))
|
||||
- \[`flake8-return`\] Take `NoReturn` annotation into account when analyzing implicit returns ([#9636](https://github.com/astral-sh/ruff/pull/9636))
|
||||
- \[`flake8-simplify`\] Support inverted returns in `needless-bool` (`SIM103`) ([#9619](https://github.com/astral-sh/ruff/pull/9619))
|
||||
- \[`flake8-type-checking`\] Add Pydantic's `BaseConfig` to default-copy list ([#9650](https://github.com/astral-sh/ruff/pull/9650))
|
||||
- \[`flake8-type-checking`\] Avoid marking `InitVar` as a typing-only annotation ([#9688](https://github.com/astral-sh/ruff/pull/9688))
|
||||
- \[`pycodestyle`\] Allow `dtype` comparisons in `type-comparison` ([#9676](https://github.com/astral-sh/ruff/pull/9676))
|
||||
- \[`pydocstyle`\] Re-implement `last-line-after-section` (`D413`) ([#9654](https://github.com/astral-sh/ruff/pull/9654))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`flake8-pytest-style`\] Add fix safety documentation for `duplicate-parameterize-test-cases` ([#9678](https://github.com/astral-sh/ruff/pull/9678))
|
||||
- \[`pylint`\] Document `literal-membership` fix safety conditions ([#9677](https://github.com/astral-sh/ruff/pull/9677))
|
||||
- \[`isort`\] Fix reference to `isort` rule code ([#9598](https://github.com/astral-sh/ruff/pull/9598))
|
||||
@@ -1,101 +0,0 @@
|
||||
# Changelog 0.10.x
|
||||
|
||||
## 0.10.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.10.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
||||
|
||||
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
|
||||
You can find a description of this change in the 0.11.0 section.
|
||||
|
||||
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
|
||||
|
||||
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
|
||||
|
||||
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
|
||||
|
||||
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
|
||||
|
||||
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
|
||||
|
||||
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
|
||||
|
||||
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
|
||||
|
||||
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
|
||||
|
||||
### Deprecated Rules
|
||||
|
||||
The following rules have been deprecated:
|
||||
|
||||
- [`non-pep604-isinstance`](https://docs.astral.sh/ruff/rules/non-pep604-isinstance/) (`UP038`)
|
||||
- [`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/) (`S320`)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- \[`unsafe-markup-use`\]: `RUF035` to `S704`
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`batched-without-explicit-strict`](https://docs.astral.sh/ruff/rules/batched-without-explicit-strict) (`B911`)
|
||||
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable) (`C420`)
|
||||
- [`datetime-min-max`](https://docs.astral.sh/ruff/rules/datetime-min-max) (`DTZ901`)
|
||||
- [`fast-api-unused-path-parameter`](https://docs.astral.sh/ruff/rules/fast-api-unused-path-parameter) (`FAST003`)
|
||||
- [`root-logger-call`](https://docs.astral.sh/ruff/rules/root-logger-call) (`LOG015`)
|
||||
- [`len-test`](https://docs.astral.sh/ruff/rules/len-test) (`PLC1802`)
|
||||
- [`shallow-copy-environ`](https://docs.astral.sh/ruff/rules/shallow-copy-environ) (`PLW1507`)
|
||||
- [`os-listdir`](https://docs.astral.sh/ruff/rules/os-listdir) (`PTH208`)
|
||||
- [`invalid-pathlib-with-suffix`](https://docs.astral.sh/ruff/rules/invalid-pathlib-with-suffix) (`PTH210`)
|
||||
- [`invalid-assert-message-literal-argument`](https://docs.astral.sh/ruff/rules/invalid-assert-message-literal-argument) (`RUF040`)
|
||||
- [`unnecessary-nested-literal`](https://docs.astral.sh/ruff/rules/unnecessary-nested-literal) (`RUF041`)
|
||||
- [`unnecessary-cast-to-int`](https://docs.astral.sh/ruff/rules/unnecessary-cast-to-int) (`RUF046`)
|
||||
- [`map-int-version-parsing`](https://docs.astral.sh/ruff/rules/map-int-version-parsing) (`RUF048`)
|
||||
- [`if-key-in-dict-del`](https://docs.astral.sh/ruff/rules/if-key-in-dict-del) (`RUF051`)
|
||||
- [`unsafe-markup-use`](https://docs.astral.sh/ruff/rules/unsafe-markup-use) (`S704`). This rule has also been renamed from `RUF035`.
|
||||
- [`split-static-string`](https://docs.astral.sh/ruff/rules/split-static-string) (`SIM905`)
|
||||
- [`runtime-cast-value`](https://docs.astral.sh/ruff/rules/runtime-cast-value) (`TC006`)
|
||||
- [`unquoted-type-alias`](https://docs.astral.sh/ruff/rules/unquoted-type-alias) (`TC007`)
|
||||
- [`non-pep646-unpack`](https://docs.astral.sh/ruff/rules/non-pep646-unpack) (`UP044`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`) [`invalid-first-argument-name-for-class-method`](https://docs.astral.sh/ruff/rules/invalid-first-argument-name-for-class-method/) (`N804`): `__new__` methods are now no longer flagged by `invalid-first-argument-name-for-class-method` (`N804`) but instead by `bad-staticmethod-argument` (`PLW0211`)
|
||||
- [`bad-str-strip-call`](https://docs.astral.sh/ruff/rules/bad-str-strip-call/) (`PLE1310`): The rule now applies to objects which are known to have type `str` or `bytes`.
|
||||
- [`custom-type-var-for-self`](https://docs.astral.sh/ruff/rules/custom-type-var-for-self/) (`PYI019`): More accurate detection of custom `TypeVars` replaceable by `Self`. The range of the diagnostic is now the full function header rather than just the return annotation.
|
||||
- [`invalid-argument-name`](https://docs.astral.sh/ruff/rules/invalid-argument-name/) (`N803`): Ignore argument names of functions decorated with `typing.override`
|
||||
- [`invalid-envvar-default`](https://docs.astral.sh/ruff/rules/invalid-envvar-default/) (`PLW1508`): Detect default value arguments to `os.environ.get` with invalid type.
|
||||
- [`pytest-raises-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-raises-with-multiple-statements/) (`PT012`) [`pytest-warns-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-warns-with-multiple-statements/) (`PT031`): Allow `for` statements with an empty body in `pytest.raises` and `pytest.warns` `with` statements.
|
||||
- [`redundant-open-modes`](https://docs.astral.sh/ruff/rules/redundant-open-modes/) (`UP015`): The diagnostic range is now the range of the redundant mode argument where it previously was the range of the entire open call. You may have to replace your `noqa` comments when suppressing `UP015`.
|
||||
- [`stdlib-module-shadowing`](https://docs.astral.sh/ruff/rules/stdlib-module-shadowing/) (`A005`): Changes the default value of `lint.flake8-builtins.strict-checking` from `true` to `false`.
|
||||
- [`type-none-comparison`](https://docs.astral.sh/ruff/rules/type-none-comparison/) (`FURB169`): Now also recognizes `type(expr) is type(None)` comparisons where `expr` isn't a name expression.
|
||||
|
||||
The following fixes or improvements to fixes have been stabilized:
|
||||
|
||||
- [`repeated-equality-comparison`](https://docs.astral.sh/ruff/rules/repeated-equality-comparison/) (`PLR1714`) ([#16685](https://github.com/astral-sh/ruff/pull/16685))
|
||||
- [`needless-bool`](https://docs.astral.sh/ruff/rules/needless-bool/) (`SIM103`) ([#16684](https://github.com/astral-sh/ruff/pull/16684))
|
||||
- [`unused-private-type-var`](https://docs.astral.sh/ruff/rules/unused-private-type-var/) (`PYI018`) ([#16682](https://github.com/astral-sh/ruff/pull/16682))
|
||||
|
||||
### Server
|
||||
|
||||
- Remove logging output for `ruff.printDebugInformation` ([#16617](https://github.com/astral-sh/ruff/pull/16617))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-builtins`\] Deprecate the `builtins-` prefixed options in favor of the unprefixed options (e.g. `builtins-allowed-modules` is now deprecated in favor of `allowed-modules`) ([#16092](https://github.com/astral-sh/ruff/pull/16092))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [flake8-bandit] Fix mixed-case hash algorithm names (S324) ([#16552](https://github.com/astral-sh/ruff/pull/16552))
|
||||
|
||||
### CLI
|
||||
|
||||
- [ruff] Fix `last_tag`/`commits_since_last_tag` for `version` command ([#16686](https://github.com/astral-sh/ruff/pull/16686))
|
||||
@@ -1,373 +0,0 @@
|
||||
# Changelog 0.11.x
|
||||
|
||||
## 0.11.0
|
||||
|
||||
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
||||
|
||||
In previous versions of Ruff, you could specify your Python version with:
|
||||
|
||||
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
|
||||
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
|
||||
|
||||
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
|
||||
|
||||
In v0.10, config discovery has been updated to address this issue:
|
||||
|
||||
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
|
||||
for a `pyproject.toml` file in the same directory and respect its
|
||||
`requires-python` version, even if it does not contain a `[tool.ruff]`
|
||||
section.
|
||||
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
|
||||
- If there is no config file (`ruff.toml`or `pyproject.toml` with a
|
||||
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
|
||||
search for the closest `pyproject.toml` in the parent directories and use its
|
||||
`requires-python` setting.
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`blanket-noqa`](https://docs.astral.sh/ruff/rules/blanket-noqa/) (`PGH004`): Also detect blanked file-level noqa comments (and not just line level comments).
|
||||
|
||||
### Preview features
|
||||
|
||||
- [syntax-errors] Tuple unpacking in `for` statement iterator clause before Python 3.9 ([#16558](https://github.com/astral-sh/ruff/pull/16558))
|
||||
|
||||
## 0.11.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Add `chain`, `chain_linear` and `cross_downstream` for `AIR302` ([#16647](https://github.com/astral-sh/ruff/pull/16647))
|
||||
- [syntax-errors] Improve error message and range for pre-PEP-614 decorator syntax errors ([#16581](https://github.com/astral-sh/ruff/pull/16581))
|
||||
- [syntax-errors] PEP 701 f-strings before Python 3.12 ([#16543](https://github.com/astral-sh/ruff/pull/16543))
|
||||
- [syntax-errors] Parenthesized context managers before Python 3.9 ([#16523](https://github.com/astral-sh/ruff/pull/16523))
|
||||
- [syntax-errors] Star annotations before Python 3.11 ([#16545](https://github.com/astral-sh/ruff/pull/16545))
|
||||
- [syntax-errors] Star expression in index before Python 3.11 ([#16544](https://github.com/astral-sh/ruff/pull/16544))
|
||||
- [syntax-errors] Unparenthesized assignment expressions in sets and indexes ([#16404](https://github.com/astral-sh/ruff/pull/16404))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Server: Allow `FixAll` action in presence of version-specific syntax errors ([#16848](https://github.com/astral-sh/ruff/pull/16848))
|
||||
- \[`flake8-bandit`\] Allow raw strings in `suspicious-mark-safe-usage` (`S308`) #16702 ([#16770](https://github.com/astral-sh/ruff/pull/16770))
|
||||
- \[`refurb`\] Avoid panicking `unwrap` in `verbose-decimal-constructor` (`FURB157`) ([#16777](https://github.com/astral-sh/ruff/pull/16777))
|
||||
- \[`refurb`\] Fix starred expressions fix (`FURB161`) ([#16550](https://github.com/astral-sh/ruff/pull/16550))
|
||||
- Fix `--statistics` reporting for unsafe fixes ([#16756](https://github.com/astral-sh/ruff/pull/16756))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-executables`\] Allow `uv run` in shebang line for `shebang-missing-python` (`EXE003`) ([#16849](https://github.com/astral-sh/ruff/pull/16849),[#16855](https://github.com/astral-sh/ruff/pull/16855))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add `--exit-non-zero-on-format` ([#16009](https://github.com/astral-sh/ruff/pull/16009))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update Ruff tutorial to avoid non-existent fix in `__init__.py` ([#16818](https://github.com/astral-sh/ruff/pull/16818))
|
||||
- \[`flake8-gettext`\] Swap `format-` and `printf-in-get-text-func-call` examples (`INT002`, `INT003`) ([#16769](https://github.com/astral-sh/ruff/pull/16769))
|
||||
|
||||
## 0.11.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- [syntax-errors] Fix false-positive syntax errors emitted for annotations on variadic parameters before Python 3.11 ([#16878](https://github.com/astral-sh/ruff/pull/16878))
|
||||
|
||||
## 0.11.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Add more autofixes for `AIR302` ([#16876](https://github.com/astral-sh/ruff/pull/16876), [#16977](https://github.com/astral-sh/ruff/pull/16977), [#16976](https://github.com/astral-sh/ruff/pull/16976), [#16965](https://github.com/astral-sh/ruff/pull/16965))
|
||||
- \[`airflow`\] Move `AIR301` to `AIR002` ([#16978](https://github.com/astral-sh/ruff/pull/16978))
|
||||
- \[`airflow`\] Move `AIR302` to `AIR301` and `AIR303` to `AIR302` ([#17151](https://github.com/astral-sh/ruff/pull/17151))
|
||||
- \[`flake8-bandit`\] Mark `str` and `list[str]` literals as trusted input (`S603`) ([#17136](https://github.com/astral-sh/ruff/pull/17136))
|
||||
- \[`ruff`\] Support slices in `RUF005` ([#17078](https://github.com/astral-sh/ruff/pull/17078))
|
||||
- [syntax-errors] Start detecting compile-time syntax errors ([#16106](https://github.com/astral-sh/ruff/pull/16106))
|
||||
- [syntax-errors] Duplicate type parameter names ([#16858](https://github.com/astral-sh/ruff/pull/16858))
|
||||
- [syntax-errors] Irrefutable `case` pattern before final case ([#16905](https://github.com/astral-sh/ruff/pull/16905))
|
||||
- [syntax-errors] Multiple assignments in `case` pattern ([#16957](https://github.com/astral-sh/ruff/pull/16957))
|
||||
- [syntax-errors] Single starred assignment target ([#17024](https://github.com/astral-sh/ruff/pull/17024))
|
||||
- [syntax-errors] Starred expressions in `return`, `yield`, and `for` ([#17134](https://github.com/astral-sh/ruff/pull/17134))
|
||||
- [syntax-errors] Store to or delete `__debug__` ([#16984](https://github.com/astral-sh/ruff/pull/16984))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Error instead of `panic!` when running Ruff from a deleted directory (#16903) ([#17054](https://github.com/astral-sh/ruff/pull/17054))
|
||||
- [syntax-errors] Fix false positive for parenthesized tuple index ([#16948](https://github.com/astral-sh/ruff/pull/16948))
|
||||
|
||||
### CLI
|
||||
|
||||
- Check `pyproject.toml` correctly when it is passed via stdin ([#16971](https://github.com/astral-sh/ruff/pull/16971))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-import-conventions`\] Add import `numpy.typing as npt` to default `flake8-import-conventions.aliases` ([#17133](https://github.com/astral-sh/ruff/pull/17133))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`refurb`\] Document why `UserDict`, `UserList`, and `UserString` are preferred over `dict`, `list`, and `str` (`FURB189`) ([#16927](https://github.com/astral-sh/ruff/pull/16927))
|
||||
|
||||
## 0.11.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`ruff`\] Implement `invalid-rule-code` as `RUF102` ([#17138](https://github.com/astral-sh/ruff/pull/17138))
|
||||
- [syntax-errors] Detect duplicate keys in `match` mapping patterns ([#17129](https://github.com/astral-sh/ruff/pull/17129))
|
||||
- [syntax-errors] Detect duplicate attributes in `match` class patterns ([#17186](https://github.com/astral-sh/ruff/pull/17186))
|
||||
- [syntax-errors] Detect invalid syntax in annotations ([#17101](https://github.com/astral-sh/ruff/pull/17101))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [syntax-errors] Fix multiple assignment error for class fields in `match` patterns ([#17184](https://github.com/astral-sh/ruff/pull/17184))
|
||||
- Don't skip visiting non-tuple slice in `typing.Annotated` subscripts ([#17201](https://github.com/astral-sh/ruff/pull/17201))
|
||||
|
||||
## 0.11.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Add missing `AIR302` attribute check ([#17115](https://github.com/astral-sh/ruff/pull/17115))
|
||||
- \[`airflow`\] Expand module path check to individual symbols (`AIR302`) ([#17278](https://github.com/astral-sh/ruff/pull/17278))
|
||||
- \[`airflow`\] Extract `AIR312` from `AIR302` rules (`AIR302`, `AIR312`) ([#17152](https://github.com/astral-sh/ruff/pull/17152))
|
||||
- \[`airflow`\] Update outdated `AIR301`, `AIR302` rules ([#17123](https://github.com/astral-sh/ruff/pull/17123))
|
||||
- [syntax-errors] Async comprehension in sync comprehension ([#17177](https://github.com/astral-sh/ruff/pull/17177))
|
||||
- [syntax-errors] Check annotations in annotated assignments ([#17283](https://github.com/astral-sh/ruff/pull/17283))
|
||||
- [syntax-errors] Extend annotation checks to `await` ([#17282](https://github.com/astral-sh/ruff/pull/17282))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-pie`\] Avoid false positive for multiple assignment with `auto()` (`PIE796`) ([#17274](https://github.com/astral-sh/ruff/pull/17274))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Fix `RUF100` to detect unused file-level `noqa` directives with specific codes (#17042) ([#17061](https://github.com/astral-sh/ruff/pull/17061))
|
||||
- \[`flake8-pytest-style`\] Avoid false positive for legacy form of `pytest.raises` (`PT011`) ([#17231](https://github.com/astral-sh/ruff/pull/17231))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix formatting of "See Style Guide" link ([#17272](https://github.com/astral-sh/ruff/pull/17272))
|
||||
|
||||
## 0.11.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- Avoid adding whitespace to the end of a docstring after an escaped quote ([#17216](https://github.com/astral-sh/ruff/pull/17216))
|
||||
- \[`airflow`\] Extract `AIR311` from `AIR301` rules (`AIR301`, `AIR311`) ([#17310](https://github.com/astral-sh/ruff/pull/17310), [#17422](https://github.com/astral-sh/ruff/pull/17422))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Raise syntax error when `\` is at end of file ([#17409](https://github.com/astral-sh/ruff/pull/17409))
|
||||
|
||||
## 0.11.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Apply auto fixes to cases where the names have changed in Airflow 3 (`AIR301`) ([#17355](https://github.com/astral-sh/ruff/pull/17355))
|
||||
- \[`perflint`\] Implement fix for `manual-dict-comprehension` (`PERF403`) ([#16719](https://github.com/astral-sh/ruff/pull/16719))
|
||||
- [syntax-errors] Make duplicate parameter names a semantic error ([#17131](https://github.com/astral-sh/ruff/pull/17131))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`airflow`\] Fix typos in provider package names (`AIR302`, `AIR312`) ([#17574](https://github.com/astral-sh/ruff/pull/17574))
|
||||
- \[`flake8-type-checking`\] Visit keyword arguments in checks involving `typing.cast`/`typing.NewType` arguments ([#17538](https://github.com/astral-sh/ruff/pull/17538))
|
||||
- \[`pyupgrade`\] Preserve parenthesis when fixing native literals containing newlines (`UP018`) ([#17220](https://github.com/astral-sh/ruff/pull/17220))
|
||||
- \[`refurb`\] Mark the `FURB161` fix unsafe except for integers and booleans ([#17240](https://github.com/astral-sh/ruff/pull/17240))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`perflint`\] Allow list function calls to be replaced with a comprehension (`PERF401`) ([#17519](https://github.com/astral-sh/ruff/pull/17519))
|
||||
- \[`pycodestyle`\] Auto-fix redundant boolean comparison (`E712`) ([#17090](https://github.com/astral-sh/ruff/pull/17090))
|
||||
- \[`pylint`\] make fix unsafe if delete comments (`PLR1730`) ([#17459](https://github.com/astral-sh/ruff/pull/17459))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add fix safety sections to docs for several rules ([#17410](https://github.com/astral-sh/ruff/pull/17410),[#17440](https://github.com/astral-sh/ruff/pull/17440),[#17441](https://github.com/astral-sh/ruff/pull/17441),[#17443](https://github.com/astral-sh/ruff/pull/17443),[#17444](https://github.com/astral-sh/ruff/pull/17444))
|
||||
|
||||
## 0.11.8
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Apply auto fixes to cases where the names have changed in Airflow 3 (`AIR302`, `AIR311`) ([#17553](https://github.com/astral-sh/ruff/pull/17553), [#17570](https://github.com/astral-sh/ruff/pull/17570), [#17571](https://github.com/astral-sh/ruff/pull/17571))
|
||||
- \[`airflow`\] Extend `AIR301` rule ([#17598](https://github.com/astral-sh/ruff/pull/17598))
|
||||
- \[`airflow`\] Update existing `AIR302` rules with better suggestions ([#17542](https://github.com/astral-sh/ruff/pull/17542))
|
||||
- \[`refurb`\] Mark fix as safe for `readlines-in-for` (`FURB129`) ([#17644](https://github.com/astral-sh/ruff/pull/17644))
|
||||
- [syntax-errors] `nonlocal` declaration at module level ([#17559](https://github.com/astral-sh/ruff/pull/17559))
|
||||
- [syntax-errors] Detect single starred expression assignment `x = *y` ([#17624](https://github.com/astral-sh/ruff/pull/17624))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-pyi`\] Ensure `Literal[None,] | Literal[None,]` is not autofixed to `None | None` (`PYI061`) ([#17659](https://github.com/astral-sh/ruff/pull/17659))
|
||||
- \[`flake8-use-pathlib`\] Avoid suggesting `Path.iterdir()` for `os.listdir` with file descriptor (`PTH208`) ([#17715](https://github.com/astral-sh/ruff/pull/17715))
|
||||
- \[`flake8-use-pathlib`\] Fix `PTH104` false positive when `rename` is passed a file descriptor ([#17712](https://github.com/astral-sh/ruff/pull/17712))
|
||||
- \[`flake8-use-pathlib`\] Fix `PTH116` false positive when `stat` is passed a file descriptor ([#17709](https://github.com/astral-sh/ruff/pull/17709))
|
||||
- \[`flake8-use-pathlib`\] Fix `PTH123` false positive when `open` is passed a file descriptor from a function call ([#17705](https://github.com/astral-sh/ruff/pull/17705))
|
||||
- \[`pycodestyle`\] Fix duplicated diagnostic in `E712` ([#17651](https://github.com/astral-sh/ruff/pull/17651))
|
||||
- \[`pylint`\] Detect `global` declarations in module scope (`PLE0118`) ([#17411](https://github.com/astral-sh/ruff/pull/17411))
|
||||
- [syntax-errors] Make `async-comprehension-in-sync-comprehension` more specific ([#17460](https://github.com/astral-sh/ruff/pull/17460))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add option to disable `typing_extensions` imports ([#17611](https://github.com/astral-sh/ruff/pull/17611))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix example syntax for the `lint.pydocstyle.ignore-var-parameters` option ([#17740](https://github.com/astral-sh/ruff/pull/17740))
|
||||
- Add fix safety sections (`ASYNC116`, `FLY002`, `D200`, `RUF005`, `RUF017`, `RUF027`, `RUF028`, `RUF057`) ([#17497](https://github.com/astral-sh/ruff/pull/17497), [#17496](https://github.com/astral-sh/ruff/pull/17496), [#17502](https://github.com/astral-sh/ruff/pull/17502), [#17484](https://github.com/astral-sh/ruff/pull/17484), [#17480](https://github.com/astral-sh/ruff/pull/17480), [#17485](https://github.com/astral-sh/ruff/pull/17485), [#17722](https://github.com/astral-sh/ruff/pull/17722), [#17483](https://github.com/astral-sh/ruff/pull/17483))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Add Python 3.14 to configuration options ([#17647](https://github.com/astral-sh/ruff/pull/17647))
|
||||
- Make syntax error for unparenthesized except tuples version specific to before 3.14 ([#17660](https://github.com/astral-sh/ruff/pull/17660))
|
||||
|
||||
## 0.11.9
|
||||
|
||||
### Preview features
|
||||
|
||||
- Default to latest supported Python version for version-related syntax errors ([#17529](https://github.com/astral-sh/ruff/pull/17529))
|
||||
- Implement deferred annotations for Python 3.14 ([#17658](https://github.com/astral-sh/ruff/pull/17658))
|
||||
- \[`airflow`\] Fix `SQLTableCheckOperator` typo (`AIR302`) ([#17946](https://github.com/astral-sh/ruff/pull/17946))
|
||||
- \[`airflow`\] Remove `airflow.utils.dag_parsing_context.get_parsing_context` (`AIR301`) ([#17852](https://github.com/astral-sh/ruff/pull/17852))
|
||||
- \[`airflow`\] Skip attribute check in try catch block (`AIR301`) ([#17790](https://github.com/astral-sh/ruff/pull/17790))
|
||||
- \[`flake8-bandit`\] Mark tuples of string literals as trusted input in `S603` ([#17801](https://github.com/astral-sh/ruff/pull/17801))
|
||||
- \[`isort`\] Check full module path against project root(s) when categorizing first-party imports ([#16565](https://github.com/astral-sh/ruff/pull/16565))
|
||||
- \[`ruff`\] Add new rule `in-empty-collection` (`RUF060`) ([#16480](https://github.com/astral-sh/ruff/pull/16480))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix missing `combine` call for `lint.typing-extensions` setting ([#17823](https://github.com/astral-sh/ruff/pull/17823))
|
||||
- \[`flake8-async`\] Fix module name in `ASYNC110`, `ASYNC115`, and `ASYNC116` fixes ([#17774](https://github.com/astral-sh/ruff/pull/17774))
|
||||
- \[`pyupgrade`\] Add spaces between tokens as necessary to avoid syntax errors in `UP018` autofix ([#17648](https://github.com/astral-sh/ruff/pull/17648))
|
||||
- \[`refurb`\] Fix false positive for float and complex numbers in `FURB116` ([#17661](https://github.com/astral-sh/ruff/pull/17661))
|
||||
- [parser] Flag single unparenthesized generator expr with trailing comma in arguments. ([#17893](https://github.com/astral-sh/ruff/pull/17893))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add instructions on how to upgrade to a newer Rust version ([#17928](https://github.com/astral-sh/ruff/pull/17928))
|
||||
- Update code of conduct email address ([#17875](https://github.com/astral-sh/ruff/pull/17875))
|
||||
- Add fix safety sections to `PLC2801`, `PLR1722`, and `RUF013` ([#17825](https://github.com/astral-sh/ruff/pull/17825), [#17826](https://github.com/astral-sh/ruff/pull/17826), [#17759](https://github.com/astral-sh/ruff/pull/17759))
|
||||
- Add link to `check-typed-exception` from `S110` and `S112` ([#17786](https://github.com/astral-sh/ruff/pull/17786))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Allow passing a virtual environment to `ruff analyze graph` ([#17743](https://github.com/astral-sh/ruff/pull/17743))
|
||||
|
||||
## 0.11.10
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`ruff`\] Implement a recursive check for `RUF060` ([#17976](https://github.com/astral-sh/ruff/pull/17976))
|
||||
- \[`airflow`\] Enable autofixes for `AIR301` and `AIR311` ([#17941](https://github.com/astral-sh/ruff/pull/17941))
|
||||
- \[`airflow`\] Apply try catch guard to all `AIR3` rules ([#17887](https://github.com/astral-sh/ruff/pull/17887))
|
||||
- \[`airflow`\] Extend `AIR311` rules ([#17913](https://github.com/astral-sh/ruff/pull/17913))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-bugbear`\] Ignore `B028` if `skip_file_prefixes` is present ([#18047](https://github.com/astral-sh/ruff/pull/18047))
|
||||
- \[`flake8-pie`\] Mark autofix for `PIE804` as unsafe if the dictionary contains comments ([#18046](https://github.com/astral-sh/ruff/pull/18046))
|
||||
- \[`flake8-simplify`\] Correct behavior for `str.split`/`rsplit` with `maxsplit=0` (`SIM905`) ([#18075](https://github.com/astral-sh/ruff/pull/18075))
|
||||
- \[`flake8-simplify`\] Fix `SIM905` autofix for `rsplit` creating a reversed list literal ([#18045](https://github.com/astral-sh/ruff/pull/18045))
|
||||
- \[`flake8-use-pathlib`\] Suppress diagnostics for all `os.*` functions that have the `dir_fd` parameter (`PTH`) ([#17968](https://github.com/astral-sh/ruff/pull/17968))
|
||||
- \[`refurb`\] Mark autofix as safe only for number literals (`FURB116`) ([#17692](https://github.com/astral-sh/ruff/pull/17692))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bandit`\] Skip `S608` for expressionless f-strings ([#17999](https://github.com/astral-sh/ruff/pull/17999))
|
||||
- \[`flake8-pytest-style`\] Don't recommend `usefixtures` for `parametrize` values (`PT019`) ([#17650](https://github.com/astral-sh/ruff/pull/17650))
|
||||
- \[`pyupgrade`\] Add `resource.error` as deprecated alias of `OSError` (`UP024`) ([#17933](https://github.com/astral-sh/ruff/pull/17933))
|
||||
|
||||
### CLI
|
||||
|
||||
- Disable jemalloc on Android ([#18033](https://github.com/astral-sh/ruff/pull/18033))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update Neovim setup docs ([#18108](https://github.com/astral-sh/ruff/pull/18108))
|
||||
- \[`flake8-simplify`\] Add fix safety section (`SIM103`) ([#18086](https://github.com/astral-sh/ruff/pull/18086))
|
||||
- \[`flake8-simplify`\] Add fix safety section (`SIM112`) ([#18099](https://github.com/astral-sh/ruff/pull/18099))
|
||||
- \[`pylint`\] Add fix safety section (`PLC0414`) ([#17802](https://github.com/astral-sh/ruff/pull/17802))
|
||||
- \[`pylint`\] Add fix safety section (`PLE4703`) ([#17824](https://github.com/astral-sh/ruff/pull/17824))
|
||||
- \[`pylint`\] Add fix safety section (`PLW1514`) ([#17932](https://github.com/astral-sh/ruff/pull/17932))
|
||||
- \[`pylint`\] Add fix safety section (`PLW3301`) ([#17878](https://github.com/astral-sh/ruff/pull/17878))
|
||||
- \[`ruff`\] Add fix safety section (`RUF007`) ([#17755](https://github.com/astral-sh/ruff/pull/17755))
|
||||
- \[`ruff`\] Add fix safety section (`RUF033`) ([#17760](https://github.com/astral-sh/ruff/pull/17760))
|
||||
|
||||
## 0.11.11
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Add autofixes for `AIR302` and `AIR312` ([#17942](https://github.com/astral-sh/ruff/pull/17942))
|
||||
- \[`airflow`\] Move rules from `AIR312` to `AIR302` ([#17940](https://github.com/astral-sh/ruff/pull/17940))
|
||||
- \[`airflow`\] Update `AIR301` and `AIR311` with the latest Airflow implementations ([#17985](https://github.com/astral-sh/ruff/pull/17985))
|
||||
- \[`flake8-simplify`\] Enable fix in preview mode (`SIM117`) ([#18208](https://github.com/astral-sh/ruff/pull/18208))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix inconsistent formatting of match-case on `[]` and `_` ([#18147](https://github.com/astral-sh/ruff/pull/18147))
|
||||
- \[`pylint`\] Fix `PLW1514` not recognizing the `encoding` positional argument of `codecs.open` ([#18109](https://github.com/astral-sh/ruff/pull/18109))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add full option name in formatter warning ([#18217](https://github.com/astral-sh/ruff/pull/18217))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix rendering of admonition in docs ([#18163](https://github.com/astral-sh/ruff/pull/18163))
|
||||
- \[`flake8-print`\] Improve print/pprint docs for `T201` and `T203` ([#18130](https://github.com/astral-sh/ruff/pull/18130))
|
||||
- \[`flake8-simplify`\] Add fix safety section (`SIM110`,`SIM210`) ([#18114](https://github.com/astral-sh/ruff/pull/18114),[#18100](https://github.com/astral-sh/ruff/pull/18100))
|
||||
- \[`pylint`\] Fix docs example that produced different output (`PLW0603`) ([#18216](https://github.com/astral-sh/ruff/pull/18216))
|
||||
|
||||
## 0.11.12
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Revise fix titles (`AIR3`) ([#18215](https://github.com/astral-sh/ruff/pull/18215))
|
||||
- \[`pylint`\] Implement `missing-maxsplit-arg` (`PLC0207`) ([#17454](https://github.com/astral-sh/ruff/pull/17454))
|
||||
- \[`pyupgrade`\] New rule `UP050` (`useless-class-metaclass-type`) ([#18334](https://github.com/astral-sh/ruff/pull/18334))
|
||||
- \[`flake8-use-pathlib`\] Replace `os.symlink` with `Path.symlink_to` (`PTH211`) ([#18337](https://github.com/astral-sh/ruff/pull/18337))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-bugbear`\] Ignore `__debug__` attribute in `B010` ([#18357](https://github.com/astral-sh/ruff/pull/18357))
|
||||
- \[`flake8-async`\] Fix `anyio.sleep` argument name (`ASYNC115`, `ASYNC116`) ([#18262](https://github.com/astral-sh/ruff/pull/18262))
|
||||
- \[`refurb`\] Fix `FURB129` autofix generating invalid syntax ([#18235](https://github.com/astral-sh/ruff/pull/18235))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-implicit-str-concat`\] Add autofix for `ISC003` ([#18256](https://github.com/astral-sh/ruff/pull/18256))
|
||||
- \[`pycodestyle`\] Improve the diagnostic message for `E712` ([#18328](https://github.com/astral-sh/ruff/pull/18328))
|
||||
- \[`flake8-2020`\] Fix diagnostic message for `!=` comparisons (`YTT201`) ([#18293](https://github.com/astral-sh/ruff/pull/18293))
|
||||
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP010`) ([#18291](https://github.com/astral-sh/ruff/pull/18291))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Simplify rules table to improve readability ([#18297](https://github.com/astral-sh/ruff/pull/18297))
|
||||
- Update editor integrations link in README ([#17977](https://github.com/astral-sh/ruff/pull/17977))
|
||||
- \[`flake8-bugbear`\] Add fix safety section (`B006`) ([#17652](https://github.com/astral-sh/ruff/pull/17652))
|
||||
|
||||
## 0.11.13
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Add unsafe fix for module moved cases (`AIR301`,`AIR311`,`AIR312`,`AIR302`) ([#18367](https://github.com/astral-sh/ruff/pull/18367),[#18366](https://github.com/astral-sh/ruff/pull/18366),[#18363](https://github.com/astral-sh/ruff/pull/18363),[#18093](https://github.com/astral-sh/ruff/pull/18093))
|
||||
- \[`refurb`\] Add coverage of `set` and `frozenset` calls (`FURB171`) ([#18035](https://github.com/astral-sh/ruff/pull/18035))
|
||||
- \[`refurb`\] Mark `FURB180` fix unsafe when class has bases ([#18149](https://github.com/astral-sh/ruff/pull/18149))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`perflint`\] Fix missing parentheses for lambda and ternary conditions (`PERF401`, `PERF403`) ([#18412](https://github.com/astral-sh/ruff/pull/18412))
|
||||
- \[`pyupgrade`\] Apply `UP035` only on py313+ for `get_type_hints()` ([#18476](https://github.com/astral-sh/ruff/pull/18476))
|
||||
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP004`,`UP050`) ([#18393](https://github.com/astral-sh/ruff/pull/18393), [#18390](https://github.com/astral-sh/ruff/pull/18390))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`fastapi`\] Avoid false positive for class dependencies (`FAST003`) ([#18271](https://github.com/astral-sh/ruff/pull/18271))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update editor setup docs for Neovim and Vim ([#18324](https://github.com/astral-sh/ruff/pull/18324))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Support Python 3.14 template strings (t-strings) in formatter and parser ([#17851](https://github.com/astral-sh/ruff/pull/17851))
|
||||
@@ -1,271 +0,0 @@
|
||||
# Changelog 0.2.x
|
||||
|
||||
## 0.2.0
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- The `NURSERY` selector cannot be used anymore
|
||||
- Legacy selection of nursery rules by exact codes is no longer allowed without preview enabled
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
### Deprecations
|
||||
|
||||
The following rules are now deprecated:
|
||||
|
||||
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
|
||||
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
|
||||
|
||||
The following command line options are now deprecated:
|
||||
|
||||
- `--show-source`; use `--output-format full` instead
|
||||
- `--no-show-source`; use `--output-format concise` instead
|
||||
- `--output-format text`; use `full` or `concise` instead
|
||||
|
||||
The following settings have moved and the previous name is deprecated:
|
||||
|
||||
- `ruff.allowed-confusables` → [`ruff.lint.allowed-confusables`](https://docs.astral.sh//ruff/settings/#lint_allowed-confusables)
|
||||
- `ruff.dummy-variable-rgx` → [`ruff.lint.dummy-variable-rgx`](https://docs.astral.sh//ruff/settings/#lint_dummy-variable-rgx)
|
||||
- `ruff.explicit-preview-rules` → [`ruff.lint.explicit-preview-rules`](https://docs.astral.sh//ruff/settings/#lint_explicit-preview-rules)
|
||||
- `ruff.extend-fixable` → [`ruff.lint.extend-fixable`](https://docs.astral.sh//ruff/settings/#lint_extend-fixable)
|
||||
- `ruff.extend-ignore` → [`ruff.lint.extend-ignore`](https://docs.astral.sh//ruff/settings/#lint_extend-ignore)
|
||||
- `ruff.extend-per-file-ignores` → [`ruff.lint.extend-per-file-ignores`](https://docs.astral.sh//ruff/settings/#lint_extend-per-file-ignores)
|
||||
- `ruff.extend-safe-fixes` → [`ruff.lint.extend-safe-fixes`](https://docs.astral.sh//ruff/settings/#lint_extend-safe-fixes)
|
||||
- `ruff.extend-select` → [`ruff.lint.extend-select`](https://docs.astral.sh//ruff/settings/#lint_extend-select)
|
||||
- `ruff.extend-unfixable` → [`ruff.lint.extend-unfixable`](https://docs.astral.sh//ruff/settings/#lint_extend-unfixable)
|
||||
- `ruff.extend-unsafe-fixes` → [`ruff.lint.extend-unsafe-fixes`](https://docs.astral.sh//ruff/settings/#lint_extend-unsafe-fixes)
|
||||
- `ruff.external` → [`ruff.lint.external`](https://docs.astral.sh//ruff/settings/#lint_external)
|
||||
- `ruff.fixable` → [`ruff.lint.fixable`](https://docs.astral.sh//ruff/settings/#lint_fixable)
|
||||
- `ruff.flake8-annotations` → [`ruff.lint.flake8-annotations`](https://docs.astral.sh//ruff/settings/#lint_flake8-annotations)
|
||||
- `ruff.flake8-bandit` → [`ruff.lint.flake8-bandit`](https://docs.astral.sh//ruff/settings/#lint_flake8-bandit)
|
||||
- `ruff.flake8-bugbear` → [`ruff.lint.flake8-bugbear`](https://docs.astral.sh//ruff/settings/#lint_flake8-bugbear)
|
||||
- `ruff.flake8-builtins` → [`ruff.lint.flake8-builtins`](https://docs.astral.sh//ruff/settings/#lint_flake8-builtins)
|
||||
- `ruff.flake8-comprehensions` → [`ruff.lint.flake8-comprehensions`](https://docs.astral.sh//ruff/settings/#lint_flake8-comprehensions)
|
||||
- `ruff.flake8-copyright` → [`ruff.lint.flake8-copyright`](https://docs.astral.sh//ruff/settings/#lint_flake8-copyright)
|
||||
- `ruff.flake8-errmsg` → [`ruff.lint.flake8-errmsg`](https://docs.astral.sh//ruff/settings/#lint_flake8-errmsg)
|
||||
- `ruff.flake8-gettext` → [`ruff.lint.flake8-gettext`](https://docs.astral.sh//ruff/settings/#lint_flake8-gettext)
|
||||
- `ruff.flake8-implicit-str-concat` → [`ruff.lint.flake8-implicit-str-concat`](https://docs.astral.sh//ruff/settings/#lint_flake8-implicit-str-concat)
|
||||
- `ruff.flake8-import-conventions` → [`ruff.lint.flake8-import-conventions`](https://docs.astral.sh//ruff/settings/#lint_flake8-import-conventions)
|
||||
- `ruff.flake8-pytest-style` → [`ruff.lint.flake8-pytest-style`](https://docs.astral.sh//ruff/settings/#lint_flake8-pytest-style)
|
||||
- `ruff.flake8-quotes` → [`ruff.lint.flake8-quotes`](https://docs.astral.sh//ruff/settings/#lint_flake8-quotes)
|
||||
- `ruff.flake8-self` → [`ruff.lint.flake8-self`](https://docs.astral.sh//ruff/settings/#lint_flake8-self)
|
||||
- `ruff.flake8-tidy-imports` → [`ruff.lint.flake8-tidy-imports`](https://docs.astral.sh//ruff/settings/#lint_flake8-tidy-imports)
|
||||
- `ruff.flake8-type-checking` → [`ruff.lint.flake8-type-checking`](https://docs.astral.sh//ruff/settings/#lint_flake8-type-checking)
|
||||
- `ruff.flake8-unused-arguments` → [`ruff.lint.flake8-unused-arguments`](https://docs.astral.sh//ruff/settings/#lint_flake8-unused-arguments)
|
||||
- `ruff.ignore` → [`ruff.lint.ignore`](https://docs.astral.sh//ruff/settings/#lint_ignore)
|
||||
- `ruff.ignore-init-module-imports` → [`ruff.lint.ignore-init-module-imports`](https://docs.astral.sh//ruff/settings/#lint_ignore-init-module-imports)
|
||||
- `ruff.isort` → [`ruff.lint.isort`](https://docs.astral.sh//ruff/settings/#lint_isort)
|
||||
- `ruff.logger-objects` → [`ruff.lint.logger-objects`](https://docs.astral.sh//ruff/settings/#lint_logger-objects)
|
||||
- `ruff.mccabe` → [`ruff.lint.mccabe`](https://docs.astral.sh//ruff/settings/#lint_mccabe)
|
||||
- `ruff.pep8-naming` → [`ruff.lint.pep8-naming`](https://docs.astral.sh//ruff/settings/#lint_pep8-naming)
|
||||
- `ruff.per-file-ignores` → [`ruff.lint.per-file-ignores`](https://docs.astral.sh//ruff/settings/#lint_per-file-ignores)
|
||||
- `ruff.pycodestyle` → [`ruff.lint.pycodestyle`](https://docs.astral.sh//ruff/settings/#lint_pycodestyle)
|
||||
- `ruff.pydocstyle` → [`ruff.lint.pydocstyle`](https://docs.astral.sh//ruff/settings/#lint_pydocstyle)
|
||||
- `ruff.pyflakes` → [`ruff.lint.pyflakes`](https://docs.astral.sh//ruff/settings/#lint_pyflakes)
|
||||
- `ruff.pylint` → [`ruff.lint.pylint`](https://docs.astral.sh//ruff/settings/#lint_pylint)
|
||||
- `ruff.pyupgrade` → [`ruff.lint.pyupgrade`](https://docs.astral.sh//ruff/settings/#lint_pyupgrade)
|
||||
- `ruff.select` → [`ruff.lint.select`](https://docs.astral.sh//ruff/settings/#lint_select)
|
||||
- `ruff.task-tags` → [`ruff.lint.task-tags`](https://docs.astral.sh//ruff/settings/#lint_task-tags)
|
||||
- `ruff.typing-modules` → [`ruff.lint.typing-modules`](https://docs.astral.sh//ruff/settings/#lint_typing-modules)
|
||||
- `ruff.unfixable` → [`ruff.lint.unfixable`](https://docs.astral.sh//ruff/settings/#lint_unfixable)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new codes:
|
||||
|
||||
- [`raise-without-from-inside-except`](https://docs.astral.sh/ruff/rules/raise-without-from-inside-except/): `TRY200` to `B904`
|
||||
- [`suspicious-eval-usage`](https://docs.astral.sh/ruff/rules/suspicious-eval-usage/): `PGH001` to `S307`
|
||||
- [`logging-warn`](https://docs.astral.sh/ruff/rules/logging-warn/): `PGH002` to `G010`
|
||||
- [`static-key-dict-comprehension`](https://docs.astral.sh/ruff/rules/static-key-dict-comprehension): `RUF011` to `B035`
|
||||
- [`runtime-string-union`](https://docs.astral.sh/ruff/rules/runtime-string-union): `TCH006` to `TCH010`
|
||||
|
||||
### Stabilizations
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`trio-timeout-without-await`](https://docs.astral.sh/ruff/rules/trio-timeout-without-await) (`TRIO100`)
|
||||
- [`trio-sync-call`](https://docs.astral.sh/ruff/rules/trio-sync-call) (`TRIO105`)
|
||||
- [`trio-async-function-with-timeout`](https://docs.astral.sh/ruff/rules/trio-async-function-with-timeout) (`TRIO109`)
|
||||
- [`trio-unneeded-sleep`](https://docs.astral.sh/ruff/rules/trio-unneeded-sleep) (`TRIO110`)
|
||||
- [`trio-zero-sleep-call`](https://docs.astral.sh/ruff/rules/trio-zero-sleep-call) (`TRIO115`)
|
||||
- [`unnecessary-escaped-quote`](https://docs.astral.sh/ruff/rules/unnecessary-escaped-quote) (`Q004`)
|
||||
- [`enumerate-for-loop`](https://docs.astral.sh/ruff/rules/enumerate-for-loop) (`SIM113`)
|
||||
- [`zip-dict-keys-and-values`](https://docs.astral.sh/ruff/rules/zip-dict-keys-and-values) (`SIM911`)
|
||||
- [`timeout-error-alias`](https://docs.astral.sh/ruff/rules/timeout-error-alias) (`UP041`)
|
||||
- [`flask-debug-true`](https://docs.astral.sh/ruff/rules/flask-debug-true) (`S201`)
|
||||
- [`tarfile-unsafe-members`](https://docs.astral.sh/ruff/rules/tarfile-unsafe-members) (`S202`)
|
||||
- [`ssl-insecure-version`](https://docs.astral.sh/ruff/rules/ssl-insecure-version) (`S502`)
|
||||
- [`ssl-with-bad-defaults`](https://docs.astral.sh/ruff/rules/ssl-with-bad-defaults) (`S503`)
|
||||
- [`ssl-with-no-version`](https://docs.astral.sh/ruff/rules/ssl-with-no-version) (`S504`)
|
||||
- [`weak-cryptographic-key`](https://docs.astral.sh/ruff/rules/weak-cryptographic-key) (`S505`)
|
||||
- [`ssh-no-host-key-verification`](https://docs.astral.sh/ruff/rules/ssh-no-host-key-verification) (`S507`)
|
||||
- [`django-raw-sql`](https://docs.astral.sh/ruff/rules/django-raw-sql) (`S611`)
|
||||
- [`mako-templates`](https://docs.astral.sh/ruff/rules/mako-templates) (`S702`)
|
||||
- [`generator-return-from-iter-method`](https://docs.astral.sh/ruff/rules/generator-return-from-iter-method) (`PYI058`)
|
||||
- [`runtime-string-union`](https://docs.astral.sh/ruff/rules/runtime-string-union) (`TCH006`)
|
||||
- [`numpy2-deprecation`](https://docs.astral.sh/ruff/rules/numpy2-deprecation) (`NPY201`)
|
||||
- [`quadratic-list-summation`](https://docs.astral.sh/ruff/rules/quadratic-list-summation) (`RUF017`)
|
||||
- [`assignment-in-assert`](https://docs.astral.sh/ruff/rules/assignment-in-assert) (`RUF018`)
|
||||
- [`unnecessary-key-check`](https://docs.astral.sh/ruff/rules/unnecessary-key-check) (`RUF019`)
|
||||
- [`never-union`](https://docs.astral.sh/ruff/rules/never-union) (`RUF020`)
|
||||
- [`direct-logger-instantiation`](https://docs.astral.sh/ruff/rules/direct-logger-instantiation) (`LOG001`)
|
||||
- [`invalid-get-logger-argument`](https://docs.astral.sh/ruff/rules/invalid-get-logger-argument) (`LOG002`)
|
||||
- [`exception-without-exc-info`](https://docs.astral.sh/ruff/rules/exception-without-exc-info) (`LOG007`)
|
||||
- [`undocumented-warn`](https://docs.astral.sh/ruff/rules/undocumented-warn) (`LOG009`)
|
||||
|
||||
Fixes for the following rules have been stabilized and are now available without preview:
|
||||
|
||||
- [`triple-single-quotes`](https://docs.astral.sh/ruff/rules/triple-single-quotes) (`D300`)
|
||||
- [`non-pep604-annotation`](https://docs.astral.sh/ruff/rules/non-pep604-annotation) (`UP007`)
|
||||
- [`dict-get-with-none-default`](https://docs.astral.sh/ruff/rules/dict-get-with-none-default) (`SIM910`)
|
||||
- [`in-dict-keys`](https://docs.astral.sh/ruff/rules/in-dict-keys) (`SIM118`)
|
||||
- [`collapsible-else-if`](https://docs.astral.sh/ruff/rules/collapsible-else-if) (`PLR5501`)
|
||||
- [`if-with-same-arms`](https://docs.astral.sh/ruff/rules/if-with-same-arms) (`SIM114`)
|
||||
- [`useless-else-on-loop`](https://docs.astral.sh/ruff/rules/useless-else-on-loop) (`PLW0120`)
|
||||
- [`unnecessary-literal-union`](https://docs.astral.sh/ruff/rules/unnecessary-literal-union) (`PYI030`)
|
||||
- [`unnecessary-spread`](https://docs.astral.sh/ruff/rules/unnecessary-spread) (`PIE800`)
|
||||
- [`error-instead-of-exception`](https://docs.astral.sh/ruff/rules/error-instead-of-exception) (`TRY400`)
|
||||
- [`redefined-while-unused`](https://docs.astral.sh/ruff/rules/redefined-while-unused) (`F811`)
|
||||
- [`duplicate-value`](https://docs.astral.sh/ruff/rules/duplicate-value) (`B033`)
|
||||
- [`multiple-imports-on-one-line`](https://docs.astral.sh/ruff/rules/multiple-imports-on-one-line) (`E401`)
|
||||
- [`non-pep585-annotation`](https://docs.astral.sh/ruff/rules/non-pep585-annotation) (`UP006`)
|
||||
|
||||
Fixes for the following rules have been promoted from unsafe to safe:
|
||||
|
||||
- [`unaliased-collections-abc-set-import`](https://docs.astral.sh/ruff/rules/unaliased-collections-abc-set-import) (`PYI025`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`module-import-not-at-top-of-file`](https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/) (`E402`) allows `sys.path` modifications between imports
|
||||
- [`reimplemented-container-builtin`](https://docs.astral.sh/ruff/rules/reimplemented-container-builtin/) (`PIE807`) includes lambdas that can be replaced with `dict`
|
||||
- [`unnecessary-placeholder`](https://docs.astral.sh/ruff/rules/unnecessary-placeholder/) (`PIE790`) applies to unnecessary ellipses (`...`)
|
||||
- [`if-else-block-instead-of-dict-get`](https://docs.astral.sh/ruff/rules/if-else-block-instead-of-dict-get/) (`SIM401`) applies to `if-else` expressions
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`refurb`\] Implement `metaclass_abcmeta` (`FURB180`) ([#9658](https://github.com/astral-sh/ruff/pull/9658))
|
||||
- Implement `blank_line_after_nested_stub_class` preview style ([#9155](https://github.com/astral-sh/ruff/pull/9155))
|
||||
- The preview rule [`and-or-ternary`](https://docs.astral.sh/ruff/rules/and-or-ternary) (`PLR1706`) was removed
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Take `pathlib.Path` into account when analyzing async functions ([#9703](https://github.com/astral-sh/ruff/pull/9703))
|
||||
- \[`flake8-return`\] - fix indentation syntax error (`RET505`) ([#9705](https://github.com/astral-sh/ruff/pull/9705))
|
||||
- Detect multi-statement lines in else removal ([#9748](https://github.com/astral-sh/ruff/pull/9748))
|
||||
- `RUF022`, `RUF023`: never add two trailing commas to the end of a sequence ([#9698](https://github.com/astral-sh/ruff/pull/9698))
|
||||
- `RUF023`: Don't sort `__match_args__`, only `__slots__` ([#9724](https://github.com/astral-sh/ruff/pull/9724))
|
||||
- \[`flake8-simplify`\] - Fix syntax error in autofix (`SIM114`) ([#9704](https://github.com/astral-sh/ruff/pull/9704))
|
||||
- \[`pylint`\] Show verbatim constant in `magic-value-comparison` (`PLR2004`) ([#9694](https://github.com/astral-sh/ruff/pull/9694))
|
||||
- Removing trailing whitespace inside multiline strings is unsafe ([#9744](https://github.com/astral-sh/ruff/pull/9744))
|
||||
- Support `IfExp` with dual string arms in `invalid-envvar-default` ([#9734](https://github.com/astral-sh/ruff/pull/9734))
|
||||
- \[`pylint`\] Add `__mro_entries__` to known dunder methods (`PLW3201`) ([#9706](https://github.com/astral-sh/ruff/pull/9706))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Removed rules are now retained in the documentation ([#9691](https://github.com/astral-sh/ruff/pull/9691))
|
||||
- Deprecated rules are now indicated in the documentation ([#9689](https://github.com/astral-sh/ruff/pull/9689))
|
||||
|
||||
## 0.2.1
|
||||
|
||||
This release includes support for range formatting (i.e., the ability to format specific lines
|
||||
within a source file).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`refurb`\] Implement `missing-f-string-syntax` (`RUF027`) ([#9728](https://github.com/astral-sh/ruff/pull/9728))
|
||||
- Format module-level docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))
|
||||
|
||||
### Formatter

- Add `--range` option to `ruff format` ([#9733](https://github.com/astral-sh/ruff/pull/9733))
- Don't trim last empty line in docstrings ([#9813](https://github.com/astral-sh/ruff/pull/9813))

### Bug fixes

- Skip empty lines when determining base indentation ([#9795](https://github.com/astral-sh/ruff/pull/9795))
- Drop `__get__` and `__set__` from `unnecessary-dunder-call` ([#9791](https://github.com/astral-sh/ruff/pull/9791))
- Respect generic `Protocol` in ellipsis removal ([#9841](https://github.com/astral-sh/ruff/pull/9841))
- Revert "Use publicly available Apple Silicon runners (#9726)" ([#9834](https://github.com/astral-sh/ruff/pull/9834))

### Performance

- Skip LibCST parsing for standard dedent adjustments ([#9769](https://github.com/astral-sh/ruff/pull/9769))
- Remove CST-based fixer for `C408` ([#9822](https://github.com/astral-sh/ruff/pull/9822))
- Add our own ignored-names abstractions ([#9802](https://github.com/astral-sh/ruff/pull/9802))
- Remove CST-based fixers for `C400`, `C401`, `C410`, and `C418` ([#9819](https://github.com/astral-sh/ruff/pull/9819))
- Use `AhoCorasick` to speed up quote match ([#9773](https://github.com/astral-sh/ruff/pull/9773))
- Remove CST-based fixers for `C405` and `C409` ([#9821](https://github.com/astral-sh/ruff/pull/9821))
- Add fast-path for comment detection ([#9808](https://github.com/astral-sh/ruff/pull/9808))
- Invert order of checks in `zero-sleep-call` ([#9766](https://github.com/astral-sh/ruff/pull/9766))
- Short-circuit typing matches based on imports ([#9800](https://github.com/astral-sh/ruff/pull/9800))
- Run dunder method rule on methods directly ([#9815](https://github.com/astral-sh/ruff/pull/9815))
- Track top-level module imports in the semantic model ([#9775](https://github.com/astral-sh/ruff/pull/9775))
- Slight speed-up for lowercase and uppercase identifier checks ([#9798](https://github.com/astral-sh/ruff/pull/9798))
- Remove LibCST-based fixer for `C403` ([#9818](https://github.com/astral-sh/ruff/pull/9818))

### Documentation

- Update `max-pos-args` example to `max-positional-args` ([#9797](https://github.com/astral-sh/ruff/pull/9797))
- Fixed example code in `weak_cryptographic_key.rs` ([#9774](https://github.com/astral-sh/ruff/pull/9774))
- Fix references to deprecated `ANN` rules in changelog ([#9771](https://github.com/astral-sh/ruff/pull/9771))
- Fix default for `max-positional-args` ([#9838](https://github.com/astral-sh/ruff/pull/9838))

## 0.2.2

Highlights include:

- Initial support for formatting f-strings (in `--preview`).
- Support for overriding arbitrary configuration options via the CLI through an expanded `--config` argument (e.g., `--config "lint.isort.combine-as-imports=false"`).
- Significant performance improvements in Ruff's lexer, parser, and lint rules.

### Preview features

- Implement minimal f-string formatting ([#9642](https://github.com/astral-sh/ruff/pull/9642))
- \[`pycodestyle`\] Add blank line(s) rules (`E301`, `E302`, `E303`, `E304`, `E305`, `E306`) ([#9266](https://github.com/astral-sh/ruff/pull/9266)) (illustrated below)
- \[`refurb`\] Implement `readlines_in_for` (`FURB129`) ([#9880](https://github.com/astral-sh/ruff/pull/9880))

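As a hypothetical sketch (not taken from the linked PR) of what the new blank-line rules enforce, `E302` expects two blank lines between top-level definitions:

```python
def first():
    return 1
def second():  # E302: expected 2 blank lines before this definition, found 0
    return 2

```
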
### Rule changes

- \[`ruff`\] Ensure closing parentheses for multiline sequences are always on their own line (`RUF022`, `RUF023`) ([#9793](https://github.com/astral-sh/ruff/pull/9793))
- \[`numpy`\] Add missing deprecation violations (`NPY002`) ([#9862](https://github.com/astral-sh/ruff/pull/9862))
- \[`flake8-bandit`\] Detect `mark_safe` usages in decorators ([#9887](https://github.com/astral-sh/ruff/pull/9887))
- \[`ruff`\] Expand `asyncio-dangling-task` (`RUF006`) to include `new_event_loop` ([#9976](https://github.com/astral-sh/ruff/pull/9976))
- \[`flake8-pyi`\] Ignore 'unused' private type dicts in class scopes ([#9952](https://github.com/astral-sh/ruff/pull/9952))

### Formatter

- Docstring formatting: Preserve tab indentation when using `indent-style=tabs` ([#9915](https://github.com/astral-sh/ruff/pull/9915))
- Disable top-level docstring formatting for notebooks ([#9957](https://github.com/astral-sh/ruff/pull/9957))
- Stabilize quote-style's `preserve` mode ([#9922](https://github.com/astral-sh/ruff/pull/9922))

### CLI

- Allow arbitrary configuration options to be overridden via the CLI ([#9599](https://github.com/astral-sh/ruff/pull/9599))

### Bug fixes

- Make `show-settings` filters directory-agnostic ([#9866](https://github.com/astral-sh/ruff/pull/9866))
- Respect duplicates when rewriting type aliases ([#9905](https://github.com/astral-sh/ruff/pull/9905))
- Respect tuple assignments in typing analyzer ([#9969](https://github.com/astral-sh/ruff/pull/9969))
- Use atomic write when persisting cache ([#9981](https://github.com/astral-sh/ruff/pull/9981))
- Use non-parenthesized range for `DebugText` ([#9953](https://github.com/astral-sh/ruff/pull/9953))
- \[`flake8-simplify`\] Avoid false positive with `async` for loops (`SIM113`) ([#9996](https://github.com/astral-sh/ruff/pull/9996))
- \[`flake8-trio`\] Respect `async with` in `timeout-without-await` ([#9859](https://github.com/astral-sh/ruff/pull/9859))
- \[`perflint`\] Catch a wider range of mutations in `PERF101` ([#9955](https://github.com/astral-sh/ruff/pull/9955))
- \[`pycodestyle`\] Fix `E30X` panics on blank lines with trailing white spaces ([#9907](https://github.com/astral-sh/ruff/pull/9907))
- \[`pydocstyle`\] Allow using `parameters` as a subsection header (`D405`) ([#9894](https://github.com/astral-sh/ruff/pull/9894))
- \[`pydocstyle`\] Fix blank-line docstring rules for module-level docstrings ([#9878](https://github.com/astral-sh/ruff/pull/9878))
- \[`pylint`\] Accept 0.0 and 1.0 as common magic values (`PLR2004`) ([#9964](https://github.com/astral-sh/ruff/pull/9964))
- \[`pylint`\] Avoid suggesting set rewrites for non-hashable types ([#9956](https://github.com/astral-sh/ruff/pull/9956))
- \[`ruff`\] Avoid false negatives with string literals inside of method calls (`RUF027`) ([#9865](https://github.com/astral-sh/ruff/pull/9865))
- \[`ruff`\] Fix panic in f-string detection (`RUF027`) ([#9990](https://github.com/astral-sh/ruff/pull/9990))
- \[`ruff`\] Ignore builtins when detecting missing f-strings ([#9849](https://github.com/astral-sh/ruff/pull/9849))

### Performance

- Use `memchr` for string lexing ([#9888](https://github.com/astral-sh/ruff/pull/9888))
- Use `memchr` for tab-indentation detection ([#9853](https://github.com/astral-sh/ruff/pull/9853))
- Reduce `Result<Tok, LexicalError>` size by using `Box<str>` instead of `String` ([#9885](https://github.com/astral-sh/ruff/pull/9885))
- Reduce size of `Expr` from 80 to 64 bytes ([#9900](https://github.com/astral-sh/ruff/pull/9900))
- Improve trailing comma rule performance ([#9867](https://github.com/astral-sh/ruff/pull/9867))
- Remove unnecessary string cloning from the parser ([#9884](https://github.com/astral-sh/ruff/pull/9884))
@@ -1,308 +0,0 @@
# Changelog 0.3.x

## 0.3.0

This release introduces the new Ruff formatter 2024.2 style and adds a new lint rule to
detect invalid formatter suppression comments.

### Preview features

- \[`flake8-bandit`\] Remove suspicious-lxml-import (`S410`) ([#10154](https://github.com/astral-sh/ruff/pull/10154))
- \[`pycodestyle`\] Allow `os.environ` modifications between imports (`E402`) ([#10066](https://github.com/astral-sh/ruff/pull/10066))
- \[`pycodestyle`\] Don't warn about a single whitespace character before a comma in a tuple (`E203`) ([#10094](https://github.com/astral-sh/ruff/pull/10094))

### Rule changes

- \[`eradicate`\] Detect commented out `case` statements (`ERA001`) ([#10055](https://github.com/astral-sh/ruff/pull/10055))
- \[`eradicate`\] Detect single-line code for `try:`, `except:`, etc. (`ERA001`) ([#10057](https://github.com/astral-sh/ruff/pull/10057))
- \[`flake8-boolean-trap`\] Allow boolean positionals in `__post_init__` ([#10027](https://github.com/astral-sh/ruff/pull/10027))
- \[`flake8-copyright`\] Allow © in copyright notices ([#10065](https://github.com/astral-sh/ruff/pull/10065))
- \[`isort`\]: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
- \[`pylint`\] New Rule `dict-iter-missing-items` (`PLE1141`) ([#9845](https://github.com/astral-sh/ruff/pull/9845))
- \[`pylint`\] Ignore `sys.version` and `sys.platform` (`PLR1714`) ([#10054](https://github.com/astral-sh/ruff/pull/10054))
- \[`pyupgrade`\] Detect literals with unary operators (`UP018`) ([#10060](https://github.com/astral-sh/ruff/pull/10060))
- \[`ruff`\] Expand rule for `list(iterable).pop(0)` idiom (`RUF015`) ([#10148](https://github.com/astral-sh/ruff/pull/10148)) (see the sketch after this list)

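A hypothetical example (not taken from the linked PR) of the `list(iterable).pop(0)` idiom that the expanded `RUF015` now flags, along with the preferred spelling:

```python
items = [1, 2, 3]

head = list(items).pop(0)  # RUF015: copies the whole iterable to read one element
head = next(iter(items))   # preferred

```
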
### Formatter

This release introduces the Ruff 2024.2 style, stabilizing the following changes:

- Prefer splitting the assignment's value over the target or type annotation ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- Remove blank lines before class docstrings ([#9154](https://github.com/astral-sh/ruff/pull/9154))
- Wrap multiple context managers in `with` parentheses when targeting Python 3.9 or newer ([#9222](https://github.com/astral-sh/ruff/pull/9222))
- Add a blank line after nested classes with a dummy body (`...`) in typing stub files ([#9155](https://github.com/astral-sh/ruff/pull/9155))
- Reduce vertical spacing for classes and functions with a dummy (`...`) body ([#7440](https://github.com/astral-sh/ruff/issues/7440), [#9240](https://github.com/astral-sh/ruff/pull/9240))
- Add a blank line after the module docstring ([#8283](https://github.com/astral-sh/ruff/pull/8283)) (see the example below)
- Parenthesize long type hints in assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Preserve indent for single multiline-string call-expressions ([#9673](https://github.com/astral-sh/ruff/pull/9637))
- Normalize hex escape and unicode escape sequences ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Format module docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))

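As a small, assumed illustration of one of these stabilized changes (the blank line after the module docstring), code like

```python
"""Module docstring."""
import os
```

should now be formatted with a separating blank line:

```python
"""Module docstring."""

import os

```
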
### CLI

- Explicitly disallow `extend` as part of a `--config` flag ([#10135](https://github.com/astral-sh/ruff/pull/10135))
- Remove `build` from the default exclusion list ([#10093](https://github.com/astral-sh/ruff/pull/10093))
- Deprecate `ruff <path>`, `ruff --explain`, `ruff --clean`, and `ruff --generate-shell-completion` in favor of `ruff check <path>`, `ruff rule`, `ruff clean`, and `ruff generate-shell-completion` ([#10169](https://github.com/astral-sh/ruff/pull/10169))
- Remove the deprecated CLI option `--format` from `ruff rule` and `ruff linter` ([#10170](https://github.com/astral-sh/ruff/pull/10170))

### Bug fixes

- \[`flake8-bugbear`\] Avoid adding default initializers to stubs (`B006`) ([#10152](https://github.com/astral-sh/ruff/pull/10152))
- \[`flake8-type-checking`\] Respect runtime-required decorators for function signatures ([#10091](https://github.com/astral-sh/ruff/pull/10091))
- \[`pycodestyle`\] Mark fixes overlapping with a multiline string as unsafe (`W293`) ([#10049](https://github.com/astral-sh/ruff/pull/10049))
- \[`pydocstyle`\] Trim whitespace when removing blank lines after section (`D413`) ([#10162](https://github.com/astral-sh/ruff/pull/10162))
- \[`pylint`\] Delete entire statement, including semicolons (`PLR0203`) ([#10074](https://github.com/astral-sh/ruff/pull/10074))
- \[`ruff`\] Avoid f-string false positives in `gettext` calls (`RUF027`) ([#10118](https://github.com/astral-sh/ruff/pull/10118))
- Fix `ruff` crashing on PowerPC systems because of too small page size ([#10080](https://github.com/astral-sh/ruff/pull/10080))

### Performance

- Add cold attribute to less likely printer queue branches in the formatter ([#10121](https://github.com/astral-sh/ruff/pull/10121))
- Skip unnecessary string normalization in the formatter ([#10116](https://github.com/astral-sh/ruff/pull/10116))

### Documentation

- Remove "Beta" Label from formatter documentation ([#10144](https://github.com/astral-sh/ruff/pull/10144))
- `line-length` option: fix link to `pycodestyle.max-line-length` ([#10136](https://github.com/astral-sh/ruff/pull/10136))

## 0.3.1

### Preview features

- \[`pycodestyle`\] Fix E301 not triggering on decorated methods. ([#10117](https://github.com/astral-sh/ruff/pull/10117))
- \[`pycodestyle`\] Respect `isort` settings in blank line rules (`E3*`) ([#10096](https://github.com/astral-sh/ruff/pull/10096))
- \[`pycodestyle`\] Make blank lines in typing stub files optional (`E3*`) ([#10098](https://github.com/astral-sh/ruff/pull/10098))
- \[`pylint`\] Implement `singledispatch-method` (`E1519`) ([#10140](https://github.com/astral-sh/ruff/pull/10140))
- \[`pylint`\] Implement `useless-exception-statement` (`W0133`) ([#10176](https://github.com/astral-sh/ruff/pull/10176))

### Rule changes

- \[`flake8-debugger`\] Check for use of `debugpy` and `ptvsd` debug modules (#10177) ([#10194](https://github.com/astral-sh/ruff/pull/10194))
- \[`pyupgrade`\] Generate diagnostic for all valid f-string conversions regardless of line length (`UP032`) ([#10238](https://github.com/astral-sh/ruff/pull/10238))
- \[`pep8_naming`\] Add fixes for `N804` and `N805` ([#10215](https://github.com/astral-sh/ruff/pull/10215))

### CLI

- Colorize the output of `ruff format --diff` ([#10110](https://github.com/astral-sh/ruff/pull/10110))
- Make `--config` and `--isolated` global flags ([#10150](https://github.com/astral-sh/ruff/pull/10150))
- Correctly expand tildes and environment variables in paths passed to `--config` ([#10219](https://github.com/astral-sh/ruff/pull/10219))

### Configuration

- Accept a PEP 440 version specifier for `required-version` ([#10216](https://github.com/astral-sh/ruff/pull/10216))
- Implement isort's `default-section` setting ([#10149](https://github.com/astral-sh/ruff/pull/10149))

### Bug fixes

- Remove trailing space from `CapWords` message ([#10220](https://github.com/astral-sh/ruff/pull/10220))
- Respect external codes in file-level exemptions ([#10203](https://github.com/astral-sh/ruff/pull/10203))
- \[`flake8-raise`\] Avoid false-positives for parens-on-raise with `future.exception()` (`RSE102`) ([#10206](https://github.com/astral-sh/ruff/pull/10206))
- \[`pylint`\] Add fix for unary expressions in `PLC2801` ([#9587](https://github.com/astral-sh/ruff/pull/9587))
- \[`ruff`\] Fix RUF028 not allowing `# fmt: skip` on match cases ([#10178](https://github.com/astral-sh/ruff/pull/10178))

## 0.3.2

### Preview features

- Improve single-`with` item formatting for Python 3.8 or older ([#10276](https://github.com/astral-sh/ruff/pull/10276))

### Rule changes

- \[`pyupgrade`\] Allow fixes for f-string rule regardless of line length (`UP032`) ([#10263](https://github.com/astral-sh/ruff/pull/10263))
- \[`pycodestyle`\] Include actual conditions in E712 diagnostics ([#10254](https://github.com/astral-sh/ruff/pull/10254))

### Bug fixes

- Fix trailing kwargs end of line comment after slash ([#10297](https://github.com/astral-sh/ruff/pull/10297))
- Fix unstable `with` items formatting ([#10274](https://github.com/astral-sh/ruff/pull/10274))
- Avoid repeating function calls in f-string conversions ([#10265](https://github.com/astral-sh/ruff/pull/10265))
- Fix E203 false positive for slices in format strings ([#10280](https://github.com/astral-sh/ruff/pull/10280))
- Fix incorrect `Parameter` range for `*args` and `**kwargs` ([#10283](https://github.com/astral-sh/ruff/pull/10283))
- Treat `typing.Annotated` subscripts as type definitions ([#10285](https://github.com/astral-sh/ruff/pull/10285))

## 0.3.3

### Preview features

- \[`flake8-bandit`\]: Implement `S610` rule ([#10316](https://github.com/astral-sh/ruff/pull/10316))
- \[`pycodestyle`\] Implement `blank-line-at-end-of-file` (`W391`) ([#10243](https://github.com/astral-sh/ruff/pull/10243))
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`) ([#10292](https://github.com/astral-sh/ruff/pull/10292))
- \[`pylint`\] Implement `redeclared-assigned-name` (`W0128`) ([#9268](https://github.com/astral-sh/ruff/pull/9268))

### Rule changes

- \[`flake8_comprehensions`\] Handled special case for `C400` which also matches `C416` ([#10419](https://github.com/astral-sh/ruff/pull/10419))
- \[`flake8-bandit`\] Implement upstream updates for `S311`, `S324` and `S605` ([#10313](https://github.com/astral-sh/ruff/pull/10313))
- \[`pyflakes`\] Remove `F401` fix for `__init__` imports by default and allow opt-in to unsafe fix ([#10365](https://github.com/astral-sh/ruff/pull/10365))
- \[`pylint`\] Implement `invalid-bool-return-type` (`E304`) ([#10377](https://github.com/astral-sh/ruff/pull/10377))
- \[`pylint`\] Include builtin warnings in useless-exception-statement (`PLW0133`) ([#10394](https://github.com/astral-sh/ruff/pull/10394))

### CLI

- Add message on success to `ruff check` ([#8631](https://github.com/astral-sh/ruff/pull/8631))

### Bug fixes

- \[`PIE970`\] Allow trailing ellipsis in `typing.TYPE_CHECKING` ([#10413](https://github.com/astral-sh/ruff/pull/10413))
- Avoid `TRIO115` if the argument is a variable ([#10376](https://github.com/astral-sh/ruff/pull/10376))
- \[`F811`\] Avoid removing shadowed imports that point to different symbols ([#10387](https://github.com/astral-sh/ruff/pull/10387))
- Fix `F821` and `F822` false positives in `.pyi` files ([#10341](https://github.com/astral-sh/ruff/pull/10341))
- Fix `F821` false negatives in `.py` files when `from __future__ import annotations` is active ([#10362](https://github.com/astral-sh/ruff/pull/10362))
- Fix case where `Indexer` fails to identify continuation preceded by newline #10351 ([#10354](https://github.com/astral-sh/ruff/pull/10354))
- Sort hash maps in `Settings` display ([#10370](https://github.com/astral-sh/ruff/pull/10370))
- Track conditional deletions in the semantic model ([#10415](https://github.com/astral-sh/ruff/pull/10415))
- \[`C413`\] Wrap expressions in parentheses when negating ([#10346](https://github.com/astral-sh/ruff/pull/10346))
- \[`pycodestyle`\] Do not ignore lines before the first logical line in blank lines rules. ([#10382](https://github.com/astral-sh/ruff/pull/10382))
- \[`pycodestyle`\] Do not trigger `E225` and `E275` when the next token is a ')' ([#10315](https://github.com/astral-sh/ruff/pull/10315))
- \[`pylint`\] Avoid false-positive slot non-assignment for `__dict__` (`PLE0237`) ([#10348](https://github.com/astral-sh/ruff/pull/10348))
- Gate f-string struct size test for Rustc < 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))

### Documentation

- Use `ruff.toml` format in README ([#10393](https://github.com/astral-sh/ruff/pull/10393))
- \[`RUF008`\] Make it clearer that a mutable default in a dataclass is only valid if it is typed as a ClassVar ([#10395](https://github.com/astral-sh/ruff/pull/10395))
- \[`pylint`\] Extend docs and test in `invalid-str-return-type` (`E307`) ([#10400](https://github.com/astral-sh/ruff/pull/10400))
- Remove `.` from `check` and `format` commands ([#10217](https://github.com/astral-sh/ruff/pull/10217))

## 0.3.4

### Preview features

- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`) ([#10414](https://github.com/astral-sh/ruff/pull/10414))
- \[`pylint`\] Implement `nan-comparison` (`PLW0117`) ([#10401](https://github.com/astral-sh/ruff/pull/10401))
- \[`pylint`\] Implement `nonlocal-and-global` (`E115`) ([#10407](https://github.com/astral-sh/ruff/pull/10407))
- \[`pylint`\] Implement `singledispatchmethod-function` (`PLE5120`) ([#10428](https://github.com/astral-sh/ruff/pull/10428))
- \[`refurb`\] Implement `list-reverse-copy` (`FURB187`) ([#10212](https://github.com/astral-sh/ruff/pull/10212)) (sketch below)

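A hypothetical illustration (not taken from the linked PR) of the pattern `list-reverse-copy` (`FURB187`) targets, namely reversing a list by rebinding a sliced copy to the same name:

```python
values = [3, 1, 2]

values = values[::-1]  # FURB187: builds a reversed copy only to rebind it
values.reverse()       # reverses in place instead

```
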
### Rule changes

- \[`flake8-pytest-style`\] Add automatic fix for `pytest-parametrize-values-wrong-type` (`PT007`) ([#10461](https://github.com/astral-sh/ruff/pull/10461))
- \[`pycodestyle`\] Allow SPDX license headers to exceed the line length (`E501`) ([#10481](https://github.com/astral-sh/ruff/pull/10481))

### Formatter

- Fix unstable formatting for trailing subscript end-of-line comment ([#10492](https://github.com/astral-sh/ruff/pull/10492))

### Bug fixes

- Avoid code comment detection in PEP 723 script tags ([#10464](https://github.com/astral-sh/ruff/pull/10464))
- Avoid incorrect tuple transformation in single-element case (`C409`) ([#10491](https://github.com/astral-sh/ruff/pull/10491))
- Bug fix: Prevent fully defined links [`name`](link) from being reformatted ([#10442](https://github.com/astral-sh/ruff/pull/10442))
- Consider raw source code for `W605` ([#10480](https://github.com/astral-sh/ruff/pull/10480))
- Docs: Link inline settings when not part of options section ([#10499](https://github.com/astral-sh/ruff/pull/10499))
- Don't treat annotations as redefinitions in `.pyi` files ([#10512](https://github.com/astral-sh/ruff/pull/10512))
- Fix `E231` bug: Inconsistent catch compared to pycodestyle, such as when dict nested in list ([#10469](https://github.com/astral-sh/ruff/pull/10469))
- Fix pylint upstream categories not showing in docs ([#10441](https://github.com/astral-sh/ruff/pull/10441))
- Add missing `Options` references to blank line docs ([#10498](https://github.com/astral-sh/ruff/pull/10498))
- 'Revert "F821: Fix false negatives in .py files when `from __future__ import annotations` is active (#10362)"' ([#10513](https://github.com/astral-sh/ruff/pull/10513))
- Apply NFKC normalization to unicode identifiers in the lexer ([#10412](https://github.com/astral-sh/ruff/pull/10412))
- Avoid failures due to non-deterministic binding ordering ([#10478](https://github.com/astral-sh/ruff/pull/10478))
- \[`flake8-bugbear`\] Allow tuples of exceptions (`B030`) ([#10437](https://github.com/astral-sh/ruff/pull/10437))
- \[`flake8-quotes`\] Avoid syntax errors due to invalid quotes (`Q000, Q002`) ([#10199](https://github.com/astral-sh/ruff/pull/10199))

## 0.3.5

### Preview features

- \[`pylint`\] Implement `modified-iterating-set` (`E4703`) ([#10473](https://github.com/astral-sh/ruff/pull/10473))
- \[`refurb`\] Implement `for-loop-set-mutations` (`FURB142`) ([#10583](https://github.com/astral-sh/ruff/pull/10583))
- \[`refurb`\] Implement `unnecessary-from-float` (`FURB164`) ([#10647](https://github.com/astral-sh/ruff/pull/10647))
- \[`refurb`\] Implement `verbose-decimal-constructor` (`FURB157`) ([#10533](https://github.com/astral-sh/ruff/pull/10533))

### Rule changes

- \[`flake8-comprehensions`\] Handled special case for `C401` which also matches `C416` ([#10596](https://github.com/astral-sh/ruff/pull/10596))
- \[`flake8-pyi`\] Mark `unaliased-collections-abc-set-import` fix as "safe" for more cases in stub files (`PYI025`) ([#10547](https://github.com/astral-sh/ruff/pull/10547))
- \[`numpy`\] Add `row_stack` to NumPy 2.0 migration rule ([#10646](https://github.com/astral-sh/ruff/pull/10646))
- \[`pycodestyle`\] Allow cell magics before an import (`E402`) ([#10545](https://github.com/astral-sh/ruff/pull/10545))
- \[`pycodestyle`\] Avoid blank line rules for the first logical line in cell ([#10291](https://github.com/astral-sh/ruff/pull/10291))

### Configuration

- Respected nested namespace packages ([#10541](https://github.com/astral-sh/ruff/pull/10541))
- \[`flake8-boolean-trap`\] Add setting for user defined allowed boolean trap ([#10531](https://github.com/astral-sh/ruff/pull/10531))

### Bug fixes

- Correctly handle references in `__all__` definitions when renaming symbols in autofixes ([#10527](https://github.com/astral-sh/ruff/pull/10527))
- Track ranges of names inside `__all__` definitions ([#10525](https://github.com/astral-sh/ruff/pull/10525))
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`) ([#10539](https://github.com/astral-sh/ruff/pull/10539))
- \[`flake8-copyright`\] Accept commas in default copyright pattern ([#9498](https://github.com/astral-sh/ruff/pull/9498))
- \[`flake8-datetimez`\] Allow f-strings with `%z` for `DTZ007` ([#10651](https://github.com/astral-sh/ruff/pull/10651))
- \[`flake8-pytest-style`\] Fix `PT014` autofix for last item in list ([#10532](https://github.com/astral-sh/ruff/pull/10532))
- \[`flake8-quotes`\] Ignore `Q000`, `Q001` when string is inside forward ref ([#10585](https://github.com/astral-sh/ruff/pull/10585))
- \[`isort`\] Always place non-relative imports after relative imports ([#10669](https://github.com/astral-sh/ruff/pull/10669))
- \[`isort`\] Respect Unicode characters in import sorting ([#10529](https://github.com/astral-sh/ruff/pull/10529))
- \[`pyflakes`\] Fix F821 false negatives when `from __future__ import annotations` is active (attempt 2) ([#10524](https://github.com/astral-sh/ruff/pull/10524))
- \[`pyflakes`\] Make `unnecessary-lambda` an always-unsafe fix ([#10668](https://github.com/astral-sh/ruff/pull/10668))
- \[`pylint`\] Fixed false-positive on the rule `PLW1641` (`eq-without-hash`) ([#10566](https://github.com/astral-sh/ruff/pull/10566))
- \[`ruff`\] Fix panic in unused `# noqa` removal with multi-byte space (`RUF100`) ([#10682](https://github.com/astral-sh/ruff/pull/10682))

### Documentation

- Add PR title format to `CONTRIBUTING.md` ([#10665](https://github.com/astral-sh/ruff/pull/10665))
- Fix list markup to include blank lines required ([#10591](https://github.com/astral-sh/ruff/pull/10591))
- Put `flake8-logging` next to the other flake8 plugins in registry ([#10587](https://github.com/astral-sh/ruff/pull/10587))
- \[`flake8-bandit`\] Update warning message for rule `S305` to address insecure block cipher mode use ([#10602](https://github.com/astral-sh/ruff/pull/10602))
- \[`flake8-bugbear`\] Document use of anonymous assignment in `useless-expression` ([#10551](https://github.com/astral-sh/ruff/pull/10551))
- \[`flake8-datetimez`\] Clarify error messages and docs for `DTZ` rules ([#10621](https://github.com/astral-sh/ruff/pull/10621))
- \[`pycodestyle`\] Use same before vs. after numbers for `space-around-operator` ([#10640](https://github.com/astral-sh/ruff/pull/10640))
- \[`ruff`\] Change `quadratic-list-summation` docs to use `iadd` consistently ([#10666](https://github.com/astral-sh/ruff/pull/10666))

## 0.3.6

### Preview features

- \[`pylint`\] Implement `bad-staticmethod-argument` (`PLW0211`) ([#10781](https://github.com/astral-sh/ruff/pull/10781))
- \[`pylint`\] Implement `if-stmt-min-max` (`PLR1730`, `PLR1731`) ([#10002](https://github.com/astral-sh/ruff/pull/10002))
- \[`pyupgrade`\] Replace `str,Enum` multiple inheritance with `StrEnum` `UP042` ([#10713](https://github.com/astral-sh/ruff/pull/10713)) (see the example after this list)
- \[`refurb`\] Implement `if-expr-instead-of-or-operator` (`FURB110`) ([#10687](https://github.com/astral-sh/ruff/pull/10687))
- \[`refurb`\] Implement `int-on-sliced-str` (`FURB166`) ([#10650](https://github.com/astral-sh/ruff/pull/10650))
- \[`refurb`\] Implement `write-whole-file` (`FURB103`) ([#10802](https://github.com/astral-sh/ruff/pull/10802))
- \[`refurb`\] Support `itemgetter` in `reimplemented-operator` (`FURB118`) ([#10526](https://github.com/astral-sh/ruff/pull/10526))
- \[`flake8_comprehensions`\] Add `sum`/`min`/`max` to unnecessary comprehension check (`C419`) ([#10759](https://github.com/astral-sh/ruff/pull/10759))

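A hypothetical before/after (not taken from the linked PR) for `UP042`, which targets classes that inherit from both `str` and `Enum` when `enum.StrEnum` (Python 3.11+) is available:

```python
from enum import Enum, StrEnum


class Color(str, Enum):  # UP042 (preview): prefer StrEnum on Python 3.11+
    RED = "red"


class ColorNew(StrEnum):  # equivalent, with a single base class
    RED = "red"

```
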
### Rule changes

- \[`pydocstyle`\] Require capitalizing docstrings where the first sentence is a single word (`D403`) ([#10776](https://github.com/astral-sh/ruff/pull/10776))
- \[`pycodestyle`\] Ignore annotated lambdas in class scopes (`E731`) ([#10720](https://github.com/astral-sh/ruff/pull/10720))
- \[`flake8-pyi`\] Various improvements to PYI034 ([#10807](https://github.com/astral-sh/ruff/pull/10807))
- \[`flake8-slots`\] Flag subclasses of call-based `typing.NamedTuple`s as well as subclasses of `collections.namedtuple()` (`SLOT002`) ([#10808](https://github.com/astral-sh/ruff/pull/10808))
- \[`pyflakes`\] Allow forward references in class bases in stub files (`F821`) ([#10779](https://github.com/astral-sh/ruff/pull/10779))
- \[`pygrep-hooks`\] Improve `blanket-noqa` error message (`PGH004`) ([#10851](https://github.com/astral-sh/ruff/pull/10851))

### CLI

- Support `FORCE_COLOR` env var ([#10839](https://github.com/astral-sh/ruff/pull/10839))

### Configuration

- Support negated patterns in `[extend-]per-file-ignores` ([#10852](https://github.com/astral-sh/ruff/pull/10852))

### Bug fixes

- \[`flake8-import-conventions`\] Accept non-aliased (but correct) import in `unconventional-import-alias` (`ICN001`) ([#10729](https://github.com/astral-sh/ruff/pull/10729))
- \[`flake8-quotes`\] Add semantic model flag when inside f-string replacement field ([#10766](https://github.com/astral-sh/ruff/pull/10766))
- \[`pep8-naming`\] Recursively resolve `TypeDicts` for N815 violations ([#10719](https://github.com/astral-sh/ruff/pull/10719))
- \[`flake8-quotes`\] Respect `Q00*` ignores in `flake8-quotes` rules ([#10728](https://github.com/astral-sh/ruff/pull/10728))
- \[`flake8-simplify`\] Show negated condition in `needless-bool` diagnostics (`SIM103`) ([#10854](https://github.com/astral-sh/ruff/pull/10854))
- \[`ruff`\] Use within-scope shadowed bindings in `asyncio-dangling-task` (`RUF006`) ([#10793](https://github.com/astral-sh/ruff/pull/10793))
- \[`flake8-pytest-style`\] Fix single-tuple conversion in `pytest-parametrize-values-wrong-type` (`PT007`) ([#10862](https://github.com/astral-sh/ruff/pull/10862))
- \[`flake8-return`\] Ignore assignments to annotated variables in `unnecessary-assign` (`RET504`) ([#10741](https://github.com/astral-sh/ruff/pull/10741))
- \[`refurb`\] Do not allow any keyword arguments for `read-whole-file` in `rb` mode (`FURB101`) ([#10803](https://github.com/astral-sh/ruff/pull/10803))
- \[`pylint`\] Don't recommend decorating staticmethods with `@singledispatch` (`PLE1519`, `PLE1520`) ([#10637](https://github.com/astral-sh/ruff/pull/10637))
- \[`pydocstyle`\] Use section name range for all section-related docstring diagnostics ([#10740](https://github.com/astral-sh/ruff/pull/10740))
- Respect `# noqa` directives on `__all__` openers ([#10798](https://github.com/astral-sh/ruff/pull/10798))

## 0.3.7

### Preview features

- \[`flake8-bugbear`\] Implement `loop-iterator-mutation` (`B909`) ([#9578](https://github.com/astral-sh/ruff/pull/9578))
- \[`pylint`\] Implement rule to prefer augmented assignment (`PLR6104`) ([#9932](https://github.com/astral-sh/ruff/pull/9932)) (example below)

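A hypothetical example (not taken from the linked PR) of what the augmented-assignment rule (`PLR6104`) prefers:

```python
total = 0

total = total + 5  # PLR6104: non-augmented assignment
total += 5         # preferred augmented form

```
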
### Bug fixes

- Avoid TOCTOU errors in cache initialization ([#10884](https://github.com/astral-sh/ruff/pull/10884))
- \[`pylint`\] Recode `nan-comparison` rule to `W0177` ([#10894](https://github.com/astral-sh/ruff/pull/10894))
- \[`pylint`\] Reverse min-max logic in `if-stmt-min-max` ([#10890](https://github.com/astral-sh/ruff/pull/10890))
@@ -1,415 +0,0 @@
# Changelog 0.4.x

## 0.4.0

### A new, hand-written parser

Ruff's new parser is **>2x faster**, which translates to a **20-40% speedup** for all linting and formatting invocations.
There's a lot to say about this exciting change, so check out the [blog post](https://astral.sh/blog/ruff-v0.4.0) for more details!

See [#10036](https://github.com/astral-sh/ruff/pull/10036) for implementation details.

### A new language server in Rust

With this release, we also want to highlight our new language server. `ruff server` is a Rust-powered language
server that comes built-in with Ruff. It can be used with any editor that supports the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) (LSP).
It uses a multi-threaded, lock-free architecture inspired by `rust-analyzer` and it will open the door for a lot
of exciting features. It’s also faster than our previous [Python-based language server](https://github.com/astral-sh/ruff-lsp)
-- but you probably guessed that already.

`ruff server` is only in alpha, but it has a lot of features that you can try out today:

- Lints Python files automatically and shows quick-fixes when available
- Formats Python files, with support for range formatting
- Comes with commands for quickly performing actions: `ruff.applyAutofix`, `ruff.applyFormat`, and `ruff.applyOrganizeImports`
- Supports `source.fixAll` and `source.organizeImports` source actions
- Automatically reloads your project configuration when you change it

To set up `ruff server` with your editor, refer to the [README.md](https://github.com/astral-sh/ruff/blob/main/crates/ruff_server/README.md).

### Preview features

- \[`pycodestyle`\] Do not trigger `E3` rules on `def`s following a function/method with a dummy body ([#10704](https://github.com/astral-sh/ruff/pull/10704))
- \[`pylint`\] Implement `invalid-bytes-returned` (`E0308`) ([#10959](https://github.com/astral-sh/ruff/pull/10959))
- \[`pylint`\] Implement `invalid-length-returned` (`E0303`) ([#10963](https://github.com/astral-sh/ruff/pull/10963))
- \[`pylint`\] Implement `self-cls-assignment` (`W0642`) ([#9267](https://github.com/astral-sh/ruff/pull/9267))
- \[`pylint`\] Omit stubs from `invalid-bool` and `invalid-str-return-type` ([#11008](https://github.com/astral-sh/ruff/pull/11008))
- \[`ruff`\] New rule `unused-async` (`RUF029`) to detect unneeded `async` keywords on functions ([#9966](https://github.com/astral-sh/ruff/pull/9966)) (see the sketch below)

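A hypothetical illustration (not taken from the linked PR) of what `unused-async` (`RUF029`) flags, namely an `async def` whose body never awaits or uses other async-only constructs:

```python
async def load_settings():  # RUF029: nothing in the body requires `async`
    return {"debug": True}


def load_settings_sync():  # a plain function communicates the same thing
    return {"debug": True}

```
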
### Rule changes

- \[`flake8-bandit`\] Allow `urllib.request.urlopen` calls with static `Request` argument (`S310`) ([#10964](https://github.com/astral-sh/ruff/pull/10964))
- \[`flake8-bugbear`\] Treat `raise NotImplemented`-only bodies as stub functions (`B006`) ([#10990](https://github.com/astral-sh/ruff/pull/10990))
- \[`flake8-slots`\] Respect same-file `Enum` subclasses (`SLOT000`) ([#11006](https://github.com/astral-sh/ruff/pull/11006))
- \[`pylint`\] Support inverted comparisons (`PLR1730`) ([#10920](https://github.com/astral-sh/ruff/pull/10920))

### Linter

- Improve handling of builtin symbols in linter rules ([#10919](https://github.com/astral-sh/ruff/pull/10919))
- Improve display of rules in `--show-settings` ([#11003](https://github.com/astral-sh/ruff/pull/11003))
- Improve inference capabilities of the `BuiltinTypeChecker` ([#10976](https://github.com/astral-sh/ruff/pull/10976))
- Resolve classes and functions relative to script name ([#10965](https://github.com/astral-sh/ruff/pull/10965))
- Improve performance of `RuleTable::any_enabled` ([#10971](https://github.com/astral-sh/ruff/pull/10971))

### Server

_This section is devoted to updates for our new language server, written in Rust._

- Enable ruff-specific source actions ([#10916](https://github.com/astral-sh/ruff/pull/10916))
- Refreshes diagnostics for open files when file configuration is changed ([#10988](https://github.com/astral-sh/ruff/pull/10988))
- Important errors are now shown as popups ([#10951](https://github.com/astral-sh/ruff/pull/10951))
- Introduce settings for directly configuring the linter and formatter ([#10984](https://github.com/astral-sh/ruff/pull/10984))
- Resolve configuration for each document individually ([#10950](https://github.com/astral-sh/ruff/pull/10950))
- Write a setup guide for Neovim ([#10987](https://github.com/astral-sh/ruff/pull/10987))

### Configuration

- Add `RUFF_OUTPUT_FILE` environment variable support ([#10992](https://github.com/astral-sh/ruff/pull/10992))

### Bug fixes

- Avoid `non-augmented-assignment` for reversed, non-commutative operators (`PLR6104`) ([#10909](https://github.com/astral-sh/ruff/pull/10909))
- Limit commutative non-augmented-assignments to primitive data types (`PLR6104`) ([#10912](https://github.com/astral-sh/ruff/pull/10912))
- Respect `per-file-ignores` for `RUF100` on blanket `# noqa` ([#10908](https://github.com/astral-sh/ruff/pull/10908))
- Consider `if` expression for parenthesized with items parsing ([#11010](https://github.com/astral-sh/ruff/pull/11010))
- Consider binary expr for parenthesized with items parsing ([#11012](https://github.com/astral-sh/ruff/pull/11012))
- Reset `FOR_TARGET` context for all kinds of parentheses ([#11009](https://github.com/astral-sh/ruff/pull/11009))

## 0.4.1

### Preview features

- \[`pylint`\] Implement `invalid-hash-returned` (`PLE0309`) ([#10961](https://github.com/astral-sh/ruff/pull/10961))
- \[`pylint`\] Implement `invalid-index-returned` (`PLE0305`) ([#10962](https://github.com/astral-sh/ruff/pull/10962))

### Bug fixes

- \[`pylint`\] Allow `NoReturn`-like functions for `__str__`, `__len__`, etc. (`PLE0307`) ([#11017](https://github.com/astral-sh/ruff/pull/11017))
- Parser: Use empty range when there's "gap" in token source ([#11032](https://github.com/astral-sh/ruff/pull/11032))
- \[`ruff`\] Ignore stub functions in `unused-async` (`RUF029`) ([#11026](https://github.com/astral-sh/ruff/pull/11026))
- Parser: Expect indented case block instead of match stmt ([#11033](https://github.com/astral-sh/ruff/pull/11033))

## 0.4.2

### Rule changes

- \[`flake8-pyi`\] Allow for overloaded `__exit__` and `__aexit__` definitions (`PYI036`) ([#11057](https://github.com/astral-sh/ruff/pull/11057))
- \[`pyupgrade`\] Catch usages of `"%s" % var` and provide an unsafe fix (`UP031`) ([#11019](https://github.com/astral-sh/ruff/pull/11019))
- \[`refurb`\] Implement new rule that suggests min/max over `sorted()` (`FURB192`) ([#10868](https://github.com/astral-sh/ruff/pull/10868)) (example below)

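A hypothetical example (not taken from the linked PR) of the `sorted()` pattern that `FURB192` rewrites:

```python
data = [5, 2, 9]

smallest = sorted(data)[0]  # FURB192: sorts the whole list to get one element
smallest = min(data)        # preferred

```
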
### Server

- Fix an issue with missing diagnostics for Neovim and Helix ([#11092](https://github.com/astral-sh/ruff/pull/11092))
- Implement hover documentation for `noqa` codes ([#11096](https://github.com/astral-sh/ruff/pull/11096))
- Introduce common Ruff configuration options with new server settings ([#11062](https://github.com/astral-sh/ruff/pull/11062))

### Bug fixes

- Use `macos-12` for building release wheels to enable macOS 11 compatibility ([#11146](https://github.com/astral-sh/ruff/pull/11146))
- \[`flake8-blind-except`\] Allow raise from in `BLE001` ([#11131](https://github.com/astral-sh/ruff/pull/11131))
- \[`flake8-pyi`\] Allow simple assignments to `None` in enum class scopes (`PYI026`) ([#11128](https://github.com/astral-sh/ruff/pull/11128))
- \[`flake8-simplify`\] Avoid raising `SIM911` for non-`zip` attribute calls ([#11126](https://github.com/astral-sh/ruff/pull/11126))
- \[`refurb`\] Avoid `operator.itemgetter` suggestion for single-item tuple ([#11095](https://github.com/astral-sh/ruff/pull/11095))
- \[`ruff`\] Respect per-file-ignores for `RUF100` with no other diagnostics ([#11058](https://github.com/astral-sh/ruff/pull/11058))
- \[`ruff`\] Fix async comprehension false positive (`RUF029`) ([#11070](https://github.com/astral-sh/ruff/pull/11070))

### Documentation

- \[`flake8-bugbear`\] Document explicitly disabling strict zip (`B905`) ([#11040](https://github.com/astral-sh/ruff/pull/11040))
- \[`flake8-type-checking`\] Mention `lint.typing-modules` in `TCH001`, `TCH002`, and `TCH003` ([#11144](https://github.com/astral-sh/ruff/pull/11144))
- \[`isort`\] Improve documentation around custom `isort` sections ([#11050](https://github.com/astral-sh/ruff/pull/11050))
- \[`pylint`\] Fix documentation oversight for `invalid-X-returns` ([#11094](https://github.com/astral-sh/ruff/pull/11094))

### Performance

- Use `matchit` to resolve per-file settings ([#11111](https://github.com/astral-sh/ruff/pull/11111))

## 0.4.3

### Enhancements

- Add support for PEP 696 syntax ([#11120](https://github.com/astral-sh/ruff/pull/11120))

### Preview features

- \[`refurb`\] Use function range for `reimplemented-operator` diagnostics ([#11271](https://github.com/astral-sh/ruff/pull/11271))
- \[`refurb`\] Ignore methods in `reimplemented-operator` (`FURB118`) ([#11270](https://github.com/astral-sh/ruff/pull/11270))
- \[`refurb`\] Implement `fstring-number-format` (`FURB116`) ([#10921](https://github.com/astral-sh/ruff/pull/10921))
- \[`ruff`\] Implement `redirected-noqa` (`RUF101`) ([#11052](https://github.com/astral-sh/ruff/pull/11052))
- \[`pyflakes`\] Distinguish between first-party and third-party imports for fix suggestions ([#11168](https://github.com/astral-sh/ruff/pull/11168))

### Rule changes

- \[`flake8-bugbear`\] Ignore non-abstract class attributes when enforcing `B024` ([#11210](https://github.com/astral-sh/ruff/pull/11210))
- \[`flake8-logging`\] Include inline instantiations when detecting loggers ([#11154](https://github.com/astral-sh/ruff/pull/11154))
- \[`pylint`\] Also emit `PLR0206` for properties with variadic parameters ([#11200](https://github.com/astral-sh/ruff/pull/11200))
- \[`ruff`\] Detect duplicate codes as part of `unused-noqa` (`RUF100`) ([#10850](https://github.com/astral-sh/ruff/pull/10850))

### Formatter

- Avoid multiline expression if format specifier is present ([#11123](https://github.com/astral-sh/ruff/pull/11123))

### LSP

- Write `ruff server` setup guide for Helix ([#11183](https://github.com/astral-sh/ruff/pull/11183))
- `ruff server` no longer hangs after shutdown ([#11222](https://github.com/astral-sh/ruff/pull/11222))
- `ruff server` reads from a configuration TOML file in the user configuration directory if no local configuration exists ([#11225](https://github.com/astral-sh/ruff/pull/11225))
- `ruff server` respects `per-file-ignores` configuration ([#11224](https://github.com/astral-sh/ruff/pull/11224))
- `ruff server`: Support a custom TOML configuration file ([#11140](https://github.com/astral-sh/ruff/pull/11140))
- `ruff server`: Support setting to prioritize project configuration over editor configuration ([#11086](https://github.com/astral-sh/ruff/pull/11086))

### Bug fixes

- Avoid debug assertion around NFKC renames ([#11249](https://github.com/astral-sh/ruff/pull/11249))
- \[`pyflakes`\] Prioritize `redefined-while-unused` over `unused-import` ([#11173](https://github.com/astral-sh/ruff/pull/11173))
- \[`ruff`\] Respect `async` expressions in comprehension bodies ([#11219](https://github.com/astral-sh/ruff/pull/11219))
- \[`pygrep_hooks`\] Fix `blanket-noqa` panic when last line has noqa with no newline (`PGH004`) ([#11108](https://github.com/astral-sh/ruff/pull/11108))
- \[`perflint`\] Ignore list-copy recommendations for async `for` loops ([#11250](https://github.com/astral-sh/ruff/pull/11250))
- \[`pyflakes`\] Improve `invalid-print-syntax` documentation ([#11171](https://github.com/astral-sh/ruff/pull/11171))

### Performance

- Avoid allocations for isort module names ([#11251](https://github.com/astral-sh/ruff/pull/11251))
- Build a separate ARM wheel for macOS ([#11149](https://github.com/astral-sh/ruff/pull/11149))

### Windows

- Increase the minimum requirement to Windows 10.

## 0.4.4

### Preview features

- \[`pycodestyle`\] Ignore end-of-line comments when determining blank line rules ([#11342](https://github.com/astral-sh/ruff/pull/11342))
- \[`pylint`\] Detect `pathlib.Path.open` calls in `unspecified-encoding` (`PLW1514`) ([#11288](https://github.com/astral-sh/ruff/pull/11288))
- \[`flake8-pyi`\] Implement `PYI059` (`generic-not-last-base-class`) ([#11233](https://github.com/astral-sh/ruff/pull/11233))
- \[`flake8-pyi`\] Implement `PYI062` (`duplicate-literal-member`) ([#11269](https://github.com/astral-sh/ruff/pull/11269))

### Rule changes

- \[`flake8-boolean-trap`\] Allow passing booleans as positional-only arguments in code such as `set(True)` ([#11287](https://github.com/astral-sh/ruff/pull/11287))
- \[`flake8-bugbear`\] Ignore enum classes in `cached-instance-method` (`B019`) ([#11312](https://github.com/astral-sh/ruff/pull/11312))

### Server

- Expand tildes when resolving Ruff server configuration file ([#11283](https://github.com/astral-sh/ruff/pull/11283))
- Fix `ruff server` hanging after Neovim closes ([#11291](https://github.com/astral-sh/ruff/pull/11291))
- Editor settings are used by default if no file-based configuration exists ([#11266](https://github.com/astral-sh/ruff/pull/11266))

### Bug fixes

- \[`pylint`\] Consider `with` statements for `too-many-branches` (`PLR0912`) ([#11321](https://github.com/astral-sh/ruff/pull/11321))
- \[`flake8-blind-except`, `tryceratops`\] Respect logged and re-raised expressions in nested statements (`BLE001`, `TRY201`) ([#11301](https://github.com/astral-sh/ruff/pull/11301))
- Recognise assignments such as `__all__ = builtins.list(["foo", "bar"])` as valid `__all__` definitions ([#11335](https://github.com/astral-sh/ruff/pull/11335))

## 0.4.5

### Ruff's language server is now in Beta

`v0.4.5` marks the official Beta release of `ruff server`, an integrated language server built into Ruff.
`ruff server` supports the same feature set as `ruff-lsp`, powering linting, formatting, and
code fixes in Ruff's editor integrations -- but with superior performance and
no installation required. We'd love your feedback!

You can enable `ruff server` in the [VS Code extension](https://github.com/astral-sh/ruff-vscode?tab=readme-ov-file#enabling-the-rust-based-language-server) today.

To read more about this exciting milestone, check out our [blog post](https://astral.sh/blog/ruff-v0.4.5)!

### Rule changes

- \[`flake8-future-annotations`\] Reword `future-rewritable-type-annotation` (`FA100`) message ([#11381](https://github.com/astral-sh/ruff/pull/11381))
- \[`isort`\] Expanded the set of standard-library modules to include `_string`, etc. ([#11374](https://github.com/astral-sh/ruff/pull/11374))
- \[`pycodestyle`\] Consider soft keywords for `E27` rules ([#11446](https://github.com/astral-sh/ruff/pull/11446))
- \[`pyflakes`\] Recommend adding unused import bindings to `__all__` ([#11314](https://github.com/astral-sh/ruff/pull/11314))
- \[`pyflakes`\] Update documentation and deprecate `ignore_init_module_imports` ([#11436](https://github.com/astral-sh/ruff/pull/11436))
- \[`pyupgrade`\] Mark quotes as unnecessary for non-evaluated annotations ([#11485](https://github.com/astral-sh/ruff/pull/11485))

### Formatter

- Avoid multiline quotes warning with `quote-style = preserve` ([#11490](https://github.com/astral-sh/ruff/pull/11490))

### Server

- Support Jupyter Notebook files ([#11206](https://github.com/astral-sh/ruff/pull/11206))
- Support `noqa` comment code actions ([#11276](https://github.com/astral-sh/ruff/pull/11276))
- Fix automatic configuration reloading ([#11492](https://github.com/astral-sh/ruff/pull/11492))
- Fix several issues with configuration in Neovim and Helix ([#11497](https://github.com/astral-sh/ruff/pull/11497))

### CLI

- Add `--output-format` as a CLI option for `ruff config` ([#11438](https://github.com/astral-sh/ruff/pull/11438))

### Bug fixes

- Avoid `PLE0237` for property with setter ([#11377](https://github.com/astral-sh/ruff/pull/11377))
- Avoid `TCH005` for `if` stmt with `elif`/`else` block ([#11376](https://github.com/astral-sh/ruff/pull/11376))
- Avoid flagging `__future__` annotations as required for non-evaluated type annotations ([#11414](https://github.com/astral-sh/ruff/pull/11414))
- Check for ruff executable in 'bin' directory as installed by 'pip install --target'. ([#11450](https://github.com/astral-sh/ruff/pull/11450))
- Sort edits prior to deduplicating in quotation fix ([#11452](https://github.com/astral-sh/ruff/pull/11452))
- Treat escaped newline as valid sequence ([#11465](https://github.com/astral-sh/ruff/pull/11465))
- \[`flake8-pie`\] Preserve parentheses in `unnecessary-dict-kwargs` ([#11372](https://github.com/astral-sh/ruff/pull/11372))
- \[`pylint`\] Ignore `__slots__` with dynamic values ([#11488](https://github.com/astral-sh/ruff/pull/11488))
- \[`pylint`\] Remove `try` body from branch counting ([#11487](https://github.com/astral-sh/ruff/pull/11487))
- \[`refurb`\] Respect operator precedence in `FURB110` ([#11464](https://github.com/astral-sh/ruff/pull/11464))

### Documentation

- Add `--preview` to the README ([#11395](https://github.com/astral-sh/ruff/pull/11395))
- Add Python 3.13 to list of allowed Python versions ([#11411](https://github.com/astral-sh/ruff/pull/11411))
- Simplify Neovim setup documentation ([#11489](https://github.com/astral-sh/ruff/pull/11489))
- Update CONTRIBUTING.md to reflect the new parser ([#11434](https://github.com/astral-sh/ruff/pull/11434))
- Update server documentation with new migration guide ([#11499](https://github.com/astral-sh/ruff/pull/11499))
- \[`pycodestyle`\] Clarify motivation for `E713` and `E714` ([#11483](https://github.com/astral-sh/ruff/pull/11483))
- \[`pyflakes`\] Update docs to describe WAI behavior (F541) ([#11362](https://github.com/astral-sh/ruff/pull/11362))
- \[`pylint`\] Clearly indicate what is counted as a branch ([#11423](https://github.com/astral-sh/ruff/pull/11423))

## 0.4.6

### Breaking changes

- Use project-relative paths when calculating GitLab fingerprints ([#11532](https://github.com/astral-sh/ruff/pull/11532))
- Bump minimum supported Windows version to Windows 10 ([#11613](https://github.com/astral-sh/ruff/pull/11613))

### Preview features

- \[`flake8-async`\] Sleep with >24 hour interval should usually sleep forever (`ASYNC116`) ([#11498](https://github.com/astral-sh/ruff/pull/11498))

### Rule changes

- \[`numpy`\] Add missing functions to NumPy 2.0 migration rule ([#11528](https://github.com/astral-sh/ruff/pull/11528))
- \[`mccabe`\] Consider irrefutable pattern similar to `if .. else` for `C901` ([#11565](https://github.com/astral-sh/ruff/pull/11565))
- Consider `match`-`case` statements for `C901`, `PLR0912`, and `PLR0915` ([#11521](https://github.com/astral-sh/ruff/pull/11521))
- Remove empty strings when converting to f-string (`UP032`) ([#11524](https://github.com/astral-sh/ruff/pull/11524))
- \[`flake8-bandit`\] `request-without-timeout` should warn for `requests.request` ([#11548](https://github.com/astral-sh/ruff/pull/11548))
- \[`flake8-self`\] Ignore sunder accesses in `flake8-self` rules ([#11546](https://github.com/astral-sh/ruff/pull/11546))
- \[`pyupgrade`\] Lint for `TypeAliasType` usages (`UP040`) ([#11530](https://github.com/astral-sh/ruff/pull/11530))

### Server

- Respect excludes in `ruff server` configuration discovery ([#11551](https://github.com/astral-sh/ruff/pull/11551))
- Use default settings if initialization options is empty or not provided ([#11566](https://github.com/astral-sh/ruff/pull/11566))
- `ruff server` correctly treats `.pyi` files as stub files ([#11535](https://github.com/astral-sh/ruff/pull/11535))
- `ruff server` searches for configuration in parent directories ([#11537](https://github.com/astral-sh/ruff/pull/11537))
- `ruff server`: An empty code action filter no longer returns notebook source actions ([#11526](https://github.com/astral-sh/ruff/pull/11526))

### Bug fixes

- \[`flake8-logging-format`\] Fix autofix title in `logging-warn` (`G010`) ([#11514](https://github.com/astral-sh/ruff/pull/11514))
- \[`refurb`\] Avoid recommending `operator.itemgetter` with dependence on lambda arguments ([#11574](https://github.com/astral-sh/ruff/pull/11574))
- \[`flake8-simplify`\] Avoid recommending context manager in `__enter__` implementations ([#11575](https://github.com/astral-sh/ruff/pull/11575))
- Create intermediary directories for `--output-file` ([#11550](https://github.com/astral-sh/ruff/pull/11550))
- Propagate reads on global variables ([#11584](https://github.com/astral-sh/ruff/pull/11584))
- Treat all `singledispatch` arguments as runtime-required ([#11523](https://github.com/astral-sh/ruff/pull/11523))

## 0.4.7

### Preview features

- \[`flake8-pyi`\] Implement `PYI064` ([#11325](https://github.com/astral-sh/ruff/pull/11325))
- \[`flake8-pyi`\] Implement `PYI066` ([#11541](https://github.com/astral-sh/ruff/pull/11541))
- \[`flake8-pyi`\] Implement `PYI057` ([#11486](https://github.com/astral-sh/ruff/pull/11486))
- \[`pyflakes`\] Enable `F822` in `__init__.py` files by default ([#11370](https://github.com/astral-sh/ruff/pull/11370))

### Formatter

- Fix incorrect placement of trailing stub function comments ([#11632](https://github.com/astral-sh/ruff/pull/11632))

### Server

- Respect file exclusions in `ruff server` ([#11590](https://github.com/astral-sh/ruff/pull/11590))
- Add support for documents that do not exist on disk ([#11588](https://github.com/astral-sh/ruff/pull/11588))
- Add Vim and Kate setup guide for `ruff server` ([#11615](https://github.com/astral-sh/ruff/pull/11615))

### Bug fixes

- Avoid removing newlines between docstring headers and rST blocks ([#11609](https://github.com/astral-sh/ruff/pull/11609))
- Infer indentation with imports when logical indent is absent ([#11608](https://github.com/astral-sh/ruff/pull/11608))
- Use char index rather than position for indent slice ([#11645](https://github.com/astral-sh/ruff/pull/11645))
- \[`flake8-comprehension`\] Strip parentheses around generators in `C400` ([#11607](https://github.com/astral-sh/ruff/pull/11607))
- Mark `repeated-isinstance-calls` as unsafe on Python 3.10 and later ([#11622](https://github.com/astral-sh/ruff/pull/11622))

## 0.4.8

### Performance

- Linter performance has been improved by around 10% on some microbenchmarks by refactoring the lexer and parser to maintain synchronicity between them ([#11457](https://github.com/astral-sh/ruff/pull/11457))

### Preview features

- \[`flake8-bugbear`\] Implement `return-in-generator` (`B901`) ([#11644](https://github.com/astral-sh/ruff/pull/11644)) (see the sketch after this list)
- \[`flake8-pyi`\] Implement `pep484-style-positional-only-parameter` (`PYI063`) ([#11699](https://github.com/astral-sh/ruff/pull/11699))
- \[`pygrep_hooks`\] Check blanket ignores via file-level pragmas (`PGH004`) ([#11540](https://github.com/astral-sh/ruff/pull/11540))

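A hypothetical example (not taken from the linked PR) of the pattern `return-in-generator` (`B901`) warns about: returning a value from a generator, where the value ends up on `StopIteration` rather than being visible to ordinary `for` loops:

```python
def read_lines(path):
    with open(path) as handle:
        for line in handle:
            yield line.rstrip()
        return handle.name  # B901: callers iterating the generator never see this value

```
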
### Rule changes
|
||||
|
||||
- \[`pyupgrade`\] Update `UP035` for Python 3.13 and the latest version of `typing_extensions` ([#11693](https://github.com/astral-sh/ruff/pull/11693))
|
||||
- \[`numpy`\] Update `NPY001` rule for NumPy 2.0 ([#11735](https://github.com/astral-sh/ruff/pull/11735))
|
||||
|
||||
### Server
|
||||
|
||||
- Formatting a document with syntax problems no longer spams a visible error popup ([#11745](https://github.com/astral-sh/ruff/pull/11745))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add RDJson support for `--output-format` flag ([#11682](https://github.com/astral-sh/ruff/pull/11682))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pyupgrade`\] Write empty string in lieu of panic when fixing `UP032` ([#11696](https://github.com/astral-sh/ruff/pull/11696))
|
||||
- \[`flake8-simplify`\] Simplify double negatives in `SIM103` ([#11684](https://github.com/astral-sh/ruff/pull/11684))
|
||||
- Ensure the expression generator adds a newline before `type` statements ([#11720](https://github.com/astral-sh/ruff/pull/11720))
|
||||
- Respect per-file ignores for blanket and redirected noqa rules ([#11728](https://github.com/astral-sh/ruff/pull/11728))
|
||||
|
||||
## 0.4.9

### Preview features

- \[`pylint`\] Implement `consider-dict-items` (`C0206`) ([#11688](https://github.com/astral-sh/ruff/pull/11688))
- \[`refurb`\] Implement `repeated-global` (`FURB154`) ([#11187](https://github.com/astral-sh/ruff/pull/11187))

### Rule changes

- \[`pycodestyle`\] Adapt fix for `E203` to work identically to `ruff format` ([#10999](https://github.com/astral-sh/ruff/pull/10999))

### Formatter

- Fix formatter instability for lines only consisting of zero-width characters ([#11748](https://github.com/astral-sh/ruff/pull/11748))

### Server

- Add supported commands in server capabilities ([#11850](https://github.com/astral-sh/ruff/pull/11850))
- Use real file path when available in `ruff server` ([#11800](https://github.com/astral-sh/ruff/pull/11800))
- Improve error message when a command is run on an unavailable document ([#11823](https://github.com/astral-sh/ruff/pull/11823))
- Introduce the `ruff.printDebugInformation` command ([#11831](https://github.com/astral-sh/ruff/pull/11831))
- Tracing system now respects log level and trace level, with options to log to a file ([#11747](https://github.com/astral-sh/ruff/pull/11747))

### CLI

- Handle non-printable characters in diff view ([#11687](https://github.com/astral-sh/ruff/pull/11687))

### Bug fixes

- \[`refurb`\] Avoid suggesting starmap when arguments are used outside call (`FURB140`) ([#11830](https://github.com/astral-sh/ruff/pull/11830))
- \[`flake8-bugbear`\] Avoid panic in `B909` when checking large loop blocks ([#11772](https://github.com/astral-sh/ruff/pull/11772))
- \[`refurb`\] Fix misbehavior of `operator.itemgetter` when getter param is a tuple (`FURB118`) ([#11774](https://github.com/astral-sh/ruff/pull/11774))

## 0.4.10

### Parser

- Implement re-lexing logic for better error recovery ([#11845](https://github.com/astral-sh/ruff/pull/11845))

### Rule changes

- \[`flake8-copyright`\] Update `CPY001` to check the first 4096 bytes instead of 1024 ([#11927](https://github.com/astral-sh/ruff/pull/11927))
- \[`pycodestyle`\] Update `E999` to show all syntax errors instead of just the first one ([#11900](https://github.com/astral-sh/ruff/pull/11900))

### Server

- Add tracing setup guide to Helix documentation ([#11883](https://github.com/astral-sh/ruff/pull/11883))
- Add tracing setup guide to Neovim documentation ([#11884](https://github.com/astral-sh/ruff/pull/11884))
- Defer notebook cell deletion to avoid an error message ([#11864](https://github.com/astral-sh/ruff/pull/11864))

### Security

- Guard against malicious ecosystem comment artifacts ([#11879](https://github.com/astral-sh/ruff/pull/11879))
@@ -1,402 +0,0 @@
|
||||
# Changelog 0.5.x
|
||||
|
||||
## 0.5.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.5.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also the "Remapped rules" section, which may result in disabled rules.
|
||||
|
||||
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
|
||||
- Selecting `ALL` now excludes deprecated rules
|
||||
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
|
||||
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
|
||||
- The diagnostic ranges for some `flake8-bandit` rules were modified ([#10667](https://github.com/astral-sh/ruff/pull/10667)).
|
||||
|
||||
### Deprecations
|
||||
|
||||
The following rules are now deprecated:
|
||||
|
||||
- [`syntax-error`](https://docs.astral.sh/ruff/rules/syntax-error/) (`E999`): Syntax errors are now always shown
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- [`blocking-http-call-in-async-function`](https://docs.astral.sh/ruff/rules/blocking-http-call-in-async-function/): `ASYNC100` to `ASYNC210`
|
||||
- [`open-sleep-or-subprocess-in-async-function`](https://docs.astral.sh/ruff/rules/open-sleep-or-subprocess-in-async-function/): `ASYNC101` split into `ASYNC220`, `ASYNC221`, `ASYNC230`, and `ASYNC251`
|
||||
- [`blocking-os-call-in-async-function`](https://docs.astral.sh/ruff/rules/blocking-os-call-in-async-function/): `ASYNC102` has been merged into `ASYNC220` and `ASYNC221`
|
||||
- [`trio-timeout-without-await`](https://docs.astral.sh/ruff/rules/trio-timeout-without-await/): `TRIO100` to `ASYNC100`
|
||||
- [`trio-sync-call`](https://docs.astral.sh/ruff/rules/trio-sync-call/): `TRIO105` to `ASYNC105`
|
||||
- [`trio-async-function-with-timeout`](https://docs.astral.sh/ruff/rules/trio-async-function-with-timeout/): `TRIO109` to `ASYNC109`
|
||||
- [`trio-unneeded-sleep`](https://docs.astral.sh/ruff/rules/trio-unneeded-sleep/): `TRIO110` to `ASYNC110`
|
||||
- [`trio-zero-sleep-call`](https://docs.astral.sh/ruff/rules/trio-zero-sleep-call/): `TRIO115` to `ASYNC115`
|
||||
- [`repeated-isinstance-calls`](https://docs.astral.sh/ruff/rules/repeated-isinstance-calls/): `PLR1701` to `SIM101`
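
Configurations that select or ignore the remapped rules by code should be updated to the new codes; a minimal sketch of such an update (the selection itself is illustrative, not a recommended set):

```toml
[tool.ruff.lint]
# Before: select = ["TRIO100", "PLR1701"]
select = ["ASYNC100", "SIM101"]
```
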
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`mutable-fromkeys-value`](https://docs.astral.sh/ruff/rules/mutable-fromkeys-value/) (`RUF024`)
|
||||
- [`default-factory-kwarg`](https://docs.astral.sh/ruff/rules/default-factory-kwarg/) (`RUF026`)
|
||||
- [`django-extra`](https://docs.astral.sh/ruff/rules/django-extra/) (`S610`)
|
||||
- [`manual-dict-comprehension`](https://docs.astral.sh/ruff/rules/manual-dict-comprehension/) (`PERF403`)
|
||||
- [`print-empty-string`](https://docs.astral.sh/ruff/rules/print-empty-string/) (`FURB105`)
|
||||
- [`readlines-in-for`](https://docs.astral.sh/ruff/rules/readlines-in-for/) (`FURB129`)
|
||||
- [`if-expr-min-max`](https://docs.astral.sh/ruff/rules/if-expr-min-max/) (`FURB136`)
|
||||
- [`bit-count`](https://docs.astral.sh/ruff/rules/bit-count/) (`FURB161`)
|
||||
- [`redundant-log-base`](https://docs.astral.sh/ruff/rules/redundant-log-base/) (`FURB163`)
|
||||
- [`regex-flag-alias`](https://docs.astral.sh/ruff/rules/regex-flag-alias/) (`FURB167`)
|
||||
- [`isinstance-type-none`](https://docs.astral.sh/ruff/rules/isinstance-type-none/) (`FURB168`)
|
||||
- [`type-none-comparison`](https://docs.astral.sh/ruff/rules/type-none-comparison/) (`FURB169`)
|
||||
- [`implicit-cwd`](https://docs.astral.sh/ruff/rules/implicit-cwd/) (`FURB177`)
|
||||
- [`hashlib-digest-hex`](https://docs.astral.sh/ruff/rules/hashlib-digest-hex/) (`FURB181`)
|
||||
- [`list-reverse-copy`](https://docs.astral.sh/ruff/rules/list-reverse-copy/) (`FURB187`)
|
||||
- [`bad-open-mode`](https://docs.astral.sh/ruff/rules/bad-open-mode/) (`PLW1501`)
|
||||
- [`empty-comment`](https://docs.astral.sh/ruff/rules/empty-comment/) (`PLR2044`)
|
||||
- [`global-at-module-level`](https://docs.astral.sh/ruff/rules/global-at-module-level/) (`PLW0604`)
|
||||
- [`misplaced-bare-raise`](https://docs.astral.sh/ruff/rules/misplaced-bare-raise/) (`PLE0744`)
|
||||
- [`non-ascii-import-name`](https://docs.astral.sh/ruff/rules/non-ascii-import-name/) (`PLC2403`)
|
||||
- [`non-ascii-name`](https://docs.astral.sh/ruff/rules/non-ascii-name/) (`PLC2401`)
|
||||
- [`nonlocal-and-global`](https://docs.astral.sh/ruff/rules/nonlocal-and-global/) (`PLE0115`)
|
||||
- [`potential-index-error`](https://docs.astral.sh/ruff/rules/potential-index-error/) (`PLE0643`)
|
||||
- [`redeclared-assigned-name`](https://docs.astral.sh/ruff/rules/redeclared-assigned-name/) (`PLW0128`)
|
||||
- [`redefined-argument-from-local`](https://docs.astral.sh/ruff/rules/redefined-argument-from-local/) (`PLR1704`)
|
||||
- [`repeated-keyword-argument`](https://docs.astral.sh/ruff/rules/repeated-keyword-argument/) (`PLE1132`)
|
||||
- [`super-without-brackets`](https://docs.astral.sh/ruff/rules/super-without-brackets/) (`PLW0245`)
|
||||
- [`unnecessary-list-index-lookup`](https://docs.astral.sh/ruff/rules/unnecessary-list-index-lookup/) (`PLR1736`)
|
||||
- [`useless-exception-statement`](https://docs.astral.sh/ruff/rules/useless-exception-statement/) (`PLW0133`)
|
||||
- [`useless-with-lock`](https://docs.astral.sh/ruff/rules/useless-with-lock/) (`PLW2101`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`is-literal`](https://docs.astral.sh/ruff/rules/is-literal/) (`F632`) now warns for identity checks against list, set or dictionary literals
|
||||
- [`needless-bool`](https://docs.astral.sh/ruff/rules/needless-bool/) (`SIM103`) now detects `if` expressions with implicit `else` branches
|
||||
- [`module-import-not-at-top-of-file`](https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/) (`E402`) now allows `os.environ` modifications between import statements
|
||||
- [`type-comparison`](https://docs.astral.sh/ruff/rules/type-comparison/) (`E721`) now allows idioms such as `type(x) is int`
|
||||
- [`yoda-condition`](https://docs.astral.sh/ruff/rules/yoda-conditions/) (`SIM300`) now flags a wider range of expressions
|
||||
|
||||
### Removals
|
||||
|
||||
The following deprecated settings have been removed:
|
||||
|
||||
- `output-format=text`; use `output-format=concise` or `output-format=full`
|
||||
- `tab-size`; use `indent-width`
|
||||
|
||||
The following deprecated CLI options have been removed:
|
||||
|
||||
- `--show-source`; use `--output-format=full`
|
||||
- `--no-show-source`; use `--output-format=concise`
|
||||
|
||||
The following deprecated CLI commands have been removed:
|
||||
|
||||
- `ruff <path>`; use `ruff check <path>`
|
||||
- `ruff --clean`; use `ruff clean`
|
||||
- `ruff --generate-shell-completion`; use `ruff generate-shell-completion`
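
For projects still carrying the removed options, the replacements live in the same configuration file; a minimal sketch (values shown are illustrative, not recommendations):

```toml
[tool.ruff]
# Replaces the removed `tab-size` setting.
indent-width = 4
# Replaces the removed `output-format = "text"`; "concise" is the other option.
output-format = "full"
```
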
### Preview features
|
||||
|
||||
- \[`ruff`\] Add `assert-with-print-message` rule ([#11981](https://github.com/astral-sh/ruff/pull/11981))
|
||||
|
||||
### CLI
|
||||
|
||||
- Use rule name rather than message in `--statistics` ([#11697](https://github.com/astral-sh/ruff/pull/11697))
|
||||
- Use the output format `full` by default ([#12010](https://github.com/astral-sh/ruff/pull/12010))
|
||||
- Don't log syntax errors to the console ([#11902](https://github.com/astral-sh/ruff/pull/11902))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Fix false positives if `gettext` is imported using an alias (`RUF027`) ([#12025](https://github.com/astral-sh/ruff/pull/12025))
|
||||
- \[`numpy`\] Update `trapz` and `in1d` deprecation (`NPY201`) ([#11948](https://github.com/astral-sh/ruff/pull/11948))
|
||||
- \[`flake8-bandit`\] Modify diagnostic ranges for shell-related rules ([#10667](https://github.com/astral-sh/ruff/pull/10667))
|
||||
|
||||
### Server
|
||||
|
||||
- Closing an untitled, unsaved notebook document no longer throws an error ([#11942](https://github.com/astral-sh/ruff/pull/11942))
|
||||
- Support the usage of tildes and environment variables in `logFile` ([#11945](https://github.com/astral-sh/ruff/pull/11945))
|
||||
- Add option to configure whether to show syntax errors ([#12059](https://github.com/astral-sh/ruff/pull/12059))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pycodestyle`\] Avoid `E203` for f-string debug expression ([#12024](https://github.com/astral-sh/ruff/pull/12024))
|
||||
- \[`pep8-naming`\] Match import-name ignores against both name and alias (`N812`, `N817`) ([#12033](https://github.com/astral-sh/ruff/pull/12033))
|
||||
- \[`pyflakes`\] Detect assignments that shadow definitions (`F811`) ([#11961](https://github.com/astral-sh/ruff/pull/11961))
|
||||
|
||||
### Parser
|
||||
|
||||
- Emit a syntax error for an empty type parameter list ([#12030](https://github.com/astral-sh/ruff/pull/12030))
|
||||
- Avoid consuming the newline for unterminated strings ([#12067](https://github.com/astral-sh/ruff/pull/12067))
|
||||
- Do not include the newline in the unterminated string range ([#12017](https://github.com/astral-sh/ruff/pull/12017))
|
||||
- Use the correct range to highlight line continuation errors ([#12016](https://github.com/astral-sh/ruff/pull/12016))
|
||||
- Consider 2-character EOL before line continuations ([#12035](https://github.com/astral-sh/ruff/pull/12035))
|
||||
- Consider line continuation character for re-lexing ([#12008](https://github.com/astral-sh/ruff/pull/12008))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Upgrade the Unicode table used for measuring the line-length ([#11194](https://github.com/astral-sh/ruff/pull/11194))
|
||||
- Remove the deprecation error message for the nursery selector ([#10172](https://github.com/astral-sh/ruff/pull/10172))
|
||||
|
||||
## 0.5.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement mutable-contextvar-default (B039) ([#12113](https://github.com/astral-sh/ruff/pull/12113))
|
||||
- \[`pycodestyle`\] Whitespace after decorator (`E204`) ([#12140](https://github.com/astral-sh/ruff/pull/12140))
|
||||
- \[`pytest`\] Reverse `PT001` and `PT023` defaults ([#12106](https://github.com/astral-sh/ruff/pull/12106))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Enable token-based rules on source with syntax errors ([#11950](https://github.com/astral-sh/ruff/pull/11950))
|
||||
- \[`flake8-bandit`\] Detect `httpx` for `S113` ([#12174](https://github.com/astral-sh/ruff/pull/12174))
|
||||
- \[`numpy`\] Update `NPY201` to include exception deprecations ([#12065](https://github.com/astral-sh/ruff/pull/12065))
|
||||
- \[`pylint`\] Generate autofix for `duplicate-bases` (`PLE0241`) ([#12105](https://github.com/astral-sh/ruff/pull/12105))
|
||||
|
||||
### Server
|
||||
|
||||
- Avoid syntax error notification for source code actions ([#12148](https://github.com/astral-sh/ruff/pull/12148))
|
||||
- Consider the content of the new cells during notebook sync ([#12203](https://github.com/astral-sh/ruff/pull/12203))
|
||||
- Fix replacement edit range computation ([#12171](https://github.com/astral-sh/ruff/pull/12171))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Disable auto-fix when source has syntax errors ([#12134](https://github.com/astral-sh/ruff/pull/12134))
|
||||
- Fix cache key collisions for paths with separators ([#12159](https://github.com/astral-sh/ruff/pull/12159))
|
||||
- Make `requires-python` inference robust to `==` ([#12091](https://github.com/astral-sh/ruff/pull/12091))
|
||||
- Use char-wise width instead of `str`-width ([#12135](https://github.com/astral-sh/ruff/pull/12135))
|
||||
- \[`pycodestyle`\] Avoid `E275` if keyword followed by comma ([#12136](https://github.com/astral-sh/ruff/pull/12136))
|
||||
- \[`pycodestyle`\] Avoid `E275` if keyword is followed by a semicolon ([#12095](https://github.com/astral-sh/ruff/pull/12095))
|
||||
- \[`pylint`\] Skip [dummy variables](https://docs.astral.sh/ruff/settings/#lint_dummy-variable-rgx) for `PLR1704` ([#12190](https://github.com/astral-sh/ruff/pull/12190))
|
||||
|
||||
### Performance
|
||||
|
||||
- Remove allocation in `parse_identifier` ([#12103](https://github.com/astral-sh/ruff/pull/12103))
|
||||
- Use `CompactString` for `Identifier` AST node ([#12101](https://github.com/astral-sh/ruff/pull/12101))
|
||||
|
||||
## 0.5.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- Use `space` separator before parenthesized expressions in comprehensions with leading comments ([#12282](https://github.com/astral-sh/ruff/pull/12282))
|
||||
- \[`flake8-async`\] Update `ASYNC100` to include `anyio` and `asyncio` ([#12221](https://github.com/astral-sh/ruff/pull/12221))
|
||||
- \[`flake8-async`\] Update `ASYNC109` to include `anyio` and `asyncio` ([#12236](https://github.com/astral-sh/ruff/pull/12236))
|
||||
- \[`flake8-async`\] Update `ASYNC110` to include `anyio` and `asyncio` ([#12261](https://github.com/astral-sh/ruff/pull/12261))
|
||||
- \[`flake8-async`\] Update `ASYNC115` to include `anyio` and `asyncio` ([#12262](https://github.com/astral-sh/ruff/pull/12262))
|
||||
- \[`flake8-async`\] Update `ASYNC116` to include `anyio` and `asyncio` ([#12266](https://github.com/astral-sh/ruff/pull/12266))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-return`\] Exempt properties from explicit return rule (`RET501`) ([#12243](https://github.com/astral-sh/ruff/pull/12243))
|
||||
- \[`numpy`\] Add `np.NAN`-to-`np.nan` diagnostic ([#12292](https://github.com/astral-sh/ruff/pull/12292))
|
||||
- \[`refurb`\] Make `list-reverse-copy` an unsafe fix ([#12303](https://github.com/astral-sh/ruff/pull/12303))
|
||||
|
||||
### Server
|
||||
|
||||
- Consider `include` and `extend-include` settings in native server ([#12252](https://github.com/astral-sh/ruff/pull/12252))
|
||||
- Include nested configurations in settings reloading ([#12253](https://github.com/astral-sh/ruff/pull/12253))
|
||||
|
||||
### CLI
|
||||
|
||||
- Omit code frames for fixes with empty ranges ([#12304](https://github.com/astral-sh/ruff/pull/12304))
|
||||
- Warn about formatter incompatibility for `D203` ([#12238](https://github.com/astral-sh/ruff/pull/12238))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make cache-write failures non-fatal on Windows ([#12302](https://github.com/astral-sh/ruff/pull/12302))
|
||||
- Treat `not` operations as boolean tests ([#12301](https://github.com/astral-sh/ruff/pull/12301))
|
||||
- \[`flake8-bandit`\] Avoid `S310` violations for HTTP-safe f-strings ([#12305](https://github.com/astral-sh/ruff/pull/12305))
|
||||
- \[`flake8-bandit`\] Support explicit string concatenations in S310 HTTP detection ([#12315](https://github.com/astral-sh/ruff/pull/12315))
|
||||
- \[`flake8-bandit`\] Fix `S113` false positive for `httpx` without `timeout` argument ([#12213](https://github.com/astral-sh/ruff/pull/12213))
|
||||
- \[`pycodestyle`\] Remove "non-obvious" allowance for E721 ([#12300](https://github.com/astral-sh/ruff/pull/12300))
|
||||
- \[`pyflakes`\] Consider `with` blocks as single-item branches for redefinition analysis ([#12311](https://github.com/astral-sh/ruff/pull/12311))
|
||||
- \[`refurb`\] Restrict forwarding for `newline` argument in `open()` calls to Python versions >= 3.10 ([#12244](https://github.com/astral-sh/ruff/pull/12244))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update help and documentation to reflect `--output-format full` default ([#12248](https://github.com/astral-sh/ruff/pull/12248))
|
||||
|
||||
### Performance
|
||||
|
||||
- Use more threads when discovering Python files ([#12258](https://github.com/astral-sh/ruff/pull/12258))
|
||||
|
||||
## 0.5.3
|
||||
|
||||
**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
|
||||
[documentation](https://docs.astral.sh/ruff/editors), including [setup guides for your editor of
|
||||
choice](https://docs.astral.sh/ruff/editors/setup) and [the language server
|
||||
itself](https://docs.astral.sh/ruff/editors/settings)**.
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Insert empty line between suite and alternative branch after function/class definition ([#12294](https://github.com/astral-sh/ruff/pull/12294))
|
||||
- \[`pyupgrade`\] Implement `unnecessary-default-type-args` (`UP043`) ([#12371](https://github.com/astral-sh/ruff/pull/12371))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Detect enumerate iterations in `loop-iterator-mutation` (`B909`) ([#12366](https://github.com/astral-sh/ruff/pull/12366))
|
||||
- \[`flake8-bugbear`\] Remove `discard`, `remove`, and `pop` allowance for `loop-iterator-mutation` (`B909`) ([#12365](https://github.com/astral-sh/ruff/pull/12365))
|
||||
- \[`pylint`\] Allow `repeated-equality-comparison` for mixed operations (`PLR1714`) ([#12369](https://github.com/astral-sh/ruff/pull/12369))
|
||||
- \[`pylint`\] Ignore `self` and `cls` when counting arguments (`PLR0913`) ([#12367](https://github.com/astral-sh/ruff/pull/12367))
|
||||
- \[`pylint`\] Use UTF-8 as default encoding in `unspecified-encoding` fix (`PLW1514`) ([#12370](https://github.com/astral-sh/ruff/pull/12370))
|
||||
|
||||
### Server
|
||||
|
||||
- Build settings index in parallel for the native server ([#12299](https://github.com/astral-sh/ruff/pull/12299))
|
||||
- Use fallback settings when indexing the project ([#12362](https://github.com/astral-sh/ruff/pull/12362))
|
||||
- Consider `--preview` flag for `server` subcommand for the linter and formatter ([#12208](https://github.com/astral-sh/ruff/pull/12208))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-comprehensions`\] Allow additional arguments for `sum` and `max` comprehensions (`C419`) ([#12364](https://github.com/astral-sh/ruff/pull/12364))
|
||||
- \[`pylint`\] Avoid dropping extra boolean operations in `repeated-equality-comparison` (`PLR1714`) ([#12368](https://github.com/astral-sh/ruff/pull/12368))
|
||||
- \[`pylint`\] Consider expression before statement when determining binding kind (`PLR1704`) ([#12346](https://github.com/astral-sh/ruff/pull/12346))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add docs for Ruff language server ([#12344](https://github.com/astral-sh/ruff/pull/12344))
|
||||
- Migrate to standalone docs repo ([#12341](https://github.com/astral-sh/ruff/pull/12341))
|
||||
- Update versioning policy for editor integration ([#12375](https://github.com/astral-sh/ruff/pull/12375))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Publish Wasm API to npm ([#12317](https://github.com/astral-sh/ruff/pull/12317))
|
||||
|
||||
## 0.5.4
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
|
||||
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
|
||||
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
|
||||
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
|
||||
|
||||
## 0.5.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
|
||||
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
|
||||
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
|
||||
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))
|
||||
|
||||
### Server
|
||||
|
||||
- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
|
||||
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
|
||||
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
|
||||
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
|
||||
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
|
||||
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
|
||||
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
|
||||
|
||||
## 0.5.6
|
||||
|
||||
Ruff 0.5.6 automatically enables linting and formatting of notebooks in _preview mode_.
|
||||
You can opt out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
extend-exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
### Preview features
|
||||
|
||||
- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
|
||||
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
|
||||
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))
|
||||
|
||||
### Server
|
||||
|
||||
- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
|
||||
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
|
||||
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
|
||||
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than just `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
|
||||
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
|
||||
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
|
||||
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
|
||||
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
|
||||
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
|
||||
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
|
||||
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
|
||||
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
|
||||
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
|
||||
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
|
||||
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))
|
||||
|
||||
## 0.5.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
|
||||
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
|
||||
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
|
||||
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
|
||||
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
|
||||
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
|
||||
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
|
||||
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
|
||||
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
|
||||
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
|
||||
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
|
||||
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))
|
||||
|
||||
### Server
|
||||
|
||||
- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))
|
||||
|
||||
### CLI
|
||||
|
||||
- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
|
||||
- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
|
||||
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
|
||||
- \[`flake8-comprehensions`\] Don't flag set comprehensions passed to `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
|
||||
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
|
||||
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))
|
||||
@@ -1,331 +0,0 @@
|
||||
# Changelog 0.6.x
|
||||
|
||||
## 0.6.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also the "Remapped rules" section, which may result in disabled rules.
|
||||
|
||||
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
|
||||
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
|
||||
|
||||
### Deprecations
|
||||
|
||||
The following rules are now deprecated:
|
||||
|
||||
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
|
||||
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
|
||||
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`
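
As with earlier remappings, any configuration that references the old code should move to the new one; a minimal sketch:

```toml
[tool.ruff.lint]
# Before: ignore = ["RUF025"]
ignore = ["C420"]
```
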
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
|
||||
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
|
||||
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
|
||||
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
|
||||
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
|
||||
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
|
||||
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
|
||||
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
|
||||
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
|
||||
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
|
||||
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
|
||||
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
|
||||
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
|
||||
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
|
||||
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
|
||||
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.
|
||||
|
||||
The following fixes have been stabilized:
|
||||
|
||||
- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
|
||||
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
|
||||
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
|
||||
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
|
||||
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
|
||||
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))
|
||||
|
||||
### Server
|
||||
|
||||
- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
|
||||
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
|
||||
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
|
||||
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
|
||||
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
|
||||
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
|
||||
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
|
||||
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
|
||||
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
|
||||
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
|
||||
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
|
||||
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))
|
||||
|
||||
## 0.6.1
|
||||
|
||||
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
|
||||
Ruff changed its behavior to lint and format Jupyter notebooks by default;
|
||||
however, due to an oversight, these files were still excluded by default if
|
||||
Ruff was run via pre-commit, leading to inconsistent behavior.
|
||||
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))
|
||||
|
||||
### Other changes
|
||||
|
||||
- \[`pep8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
|
||||
|
||||
## 0.6.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
|
||||
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
|
||||
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
|
||||
- \[`ruff`\] Implement check for Decimal called with a float literal (RUF032) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
|
||||
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
|
||||
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
|
||||
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
|
||||
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
|
||||
|
||||
### Server
|
||||
|
||||
- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
|
||||
- \[`pylint`\] Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
|
||||
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
|
||||
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
|
||||
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
|
||||
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
|
||||
|
||||
## 0.6.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
|
||||
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
|
||||
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
|
||||
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
|
||||
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
|
||||
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
|
||||
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
|
||||
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line ([#13105](https://github.com/astral-sh/ruff/pull/13105))
|
||||
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
|
||||
|
||||
## 0.6.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
|
||||
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
|
||||
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
|
||||
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
|
||||
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
|
||||
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
|
||||
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
|
||||
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
|
||||
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
|
||||
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))
|
||||
|
||||
### CLI
|
||||
|
||||
- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
|
||||
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
|
||||
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))
|
||||
|
||||
## 0.6.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pydoclint`\] Ignore `DOC201` when function name is "**new**" ([#13300](https://github.com/astral-sh/ruff/pull/13300))
|
||||
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
|
||||
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))
|
||||
|
||||
### Server
|
||||
|
||||
- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
|
||||
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))
|
||||
|
||||
### CLI
|
||||
|
||||
- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))
|
||||
|
||||
### Playground
|
||||
|
||||
- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
|
||||
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))
|
||||
|
||||
## 0.6.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
|
||||
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix off-by one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
|
||||
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
|
||||
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
|
||||
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))
|
||||
|
||||
## 0.6.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add Python version support to ruff analyze CLI ([#13426](https://github.com/astral-sh/ruff/pull/13426))
|
||||
- Add `exclude` support to `ruff analyze` ([#13425](https://github.com/astral-sh/ruff/pull/13425))
|
||||
- Fix parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pydocstyle`\] Don't autofix if the first line ends in a question mark (`D400`) ([#13399](https://github.com/astral-sh/ruff/pull/13399))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Respect `lint.exclude` in ruff check `--add-noqa` ([#13427](https://github.com/astral-sh/ruff/pull/13427))
|
||||
|
||||
### Performance
|
||||
|
||||
- Avoid tracking module resolver files in Salsa ([#13437](https://github.com/astral-sh/ruff/pull/13437))
|
||||
- Use `forget` for module resolver database ([#13438](https://github.com/astral-sh/ruff/pull/13438))
|
||||
|
||||
## 0.6.8
|
||||
|
||||
### Preview features
|
||||
|
||||
- Remove unnecessary parentheses around `match case` clauses ([#13510](https://github.com/astral-sh/ruff/pull/13510))
|
||||
- Parenthesize overlong `if` guards in `match..case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
|
||||
- Detect basic wildcard imports in `ruff analyze graph` ([#13486](https://github.com/astral-sh/ruff/pull/13486))
|
||||
- \[`pylint`\] Implement `boolean-chained-comparison` (`R1716`) ([#13435](https://github.com/astral-sh/ruff/pull/13435))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-simplify`\] Detect `SIM910` when using variadic keyword arguments, i.e., `**kwargs` ([#13503](https://github.com/astral-sh/ruff/pull/13503))
|
||||
- \[`pyupgrade`\] Avoid false negatives with non-reference shadowed bindings of loop variables (`UP028`) ([#13504](https://github.com/astral-sh/ruff/pull/13504))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Detect tuples bound to variadic positional arguments i.e. `*args` ([#13512](https://github.com/astral-sh/ruff/pull/13512))
|
||||
- Exit gracefully on broken pipe errors ([#13485](https://github.com/astral-sh/ruff/pull/13485))
|
||||
- Avoid panic when analyze graph hits broken pipe ([#13484](https://github.com/astral-sh/ruff/pull/13484))
|
||||
|
||||
### Performance
|
||||
|
||||
- Reuse `BTreeSets` in module resolver ([#13440](https://github.com/astral-sh/ruff/pull/13440))
|
||||
- Skip traversal for non-compound statements ([#13441](https://github.com/astral-sh/ruff/pull/13441))
|
||||
|
||||
## 0.6.9
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix codeblock dynamic line length calculation for indented docstring examples ([#13523](https://github.com/astral-sh/ruff/pull/13523))
|
||||
- \[`refurb`\] Mark `FURB118` fix as unsafe ([#13613](https://github.com/astral-sh/ruff/pull/13613))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pydocstyle`\] Don't raise `D208` when last line is non-empty ([#13372](https://github.com/astral-sh/ruff/pull/13372))
|
||||
- \[`pylint`\] Preserve trivia (i.e. comments) in `PLR5501` autofix ([#13573](https://github.com/astral-sh/ruff/pull/13573))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`pyflakes`\] Add `allow-unused-imports` setting for `unused-import` rule (`F401`) ([#13601](https://github.com/astral-sh/ruff/pull/13601))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Support ruff discovery in pip build environments ([#13591](https://github.com/astral-sh/ruff/pull/13591))
|
||||
- \[`flake8-bugbear`\] Avoid short circuiting `B017` for multiple context managers ([#13609](https://github.com/astral-sh/ruff/pull/13609))
|
||||
- \[`pylint`\] Do not offer an invalid fix for `PLR1716` when the comparisons contain parenthesis ([#13527](https://github.com/astral-sh/ruff/pull/13527))
|
||||
- \[`pyupgrade`\] Fix `UP043` to apply to `collections.abc.Generator` and `collections.abc.AsyncGenerator` ([#13611](https://github.com/astral-sh/ruff/pull/13611))
|
||||
- \[`refurb`\] Fix handling of slices in tuples for `FURB118`, e.g., `x[:, 1]` ([#13518](https://github.com/astral-sh/ruff/pull/13518))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update GitHub Action link to `astral-sh/ruff-action` ([#13551](https://github.com/astral-sh/ruff/pull/13551))
|
||||
@@ -1,185 +0,0 @@
|
||||
# Changelog 0.7.x
|
||||
|
||||
## 0.7.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.7.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
|
||||
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
|
||||
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
|
||||
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
|
||||
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
|
||||
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
|
||||
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
|
||||
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use [`lint.pyflakes.allowed-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports) instead.

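For example, an import that should stay exempt from `unused-import` (`F401`) now goes under the `pyflakes` table; a minimal sketch (the module name is illustrative):

```toml
[tool.ruff.lint.pyflakes]
# Imports listed here are not reported as unused by F401.
allowed-unused-imports = ["hvplot.pandas"]
```
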
### Formatter preview style
|
||||
|
||||
- Normalize implicit concatenated f-string quotes per part ([#13539](https://github.com/astral-sh/ruff/pull/13539))
|
||||
|
||||
### Preview linter features
|
||||
|
||||
- \[`refurb`\] Implement `hardcoded-string-charset` (`FURB156`) ([#13530](https://github.com/astral-sh/ruff/pull/13530))
- \[`refurb`\] Count codepoints, not bytes, for `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13631](https://github.com/astral-sh/ruff/pull/13631))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pylint`\] Mark `PLE1141` fix as unsafe ([#13629](https://github.com/astral-sh/ruff/pull/13629))
|
||||
- \[`flake8-async`\] Consider async generators to be "checkpoints" for `cancel-scope-no-checkpoint` (`ASYNC100`) ([#13639](https://github.com/astral-sh/ruff/pull/13639))
|
||||
- \[`flake8-bugbear`\] Do not suggest setting parameter `strict=` to `False` in `B905` diagnostic message ([#13656](https://github.com/astral-sh/ruff/pull/13656))
|
||||
- \[`flake8-todos`\] Only flag the word "TODO", not words starting with "todo" (`TD006`) ([#13640](https://github.com/astral-sh/ruff/pull/13640))
|
||||
- \[`pycodestyle`\] Fix whitespace-related false positives and false negatives inside type-parameter lists (`E231`, `E251`) ([#13704](https://github.com/astral-sh/ruff/pull/13704))
|
||||
- \[`flake8-simplify`\] Stabilize preview behavior for `SIM115` so that the rule can detect files
|
||||
being opened from a wider range of standard-library functions ([#12959](https://github.com/astral-sh/ruff/pull/12959)).
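
As a rough illustration of the stabilized behavior (a minimal sketch; the function and file names are made up, and `tempfile.NamedTemporaryFile` is assumed to be one of the standard-library callables the rule now recognizes), `SIM115` flags handles that are opened and closed manually instead of via a `with` block:

```python
import tempfile


def read_config(path):
    # SIM115: the handle is opened without a context manager.
    f = open(path)
    data = f.read()
    f.close()
    return data


def make_scratch_file():
    # With the stabilized behavior, open-like standard-library helpers
    # (e.g. tempfile.NamedTemporaryFile) are expected to be detected too.
    tmp = tempfile.NamedTemporaryFile()
    tmp.write(b"scratch")
    tmp.close()


def read_config_fixed(path):
    # Preferred: let a `with` block close the handle.
    with open(path) as f:
        return f.read()
```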
### CLI
|
||||
|
||||
- Add explanation of fixable in `--statistics` command ([#13774](https://github.com/astral-sh/ruff/pull/13774))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pyflakes`\] Allow `ipytest` cell magic (`F401`) ([#13745](https://github.com/astral-sh/ruff/pull/13745))
|
||||
- \[`flake8-use-pathlib`\] Fix `PTH123` false positive when `open` is passed a file descriptor ([#13616](https://github.com/astral-sh/ruff/pull/13616))
|
||||
- \[`flake8-bandit`\] Detect patterns from multi line SQL statements (`S608`) ([#13574](https://github.com/astral-sh/ruff/pull/13574))
|
||||
- \[`flake8-pyi`\] - Fix dropped expressions in `PYI030` autofix ([#13727](https://github.com/astral-sh/ruff/pull/13727))
|
||||
|
||||
## 0.7.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix `E221` and `E222` to flag missing or extra whitespace around `==` operator ([#13890](https://github.com/astral-sh/ruff/pull/13890))
|
||||
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
|
||||
- Formatter: Join implicit concatenated strings when they fit on a line ([#13663](https://github.com/astral-sh/ruff/pull/13663))
|
||||
- \[`pylint`\] Restrict `iteration-over-set` to only work on sets of literals (`PLC0208`) ([#13731](https://github.com/astral-sh/ruff/pull/13731))
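
A minimal sketch of the narrowed behavior (the function and variable names are made up for illustration):

```python
def demo(a, b):
    # PLC0208: iterating over a set literal of constants; a tuple avoids
    # building a throwaway set.
    for value in {1, 2, 3}:
        print(value)

    # With this change the rule no longer fires here, because the members
    # are not all literals and the set's deduplication may be intentional.
    for value in {a, b}:
        print(value)
```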
### Rule changes
|
||||
|
||||
- \[`flake8-type-checking`\] Support auto-quoting when annotations contain quotes ([#11811](https://github.com/astral-sh/ruff/pull/11811))
|
||||
|
||||
### Server
|
||||
|
||||
- Avoid indexing the workspace for single-file mode ([#13770](https://github.com/astral-sh/ruff/pull/13770))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make `ARG002` compatible with `EM101` when raising `NotImplementedError` ([#13714](https://github.com/astral-sh/ruff/pull/13714))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Introduce more Docker tags for Ruff (similar to uv) ([#13274](https://github.com/astral-sh/ruff/pull/13274))
|
||||
|
||||
## 0.7.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix formatting of single with-item with trailing comment ([#14005](https://github.com/astral-sh/ruff/pull/14005))
|
||||
- \[`pyupgrade`\] Add PEP 646 `Unpack` conversion to `*` with fix (`UP044`) ([#13988](https://github.com/astral-sh/ruff/pull/13988))
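
A small sketch of the rewrite the fix performs, assuming Python 3.11+ where `typing.TypeVarTuple` and the PEP 646 star syntax are available:

```python
from typing import TypeVarTuple, Unpack

Ts = TypeVarTuple("Ts")


# Flagged: `Unpack[...]` in a position where the star syntax works.
def before(*args: Unpack[Ts]) -> None: ...


# After the UP044 fix (valid syntax on Python 3.11 and later):
def after(*args: *Ts) -> None: ...
```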
### Rule changes
|
||||
|
||||
- Regenerate `known_stdlibs.rs` with stdlibs 2024.10.25 ([#13963](https://github.com/astral-sh/ruff/pull/13963))
|
||||
- \[`flake8-no-pep420`\] Skip namespace package enforcement for PEP 723 scripts (`INP001`) ([#13974](https://github.com/astral-sh/ruff/pull/13974))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix server panic when undoing an edit ([#14010](https://github.com/astral-sh/ruff/pull/14010))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix issues in discovering ruff in pip build environments ([#13881](https://github.com/astral-sh/ruff/pull/13881))
|
||||
- \[`flake8-type-checking`\] Fix false positive for `singledispatchmethod` (`TCH003`) ([#13941](https://github.com/astral-sh/ruff/pull/13941))
|
||||
- \[`flake8-type-checking`\] Treat return type of `singledispatch` as runtime-required (`TCH003`) ([#13957](https://github.com/astral-sh/ruff/pull/13957))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`flake8-simplify`\] Include caveats of enabling `if-else-block-instead-of-if-exp` (`SIM108`) ([#14019](https://github.com/astral-sh/ruff/pull/14019))
|
||||
|
||||
## 0.7.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Disallow single-line implicit concatenated strings ([#13928](https://github.com/astral-sh/ruff/pull/13928))
|
||||
- \[`flake8-pyi`\] Include all Python file types for `PYI006` and `PYI066` ([#14059](https://github.com/astral-sh/ruff/pull/14059))
|
||||
- \[`flake8-simplify`\] Implement `split-of-static-string` (`SIM905`) ([#14008](https://github.com/astral-sh/ruff/pull/14008))
|
||||
- \[`refurb`\] Implement `subclass-builtin` (`FURB189`) ([#14105](https://github.com/astral-sh/ruff/pull/14105))
|
||||
- \[`ruff`\] Improve diagnostic messages and docs (`RUF031`, `RUF032`, `RUF034`) ([#14068](https://github.com/astral-sh/ruff/pull/14068))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Detect items that hash to same value in duplicate sets (`B033`, `PLC0208`) ([#14064](https://github.com/astral-sh/ruff/pull/14064))
|
||||
- \[`eradicate`\] Better detection of IntelliJ language injection comments (`ERA001`) ([#14094](https://github.com/astral-sh/ruff/pull/14094))
|
||||
- \[`flake8-pyi`\] Add autofix for `docstring-in-stub` (`PYI021`) ([#14150](https://github.com/astral-sh/ruff/pull/14150))
|
||||
- \[`flake8-pyi`\] Update `duplicate-literal-member` (`PYI062`) to always provide an autofix ([#14188](https://github.com/astral-sh/ruff/pull/14188))
|
||||
- \[`pyflakes`\] Detect items that hash to same value in duplicate dictionaries (`F601`) ([#14065](https://github.com/astral-sh/ruff/pull/14065))
|
||||
- \[`ruff`\] Fix false positive for decorators (`RUF028`) ([#14061](https://github.com/astral-sh/ruff/pull/14061))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid parsing joint rule codes as distinct codes in `# noqa` ([#12809](https://github.com/astral-sh/ruff/pull/12809))
- \[`eradicate`\] Ignore `# language=` in the commented-out-code rule (`ERA001`) ([#14069](https://github.com/astral-sh/ruff/pull/14069))
- \[`flake8-bugbear`\] Do not run `mutable-argument-default` on stubs (`B006`) ([#14058](https://github.com/astral-sh/ruff/pull/14058))
- \[`flake8-builtins`\] Skip lambda expressions in `builtin-argument-shadowing` (`A002`) ([#14144](https://github.com/astral-sh/ruff/pull/14144))
- \[`flake8-comprehensions`\] Also remove trailing comma while fixing `C409` and `C419` ([#14097](https://github.com/astral-sh/ruff/pull/14097))
- \[`flake8-simplify`\] Allow `open` without context manager in `return` statement (`SIM115`) ([#14066](https://github.com/astral-sh/ruff/pull/14066))
- \[`pylint`\] Respect hash-equivalent literals in `iteration-over-set` (`PLC0208`) ([#14063](https://github.com/astral-sh/ruff/pull/14063))
- \[`pylint`\] Update known dunder methods for Python 3.13 (`PLW3201`) ([#14146](https://github.com/astral-sh/ruff/pull/14146))
- \[`pyupgrade`\] Ignore kwarg unpacking for `UP044` ([#14053](https://github.com/astral-sh/ruff/pull/14053))
- \[`refurb`\] Parse more exotic decimal strings in `verbose-decimal-constructor` (`FURB157`) ([#14098](https://github.com/astral-sh/ruff/pull/14098))
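
For context, a minimal example of the constructor pattern `FURB157` rewrites (the fix above extends which string forms the rule can parse); the constant names here are illustrative:

```python
from decimal import Decimal

# FURB157: the string round-trip is unnecessary for plain integer values.
BALANCE = Decimal("0")
LIMIT = Decimal("100")

# Preferred:
BALANCE_FIXED = Decimal(0)
LIMIT_FIXED = Decimal(100)
```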
### Documentation
|
||||
|
||||
- Add links to missing related options within rule documentations ([#13971](https://github.com/astral-sh/ruff/pull/13971))
|
||||
- Add rule short code to mkdocs tags to allow searching via rule codes ([#14040](https://github.com/astral-sh/ruff/pull/14040))
|
||||
|
||||
## 0.7.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-datetimez`\] Detect usages of `datetime.max`/`datetime.min` (`DTZ901`) ([#14288](https://github.com/astral-sh/ruff/pull/14288))
|
||||
- \[`flake8-logging`\] Implement `root-logger-calls` (`LOG015`) ([#14302](https://github.com/astral-sh/ruff/pull/14302))
|
||||
- \[`flake8-no-pep420`\] Detect empty implicit namespace packages (`INP001`) ([#14236](https://github.com/astral-sh/ruff/pull/14236))
|
||||
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI019`) ([#14238](https://github.com/astral-sh/ruff/pull/14238))
|
||||
- \[`perflint`\] Implement quick-fix for `manual-list-comprehension` (`PERF401`) ([#13919](https://github.com/astral-sh/ruff/pull/13919))
|
||||
- \[`pylint`\] Implement `shallow-copy-environ` (`W1507`) ([#14241](https://github.com/astral-sh/ruff/pull/14241))
|
||||
- \[`ruff`\] Implement `none-not-at-end-of-union` (`RUF036`) ([#14314](https://github.com/astral-sh/ruff/pull/14314))
|
||||
- \[`ruff`\] Implement `unsafe-markup-call` from the `flake8-markupsafe` plugin (`RUF035`) ([#14224](https://github.com/astral-sh/ruff/pull/14224))
|
||||
- \[`ruff`\] Report problems for `attrs` dataclasses (`RUF008`, `RUF009`) ([#14327](https://github.com/astral-sh/ruff/pull/14327))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-boolean-trap`\] Exclude dunder methods that define operators (`FBT001`) ([#14203](https://github.com/astral-sh/ruff/pull/14203))
|
||||
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI034`) ([#14217](https://github.com/astral-sh/ruff/pull/14217))
|
||||
- \[`flake8-pyi`\] Always autofix `duplicate-union-members` (`PYI016`) ([#14270](https://github.com/astral-sh/ruff/pull/14270))
|
||||
- \[`flake8-pyi`\] Improve autofix for nested and mixed type unions for `unnecessary-type-union` (`PYI055`) ([#14272](https://github.com/astral-sh/ruff/pull/14272))
|
||||
- \[`flake8-pyi`\] Mark fix as unsafe when type annotation contains comments for `duplicate-literal-member` (`PYI062`) ([#14268](https://github.com/astral-sh/ruff/pull/14268))
|
||||
|
||||
### Server
|
||||
|
||||
- Use the current working directory to resolve settings from `ruff.configuration` ([#14352](https://github.com/astral-sh/ruff/pull/14352))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid conflicts between `PLC014` (`useless-import-alias`) and `I002` (`missing-required-import`) by considering `lint.isort.required-imports` for `PLC014` ([#14287](https://github.com/astral-sh/ruff/pull/14287))
|
||||
- \[`flake8-type-checking`\] Skip quoting annotation if it becomes invalid syntax (`TCH001`)
|
||||
- \[`flake8-pyi`\] Avoid using `typing.Self` in stub files pre-Python 3.11 (`PYI034`) ([#14230](https://github.com/astral-sh/ruff/pull/14230))
|
||||
- \[`flake8-pytest-style`\] Flag `pytest.raises` call with keyword argument `expected_exception` (`PT011`) ([#14298](https://github.com/astral-sh/ruff/pull/14298))
|
||||
- \[`flake8-simplify`\] Infer "unknown" truthiness for literal iterables whose items are all unpacks (`SIM222`) ([#14263](https://github.com/astral-sh/ruff/pull/14263))
|
||||
- \[`flake8-type-checking`\] Fix false positives for `typing.Annotated` (`TCH001`) ([#14311](https://github.com/astral-sh/ruff/pull/14311))
|
||||
- \[`pylint`\] Allow `await` at the top-level scope of a notebook (`PLE1142`) ([#14225](https://github.com/astral-sh/ruff/pull/14225))
|
||||
- \[`pylint`\] Fix miscellaneous issues in `await-outside-async` detection (`PLE1142`) ([#14218](https://github.com/astral-sh/ruff/pull/14218))
|
||||
- \[`pyupgrade`\] Avoid applying PEP 646 rewrites in invalid contexts (`UP044`) ([#14234](https://github.com/astral-sh/ruff/pull/14234))
|
||||
- \[`pyupgrade`\] Detect permutations in redundant open modes (`UP015`) ([#14255](https://github.com/astral-sh/ruff/pull/14255))
|
||||
- \[`refurb`\] Avoid triggering `hardcoded-string-charset` for reordered sets (`FURB156`) ([#14233](https://github.com/astral-sh/ruff/pull/14233))
|
||||
- \[`refurb`\] Further special cases added to `verbose-decimal-constructor` (`FURB157`) ([#14216](https://github.com/astral-sh/ruff/pull/14216))
|
||||
- \[`refurb`\] Use `UserString` instead of non-existent `UserStr` (`FURB189`) ([#14209](https://github.com/astral-sh/ruff/pull/14209))
|
||||
- \[`ruff`\] Avoid treating lowercase letters as `# noqa` codes (`RUF100`) ([#14229](https://github.com/astral-sh/ruff/pull/14229))
|
||||
- \[`ruff`\] Do not report when `Optional` has no type arguments (`RUF013`) ([#14181](https://github.com/astral-sh/ruff/pull/14181))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add "Notebook behavior" section for `F704`, `PLE1142` ([#14266](https://github.com/astral-sh/ruff/pull/14266))
|
||||
- Document comment policy around fix safety ([#14300](https://github.com/astral-sh/ruff/pull/14300))
|
||||
@@ -1,304 +0,0 @@
|
||||
# Changelog 0.8.x
|
||||
|
||||
## 0.8.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.8.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first line of the problematic docstring; previously, they did not.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
### Removed Rules
|
||||
|
||||
The following deprecated rules have been removed:
|
||||
|
||||
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
|
||||
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
|
||||
- [`syntax-error`](https://docs.astral.sh/ruff/rules/syntax-error/) (`E999`)
|
||||
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
|
||||
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
|
||||
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- [`flake8-type-checking`](https://docs.astral.sh/ruff/rules/#flake8-type-checking-tc): `TCH` to `TC`
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`builtin-import-shadowing`](https://docs.astral.sh/ruff/rules/builtin-import-shadowing/) (`A004`)
|
||||
- [`mutable-contextvar-default`](https://docs.astral.sh/ruff/rules/mutable-contextvar-default/) (`B039`)
|
||||
- [`fast-api-redundant-response-model`](https://docs.astral.sh/ruff/rules/fast-api-redundant-response-model/) (`FAST001`)
|
||||
- [`fast-api-non-annotated-dependency`](https://docs.astral.sh/ruff/rules/fast-api-non-annotated-dependency/) (`FAST002`)
|
||||
- [`dict-index-missing-items`](https://docs.astral.sh/ruff/rules/dict-index-missing-items/) (`PLC0206`)
|
||||
- [`pep484-style-positional-only-parameter`](https://docs.astral.sh/ruff/rules/pep484-style-positional-only-parameter/) (`PYI063`)
|
||||
- [`redundant-final-literal`](https://docs.astral.sh/ruff/rules/redundant-final-literal/) (`PYI064`)
|
||||
- [`bad-version-info-order`](https://docs.astral.sh/ruff/rules/bad-version-info-order/) (`PYI066`)
|
||||
- [`parenthesize-chained-operators`](https://docs.astral.sh/ruff/rules/parenthesize-chained-operators/) (`RUF021`)
|
||||
- [`unsorted-dunder-all`](https://docs.astral.sh/ruff/rules/unsorted-dunder-all/) (`RUF022`)
|
||||
- [`unsorted-dunder-slots`](https://docs.astral.sh/ruff/rules/unsorted-dunder-slots/) (`RUF023`)
|
||||
- [`assert-with-print-message`](https://docs.astral.sh/ruff/rules/assert-with-print-message/) (`RUF030`)
|
||||
- [`unnecessary-default-type-args`](https://docs.astral.sh/ruff/rules/unnecessary-default-type-args/) (`UP043`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`ambiguous-variable-name`](https://docs.astral.sh/ruff/rules/ambiguous-variable-name/) (`E741`): Violations in stub files are now ignored. Stub authors typically don't control variable names.
|
||||
- [`printf-string-formatting`](https://docs.astral.sh/ruff/rules/printf-string-formatting/) (`UP031`): Report all `printf`-like usages even if no autofix is available
|
||||
|
||||
The following fixes have been stabilized:
|
||||
|
||||
- [`zip-instead-of-pairwise`](https://docs.astral.sh/ruff/rules/zip-instead-of-pairwise/) (`RUF007`)
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-datetimez`\] Exempt `min.time()` and `max.time()` (`DTZ901`) ([#14394](https://github.com/astral-sh/ruff/pull/14394))
|
||||
- \[`flake8-pie`\] Mark fix as unsafe if the following statement is a string literal (`PIE790`) ([#14393](https://github.com/astral-sh/ruff/pull/14393))
|
||||
- \[`flake8-pyi`\] New rule `redundant-none-literal` (`PYI061`) ([#14316](https://github.com/astral-sh/ruff/pull/14316))
|
||||
- \[`flake8-pyi`\] Add autofix for `redundant-numeric-union` (`PYI041`) ([#14273](https://github.com/astral-sh/ruff/pull/14273))
|
||||
- \[`ruff`\] New rule `map-int-version-parsing` (`RUF048`) ([#14373](https://github.com/astral-sh/ruff/pull/14373))
|
||||
- \[`ruff`\] New rule `redundant-bool-literal` (`RUF038`) ([#14319](https://github.com/astral-sh/ruff/pull/14319))
|
||||
- \[`ruff`\] New rule `unraw-re-pattern` (`RUF039`) ([#14446](https://github.com/astral-sh/ruff/pull/14446))
|
||||
- \[`pycodestyle`\] Exempt `pytest.importorskip()` calls (`E402`) ([#14474](https://github.com/astral-sh/ruff/pull/14474))
|
||||
- \[`pylint`\] Autofix suggests using sets when possible (`PLR1714`) ([#14372](https://github.com/astral-sh/ruff/pull/14372))
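
A hypothetical before/after showing the kind of rewrite the autofix now prefers when the compared values are hashable literals:

```python
def is_weekend(day):
    # PLR1714: repeated equality comparisons against the same variable.
    if day == "sat" or day == "sun":
        return True
    return False


def is_weekend_fixed(day):
    # With this change the suggested fix uses a set literal when the
    # comparators are hashable, so the membership test is O(1).
    return day in {"sat", "sun"}
```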
### Rule changes
|
||||
|
||||
- [`invalid-pyproject-toml`](https://docs.astral.sh/ruff/rules/invalid-pyproject-toml/) (`RUF200`): Updated to reflect the provisionally accepted [PEP 639](https://peps.python.org/pep-0639/).
|
||||
- \[`flake8-pyi`\] Avoid panic in unfixable case (`PYI041`) ([#14402](https://github.com/astral-sh/ruff/pull/14402))
|
||||
- \[`flake8-type-checking`\] Correctly handle quotes in subscript expression when generating an autofix ([#14371](https://github.com/astral-sh/ruff/pull/14371))
|
||||
- \[`pylint`\] Suggest correct autofix for `__contains__` (`PLC2801`) ([#14424](https://github.com/astral-sh/ruff/pull/14424))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Ruff now emits a warning instead of an error when a configuration [`ignore`](https://docs.astral.sh/ruff/settings/#lint_ignore)s a rule that has been removed ([#14435](https://github.com/astral-sh/ruff/pull/14435))
|
||||
- Ruff now validates that `lint.flake8-import-conventions.aliases` only uses valid module names and aliases ([#14477](https://github.com/astral-sh/ruff/pull/14477))
|
||||
|
||||
## 0.8.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Avoid invalid syntax for format-spec with quotes for all Python versions ([#14625](https://github.com/astral-sh/ruff/pull/14625))
|
||||
- Formatter: Consider quotes inside format-specs when choosing the quotes for an f-string ([#14493](https://github.com/astral-sh/ruff/pull/14493))
|
||||
- Formatter: Do not consider f-strings with escaped newlines as multiline ([#14624](https://github.com/astral-sh/ruff/pull/14624))
|
||||
- Formatter: Fix f-string formatting in assignment statement ([#14454](https://github.com/astral-sh/ruff/pull/14454))
|
||||
- Formatter: Fix unnecessary space around power operator (`**`) in overlong f-string expressions ([#14489](https://github.com/astral-sh/ruff/pull/14489))
|
||||
- \[`airflow`\] Avoid implicit `schedule` argument to `DAG` and `@dag` (`AIR301`) ([#14581](https://github.com/astral-sh/ruff/pull/14581))
|
||||
- \[`flake8-builtins`\] Exempt private built-in modules (`A005`) ([#14505](https://github.com/astral-sh/ruff/pull/14505))
|
||||
- \[`flake8-pytest-style`\] Fix `pytest.mark.parametrize` rules to check calls instead of decorators ([#14515](https://github.com/astral-sh/ruff/pull/14515))
|
||||
- \[`flake8-type-checking`\] Implement `runtime-cast-value` (`TC006`) ([#14511](https://github.com/astral-sh/ruff/pull/14511))
|
||||
- \[`flake8-type-checking`\] Implement `unquoted-type-alias` (`TC007`) and `quoted-type-alias` (`TC008`) ([#12927](https://github.com/astral-sh/ruff/pull/12927))
|
||||
- \[`flake8-use-pathlib`\] Recommend `Path.iterdir()` over `os.listdir()` (`PTH208`) ([#14509](https://github.com/astral-sh/ruff/pull/14509))
|
||||
- \[`pylint`\] Extend `invalid-envvar-default` to detect `os.environ.get` (`PLW1508`) ([#14512](https://github.com/astral-sh/ruff/pull/14512))
|
||||
- \[`pylint`\] Implement `len-test` (`PLC1802`) ([#14309](https://github.com/astral-sh/ruff/pull/14309))
|
||||
- \[`refurb`\] Fix bug where methods defined using lambdas were flagged by `FURB118` ([#14639](https://github.com/astral-sh/ruff/pull/14639))
|
||||
- \[`ruff`\] Auto-add `r` prefix when string has no backslashes for `unraw-re-pattern` (`RUF039`) ([#14536](https://github.com/astral-sh/ruff/pull/14536))
|
||||
- \[`ruff`\] Implement `invalid-assert-message-literal-argument` (`RUF040`) ([#14488](https://github.com/astral-sh/ruff/pull/14488))
|
||||
- \[`ruff`\] Implement `unnecessary-nested-literal` (`RUF041`) ([#14323](https://github.com/astral-sh/ruff/pull/14323))
|
||||
- \[`ruff`\] Implement `unnecessary-regular-expression` (`RUF055`) ([#14659](https://github.com/astral-sh/ruff/pull/14659))
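
A minimal sketch of the pattern the rule targets, assuming the regex contains no metacharacters so a plain string method is equivalent:

```python
import re


def strip_prefix(name: str) -> str:
    # RUF055: the pattern is literal text, so str.replace is clearer
    # and avoids compiling a regular expression.
    return re.sub("tmp_", "", name)


def strip_prefix_fixed(name: str) -> str:
    return name.replace("tmp_", "")
```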
### Rule changes
|
||||
|
||||
- Ignore more rules for stub files ([#14541](https://github.com/astral-sh/ruff/pull/14541))
|
||||
- \[`pep8-naming`\] Eliminate false positives for single-letter names (`N811`, `N814`) ([#14584](https://github.com/astral-sh/ruff/pull/14584))
|
||||
- \[`pyflakes`\] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) ([#14615](https://github.com/astral-sh/ruff/pull/14615))
|
||||
- \[`ruff`\] Detect redirected-noqa in file-level comments (`RUF101`) ([#14635](https://github.com/astral-sh/ruff/pull/14635))
|
||||
- \[`ruff`\] Mark fixes for `unsorted-dunder-all` and `unsorted-dunder-slots` as unsafe when there are complex comments in the sequence (`RUF022`, `RUF023`) ([#14560](https://github.com/astral-sh/ruff/pull/14560))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid fixing code to `None | None` for `redundant-none-literal` (`PYI061`) and `never-union` (`RUF020`) ([#14583](https://github.com/astral-sh/ruff/pull/14583), [#14589](https://github.com/astral-sh/ruff/pull/14589))
|
||||
- \[`flake8-bugbear`\] Fix `mutable-contextvar-default` to resolve annotated function calls properly (`B039`) ([#14532](https://github.com/astral-sh/ruff/pull/14532))
|
||||
- \[`flake8-pyi`, `ruff`\] Fix traversal of nested literals and unions (`PYI016`, `PYI051`, `PYI055`, `PYI062`, `RUF041`) ([#14641](https://github.com/astral-sh/ruff/pull/14641))
|
||||
- \[`flake8-pyi`\] Avoid rewriting invalid type expressions in `unnecessary-type-union` (`PYI055`) ([#14660](https://github.com/astral-sh/ruff/pull/14660))
|
||||
- \[`flake8-type-checking`\] Avoid syntax errors and type checking problem for quoted annotations autofix (`TC003`, `TC006`) ([#14634](https://github.com/astral-sh/ruff/pull/14634))
|
||||
- \[`pylint`\] Do not wrap function calls in parentheses in the fix for unnecessary-dunder-call (`PLC2801`) ([#14601](https://github.com/astral-sh/ruff/pull/14601))
|
||||
- \[`ruff`\] Handle `attrs`'s `auto_attribs` correctly (`RUF009`) ([#14520](https://github.com/astral-sh/ruff/pull/14520))
|
||||
|
||||
## 0.8.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Avoid deprecated values (`AIR302`) ([#14582](https://github.com/astral-sh/ruff/pull/14582))
|
||||
- \[`airflow`\] Extend removed names for `AIR302` ([#14734](https://github.com/astral-sh/ruff/pull/14734))
|
||||
- \[`ruff`\] Extend `unnecessary-regular-expression` to non-literal strings (`RUF055`) ([#14679](https://github.com/astral-sh/ruff/pull/14679))
|
||||
- \[`ruff`\] Implement `used-dummy-variable` (`RUF052`) ([#14611](https://github.com/astral-sh/ruff/pull/14611))
|
||||
- \[`ruff`\] Implement `unnecessary-cast-to-int` (`RUF046`) ([#14697](https://github.com/astral-sh/ruff/pull/14697))
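
An illustrative case, assuming the wrapped expression (here `len(...)`) is already known to produce an `int`:

```python
def count_lines(text: str) -> int:
    # RUF046: len() already returns an int, so the cast is redundant.
    return int(len(text.splitlines()))


def count_lines_fixed(text: str) -> int:
    return len(text.splitlines())
```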
### Rule changes
|
||||
|
||||
- \[`airflow`\] Check `AIR001` from builtin or providers `operators` module ([#14631](https://github.com/astral-sh/ruff/pull/14631))
|
||||
- \[`flake8-pytest-style`\] Remove `@` in `pytest.mark.parametrize` rule messages ([#14770](https://github.com/astral-sh/ruff/pull/14770))
|
||||
- \[`pandas-vet`\] Skip rules if the `pandas` module hasn't been seen ([#14671](https://github.com/astral-sh/ruff/pull/14671))
|
||||
- \[`pylint`\] Fix false negatives for `ascii` and `sorted` in `len-as-condition` (`PLC1802`) ([#14692](https://github.com/astral-sh/ruff/pull/14692))
|
||||
- \[`refurb`\] Guard `hashlib` imports and mark `hashlib-digest-hex` fix as safe (`FURB181`) ([#14694](https://github.com/astral-sh/ruff/pull/14694))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-import-conventions`\] Improve syntax check for aliases supplied in configuration for `unconventional-import-alias` (`ICN001`) ([#14745](https://github.com/astral-sh/ruff/pull/14745))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Revert: [pyflakes] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) (#14615) ([#14726](https://github.com/astral-sh/ruff/pull/14726))
|
||||
- \[`pep8-naming`\] Avoid false positive for `class Bar(type(foo))` (`N804`) ([#14683](https://github.com/astral-sh/ruff/pull/14683))
|
||||
- \[`pycodestyle`\] Handle f-strings properly for `invalid-escape-sequence` (`W605`) ([#14748](https://github.com/astral-sh/ruff/pull/14748))
|
||||
- \[`pylint`\] Ignore `@overload` in `PLR0904` ([#14730](https://github.com/astral-sh/ruff/pull/14730))
|
||||
- \[`refurb`\] Handle non-finite decimals in `verbose-decimal-constructor` (`FURB157`) ([#14596](https://github.com/astral-sh/ruff/pull/14596))
|
||||
- \[`ruff`\] Avoid emitting `assignment-in-assert` when all references to the assigned variable are themselves inside `assert`s (`RUF018`) ([#14661](https://github.com/astral-sh/ruff/pull/14661))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Improve docs for `flake8-use-pathlib` rules ([#14741](https://github.com/astral-sh/ruff/pull/14741))
|
||||
- Improve error messages and docs for `flake8-comprehensions` rules ([#14729](https://github.com/astral-sh/ruff/pull/14729))
|
||||
- \[`flake8-type-checking`\] Expands `TC006` docs to better explain itself ([#14749](https://github.com/astral-sh/ruff/pull/14749))
|
||||
|
||||
## 0.8.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix f-string formatting that removed an overlong implicit concatenated string in the expression part ([#14811](https://github.com/astral-sh/ruff/pull/14811))
|
||||
- \[`airflow`\] Add fix to remove deprecated keyword arguments (`AIR302`) ([#14887](https://github.com/astral-sh/ruff/pull/14887))
|
||||
- \[`airflow`\] Extend rule to include deprecated names for Airflow 3.0 (`AIR302`) ([#14765](https://github.com/astral-sh/ruff/pull/14765) and [#14804](https://github.com/astral-sh/ruff/pull/14804))
|
||||
- \[`flake8-bugbear`\] Improve error messages for `except*` (`B025`, `B029`, `B030`, `B904`) ([#14815](https://github.com/astral-sh/ruff/pull/14815))
|
||||
- \[`flake8-bugbear`\] `itertools.batched()` without explicit `strict` (`B911`) ([#14408](https://github.com/astral-sh/ruff/pull/14408))
|
||||
- \[`flake8-use-pathlib`\] Dotless suffix passed to `Path.with_suffix()` (`PTH210`) ([#14779](https://github.com/astral-sh/ruff/pull/14779))
|
||||
- \[`pylint`\] Include parentheses and multiple comparators in check for `boolean-chained-comparison` (`PLR1716`) ([#14781](https://github.com/astral-sh/ruff/pull/14781))
|
||||
- \[`ruff`\] Do not simplify `round()` calls (`RUF046`) ([#14832](https://github.com/astral-sh/ruff/pull/14832))
|
||||
- \[`ruff`\] Don't emit `used-dummy-variable` on function parameters (`RUF052`) ([#14818](https://github.com/astral-sh/ruff/pull/14818))
|
||||
- \[`ruff`\] Implement `if-key-in-dict-del` (`RUF051`) ([#14553](https://github.com/astral-sh/ruff/pull/14553))
|
||||
- \[`ruff`\] Mark autofix for `RUF052` as always unsafe ([#14824](https://github.com/astral-sh/ruff/pull/14824))
|
||||
- \[`ruff`\] Teach autofix for `used-dummy-variable` about TypeVars etc. (`RUF052`) ([#14819](https://github.com/astral-sh/ruff/pull/14819))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Offer unsafe autofix for `no-explicit-stacklevel` (`B028`) ([#14829](https://github.com/astral-sh/ruff/pull/14829))
|
||||
- \[`flake8-pyi`\] Skip all type definitions in `string-or-bytes-too-long` (`PYI053`) ([#14797](https://github.com/astral-sh/ruff/pull/14797))
|
||||
- \[`pyupgrade`\] Do not report when a UTF-8 comment is followed by a non-UTF-8 one (`UP009`) ([#14728](https://github.com/astral-sh/ruff/pull/14728))
|
||||
- \[`pyupgrade`\] Mark fixes for `convert-typed-dict-functional-to-class` and `convert-named-tuple-functional-to-class` as unsafe if they will remove comments (`UP013`, `UP014`) ([#14842](https://github.com/astral-sh/ruff/pull/14842))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Raise syntax error for mixing `except` and `except*` ([#14895](https://github.com/astral-sh/ruff/pull/14895))
|
||||
- \[`flake8-bugbear`\] Fix `B028` to allow `stacklevel` to be explicitly assigned as a positional argument ([#14868](https://github.com/astral-sh/ruff/pull/14868))
|
||||
- \[`flake8-bugbear`\] Skip `B028` if `warnings.warn` is called with `*args` or `**kwargs` ([#14870](https://github.com/astral-sh/ruff/pull/14870))
|
||||
- \[`flake8-comprehensions`\] Skip iterables with named expressions in `unnecessary-map` (`C417`) ([#14827](https://github.com/astral-sh/ruff/pull/14827))
|
||||
- \[`flake8-pyi`\] Also remove `self` and `cls`'s annotation (`PYI034`) ([#14801](https://github.com/astral-sh/ruff/pull/14801))
|
||||
- \[`flake8-pytest-style`\] Fix `pytest-parametrize-names-wrong-type` (`PT006`) to edit both `argnames` and `argvalues` if both of them are single-element tuples/lists ([#14699](https://github.com/astral-sh/ruff/pull/14699))
|
||||
- \[`perflint`\] Improve autofix for `PERF401` ([#14369](https://github.com/astral-sh/ruff/pull/14369))
|
||||
- \[`pylint`\] Fix `PLW1508` false positive for default string created via a mult operation ([#14841](https://github.com/astral-sh/ruff/pull/14841))
|
||||
|
||||
## 0.8.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Extend `AIR302` with additional functions and classes ([#15015](https://github.com/astral-sh/ruff/pull/15015))
|
||||
- \[`airflow`\] Implement `moved-to-provider-in-3` for modules that has been moved to Airflow providers (`AIR303`) ([#14764](https://github.com/astral-sh/ruff/pull/14764))
|
||||
- \[`flake8-use-pathlib`\] Extend check for invalid path suffix to include the case `"."` (`PTH210`) ([#14902](https://github.com/astral-sh/ruff/pull/14902))
|
||||
- \[`perflint`\] Fix panic in `PERF401` when list variable is after the `for` loop ([#14971](https://github.com/astral-sh/ruff/pull/14971))
|
||||
- \[`perflint`\] Simplify finding the loop target in `PERF401` ([#15025](https://github.com/astral-sh/ruff/pull/15025))
|
||||
- \[`pylint`\] Preserve original value format (`PLR6104`) ([#14978](https://github.com/astral-sh/ruff/pull/14978))
|
||||
- \[`ruff`\] Avoid false positives for `RUF027` for typing context bindings ([#15037](https://github.com/astral-sh/ruff/pull/15037))
|
||||
- \[`ruff`\] Check for ambiguous pattern passed to `pytest.raises()` (`RUF043`) ([#14966](https://github.com/astral-sh/ruff/pull/14966))
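
A sketch of why an unescaped pattern is ambiguous here; the test and message are hypothetical, and `match=` is assumed (as pytest documents) to be interpreted as a regular expression:

```python
import pytest


def divide(a, b):
    if b == 0:
        raise ValueError("cannot divide (b == 0)")
    return a / b


def test_divide_by_zero():
    # RUF043: the parentheses are regex metacharacters, so this pattern
    # matches "cannot divide b == 0", not the literal message above.
    with pytest.raises(ValueError, match="cannot divide (b == 0)"):
        divide(1, 0)


def test_divide_by_zero_explicit():
    # Escaping (or re.escape) makes the literal intent unambiguous.
    with pytest.raises(ValueError, match=r"cannot divide \(b == 0\)"):
        divide(1, 0)
```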
### Rule changes
|
||||
|
||||
- \[`flake8-bandit`\] Check `S105` for annotated assignment ([#15059](https://github.com/astral-sh/ruff/pull/15059))
|
||||
- \[`flake8-pyi`\] More autofixes for `redundant-none-literal` (`PYI061`) ([#14872](https://github.com/astral-sh/ruff/pull/14872))
|
||||
- \[`pydocstyle`\] Skip leading whitespace for `D403` ([#14963](https://github.com/astral-sh/ruff/pull/14963))
|
||||
- \[`ruff`\] Skip `SQLModel` base classes for `mutable-class-default` (`RUF012`) ([#14949](https://github.com/astral-sh/ruff/pull/14949))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`perflint`\] Parenthesize walrus expressions in autofix for `manual-list-comprehension` (`PERF401`) ([#15050](https://github.com/astral-sh/ruff/pull/15050))
|
||||
|
||||
### Server
|
||||
|
||||
- Check diagnostic refresh support from client capability which enables dynamic configuration for various editors ([#15014](https://github.com/astral-sh/ruff/pull/15014))
|
||||
|
||||
## 0.8.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Extend names moved from core to provider (`AIR303`) ([#15145](https://github.com/astral-sh/ruff/pull/15145), [#15159](https://github.com/astral-sh/ruff/pull/15159), [#15196](https://github.com/astral-sh/ruff/pull/15196), [#15216](https://github.com/astral-sh/ruff/pull/15216))
|
||||
- \[`airflow`\] Extend rule to check class attributes, methods, arguments (`AIR302`) ([#15054](https://github.com/astral-sh/ruff/pull/15054), [#15083](https://github.com/astral-sh/ruff/pull/15083))
|
||||
- \[`fastapi`\] Update `FAST002` to check keyword-only arguments ([#15119](https://github.com/astral-sh/ruff/pull/15119))
|
||||
- \[`flake8-type-checking`\] Disable `TC006` and `TC007` in stub files ([#15179](https://github.com/astral-sh/ruff/pull/15179))
|
||||
- \[`pylint`\] Detect nested methods correctly (`PLW1641`) ([#15032](https://github.com/astral-sh/ruff/pull/15032))
|
||||
- \[`ruff`\] Detect more strict-integer expressions (`RUF046`) ([#14833](https://github.com/astral-sh/ruff/pull/14833))
|
||||
- \[`ruff`\] Implement `falsy-dict-get-fallback` (`RUF056`) ([#15160](https://github.com/astral-sh/ruff/pull/15160))
|
||||
- \[`ruff`\] Implement `unnecessary-round` (`RUF057`) ([#14828](https://github.com/astral-sh/ruff/pull/14828))
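
A minimal example of the redundancy the rule targets, assuming the rounded value is already an integer:

```python
def total_items(counts: list[int]) -> int:
    # RUF057: rounding an integer (with no ndigits argument) is a no-op.
    return round(sum(counts))


def total_items_fixed(counts: list[int]) -> int:
    return sum(counts)
```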
### Rule changes
|
||||
|
||||
- Visit PEP 764 inline `TypedDict` keys as non-type-expressions ([#15073](https://github.com/astral-sh/ruff/pull/15073))
|
||||
- \[`flake8-comprehensions`\] Skip `C416` if comprehension contains unpacking ([#14909](https://github.com/astral-sh/ruff/pull/14909))
|
||||
- \[`flake8-pie`\] Allow `cast(SomeType, ...)` (`PIE796`) ([#15141](https://github.com/astral-sh/ruff/pull/15141))
|
||||
- \[`flake8-simplify`\] More precise inference for dictionaries (`SIM300`) ([#15164](https://github.com/astral-sh/ruff/pull/15164))
|
||||
- \[`flake8-use-pathlib`\] Catch redundant joins in `PTH201` and avoid syntax errors ([#15177](https://github.com/astral-sh/ruff/pull/15177))
|
||||
- \[`pycodestyle`\] Preserve original value format (`E731`) ([#15097](https://github.com/astral-sh/ruff/pull/15097))
|
||||
- \[`pydocstyle`\] Split on first whitespace character (`D403`) ([#15082](https://github.com/astral-sh/ruff/pull/15082))
|
||||
- \[`pyupgrade`\] Add all PEP-585 names to `UP006` rule ([#5454](https://github.com/astral-sh/ruff/pull/5454))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-type-checking`\] Improve flexibility of `runtime-evaluated-decorators` ([#15204](https://github.com/astral-sh/ruff/pull/15204))
|
||||
- \[`pydocstyle`\] Add setting to ignore missing documentation for `*args` and `**kwargs` parameters (`D417`) ([#15210](https://github.com/astral-sh/ruff/pull/15210))
|
||||
- \[`ruff`\] Add an allowlist for `unsafe-markup-use` (`RUF035`) ([#15076](https://github.com/astral-sh/ruff/pull/15076))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix type subscript on older python versions ([#15090](https://github.com/astral-sh/ruff/pull/15090))
|
||||
- Use `TypeChecker` for detecting `fastapi` routes ([#15093](https://github.com/astral-sh/ruff/pull/15093))
|
||||
- \[`pycodestyle`\] Avoid false positives and negatives related to type parameter default syntax (`E225`, `E251`) ([#15214](https://github.com/astral-sh/ruff/pull/15214))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix incorrect doc in `shebang-not-executable` (`EXE001`) and add git+windows solution to executable bit ([#15208](https://github.com/astral-sh/ruff/pull/15208))
|
||||
- Rename rules currently not conforming to naming convention ([#15102](https://github.com/astral-sh/ruff/pull/15102))
|
||||
|
||||
## 0.8.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`format`\]: Preserve multiline implicit concatenated strings in docstring positions ([#15126](https://github.com/astral-sh/ruff/pull/15126))
|
||||
- \[`ruff`\] Add rule to detect empty literal in deque call (`RUF025`) ([#15104](https://github.com/astral-sh/ruff/pull/15104))
|
||||
- \[`ruff`\] Avoid reporting when `ndigits` is possibly negative (`RUF057`) ([#15234](https://github.com/astral-sh/ruff/pull/15234))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-todos`\] Remove issue code length restriction (`TD003`) ([#15175](https://github.com/astral-sh/ruff/pull/15175))
|
||||
- \[`pyflakes`\] Ignore errors in `@no_type_check` string annotations (`F722`, `F821`) ([#15215](https://github.com/astral-sh/ruff/pull/15215))
|
||||
|
||||
### CLI
|
||||
|
||||
- Show errors for attempted fixes only when passed `--verbose` ([#15237](https://github.com/astral-sh/ruff/pull/15237))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`ruff`\] Avoid syntax error when removing int over multiple lines (`RUF046`) ([#15230](https://github.com/astral-sh/ruff/pull/15230))
|
||||
- \[`pyupgrade`\] Revert "Add all PEP-585 names to `UP006` rule" ([#15250](https://github.com/astral-sh/ruff/pull/15250))
|
||||
@@ -1,475 +0,0 @@
|
||||
# Changelog 0.9.x
|
||||
|
||||
## 0.9.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.9.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the formatter section for a detailed list of changes.
|
||||
|
||||
This release doesn’t remove or remap any existing stable rules.
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`stdlib-module-shadowing`](https://docs.astral.sh/ruff/rules/stdlib-module-shadowing/) (`A005`).
|
||||
This rule has also been renamed: previously, it was called `builtin-module-shadowing`.
|
||||
- [`builtin-lambda-argument-shadowing`](https://docs.astral.sh/ruff/rules/builtin-lambda-argument-shadowing/) (`A006`)
|
||||
- [`slice-to-remove-prefix-or-suffix`](https://docs.astral.sh/ruff/rules/slice-to-remove-prefix-or-suffix/) (`FURB188`)
|
||||
- [`boolean-chained-comparison`](https://docs.astral.sh/ruff/rules/boolean-chained-comparison/) (`PLR1716`)
|
||||
- [`decimal-from-float-literal`](https://docs.astral.sh/ruff/rules/decimal-from-float-literal/) (`RUF032`)
|
||||
- [`post-init-default`](https://docs.astral.sh/ruff/rules/post-init-default/) (`RUF033`)
|
||||
- [`useless-if-else`](https://docs.astral.sh/ruff/rules/useless-if-else/) (`RUF034`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`pytest-parametrize-names-wrong-type`](https://docs.astral.sh/ruff/rules/pytest-parametrize-names-wrong-type/) (`PT006`): Detect [`pytest.parametrize`](https://docs.pytest.org/en/7.1.x/how-to/parametrize.html#parametrize) calls outside decorators and calls with keyword arguments.
|
||||
- [`module-import-not-at-top-of-file`](https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/) (`E402`): Ignore [`pytest.importorskip`](https://docs.pytest.org/en/7.1.x/reference/reference.html#pytest-importorskip) calls between import statements.
|
||||
- [`mutable-dataclass-default`](https://docs.astral.sh/ruff/rules/mutable-dataclass-default/) (`RUF008`) and [`function-call-in-dataclass-default-argument`](https://docs.astral.sh/ruff/rules/function-call-in-dataclass-default-argument/) (`RUF009`): Add support for [`attrs`](https://www.attrs.org/en/stable/).
|
||||
- [`bad-version-info-comparison`](https://docs.astral.sh/ruff/rules/bad-version-info-comparison/) (`PYI006`): Extend the rule to check non-stub files.
|
||||
|
||||
The following fixes or improvements to fixes have been stabilized:
|
||||
|
||||
- [`redundant-numeric-union`](https://docs.astral.sh/ruff/rules/redundant-numeric-union/) (`PYI041`)
|
||||
- [`duplicate-union-members`](https://docs.astral.sh/ruff/rules/duplicate-union-member/) (`PYI016`)
|
||||
|
||||
### Formatter
|
||||
|
||||
This release introduces the new 2025 stable style ([#13371](https://github.com/astral-sh/ruff/issues/13371)), stabilizing the following changes:
|
||||
|
||||
- Format expressions in f-string elements ([#7594](https://github.com/astral-sh/ruff/issues/7594))
|
||||
- Alternate quotes for strings inside f-strings ([#13860](https://github.com/astral-sh/ruff/pull/13860))
|
||||
- Preserve the casing of hex codes in f-string debug expressions ([#14766](https://github.com/astral-sh/ruff/issues/14766))
|
||||
- Choose the quote style for each string literal in an implicitly concatenated f-string rather than for the entire string ([#13539](https://github.com/astral-sh/ruff/pull/13539))
|
||||
- Automatically join an implicitly concatenated string into a single string literal if it fits on a single line ([#9457](https://github.com/astral-sh/ruff/issues/9457))
|
||||
- Remove the [`ISC001`](https://docs.astral.sh/ruff/rules/single-line-implicit-string-concatenation/) incompatibility warning ([#15123](https://github.com/astral-sh/ruff/pull/15123))
|
||||
- Prefer parenthesizing the `assert` message over breaking the assertion expression ([#9457](https://github.com/astral-sh/ruff/issues/9457))
|
||||
- Automatically parenthesize over-long `if` guards in `match` `case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
|
||||
- More consistent formatting for `match` `case` patterns ([#6933](https://github.com/astral-sh/ruff/issues/6933))
|
||||
- Avoid unnecessary parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))
|
||||
- Keep the opening parentheses on the same line as the `if` keyword for comprehensions where the condition has a leading comment ([#12282](https://github.com/astral-sh/ruff/pull/12282))
|
||||
- More consistent formatting for `with` statements with a single context manager for Python 3.8 or older ([#10276](https://github.com/astral-sh/ruff/pull/10276))
|
||||
- Correctly calculate the line-width for code blocks in docstrings when using `max-doc-code-line-length = "dynamic"` ([#13523](https://github.com/astral-sh/ruff/pull/13523))
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement `class-as-data-structure` (`B903`) ([#9601](https://github.com/astral-sh/ruff/pull/9601))
|
||||
- \[`flake8-type-checking`\] Apply `quoted-type-alias` more eagerly in `TYPE_CHECKING` blocks and ignore it in stubs (`TC008`) ([#15180](https://github.com/astral-sh/ruff/pull/15180))
|
||||
- \[`pylint`\] Ignore `eq-without-hash` in stub files (`PLW1641`) ([#15310](https://github.com/astral-sh/ruff/pull/15310))
|
||||
- \[`pyupgrade`\] Split `UP007` into two individual rules: `UP007` for `Union` and `UP045` for `Optional` (`UP007`, `UP045`) ([#15313](https://github.com/astral-sh/ruff/pull/15313))
|
||||
- \[`ruff`\] New rule that detects classes that are both an enum and a `dataclass` (`RUF049`) ([#15299](https://github.com/astral-sh/ruff/pull/15299))
|
||||
- \[`ruff`\] Recode `RUF025` to `RUF037` (`RUF037`) ([#15258](https://github.com/astral-sh/ruff/pull/15258))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-builtins`\] Ignore [`stdlib-module-shadowing`](https://docs.astral.sh/ruff/rules/stdlib-module-shadowing/) in stub files (`A005`) ([#15350](https://github.com/astral-sh/ruff/pull/15350))
|
||||
- \[`flake8-return`\] Add support for functions returning `typing.Never` (`RET503`) ([#15298](https://github.com/astral-sh/ruff/pull/15298))
|
||||
|
||||
### Server
|
||||
|
||||
- Improve the observability by removing the need for the ["trace" value](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#traceValue) to turn on or off logging. The server logging is solely controlled using the [`logLevel` server setting](https://docs.astral.sh/ruff/editors/settings/#loglevel)
|
||||
which defaults to `info`. This addresses the issue where users were notified about an error and told to consult the log, but it didn’t contain any messages. ([#15232](https://github.com/astral-sh/ruff/pull/15232))
|
||||
- Ignore diagnostics from other sources for code action requests ([#15373](https://github.com/astral-sh/ruff/pull/15373))
|
||||
|
||||
### CLI
|
||||
|
||||
- Improve the error message for `--config key=value` when `key` refers to a table but `value` is a simple value
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`eradicate`\] Ignore metadata blocks directly followed by normal blocks (`ERA001`) ([#15330](https://github.com/astral-sh/ruff/pull/15330))
|
||||
- \[`flake8-django`\] Recognize other magic methods (`DJ012`) ([#15365](https://github.com/astral-sh/ruff/pull/15365))
|
||||
- \[`pycodestyle`\] Avoid false positives related to type aliases (`E252`) ([#15356](https://github.com/astral-sh/ruff/pull/15356))
|
||||
- \[`pydocstyle`\] Avoid treating newline-separated sections as sub-sections (`D405`) ([#15311](https://github.com/astral-sh/ruff/pull/15311))
|
||||
- \[`pyflakes`\] Remove call when removing final argument from `format` (`F523`) ([#15309](https://github.com/astral-sh/ruff/pull/15309))
|
||||
- \[`refurb`\] Mark fix as unsafe when the right-hand side is a string (`FURB171`) ([#15273](https://github.com/astral-sh/ruff/pull/15273))
|
||||
- \[`ruff`\] Treat `)` as a regex metacharacter (`RUF043`, `RUF055`) ([#15318](https://github.com/astral-sh/ruff/pull/15318))
|
||||
- \[`ruff`\] Parenthesize the `int`-call argument when removing the `int` call would change semantics (`RUF046`) ([#15277](https://github.com/astral-sh/ruff/pull/15277))
|
||||
|
||||
## 0.9.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pycodestyle`\] Run `too-many-newlines-at-end-of-file` on each cell in notebooks (`W391`) ([#15308](https://github.com/astral-sh/ruff/pull/15308))
|
||||
- \[`ruff`\] Omit diagnostic for shadowed private function parameters in `used-dummy-variable` (`RUF052`) ([#15376](https://github.com/astral-sh/ruff/pull/15376))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Improve `assert-raises-exception` message (`B017`) ([#15389](https://github.com/astral-sh/ruff/pull/15389))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Preserve trailing end-of line comments for the last string literal in implicitly concatenated strings ([#15378](https://github.com/astral-sh/ruff/pull/15378))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix a bug where the server and client notebooks were out of sync after reordering cells ([#15398](https://github.com/astral-sh/ruff/pull/15398))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-pie`\] Correctly remove wrapping parentheses (`PIE800`) ([#15394](https://github.com/astral-sh/ruff/pull/15394))
|
||||
- \[`pyupgrade`\] Handle comments and multiline expressions correctly (`UP037`) ([#15337](https://github.com/astral-sh/ruff/pull/15337))
|
||||
|
||||
## 0.9.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Fix typo "security_managr" to "security_manager" (`AIR303`) ([#15463](https://github.com/astral-sh/ruff/pull/15463))
|
||||
- \[`airflow`\] Extend and fix `AIR302` rules ([#15525](https://github.com/astral-sh/ruff/pull/15525))
|
||||
- \[`fastapi`\] Handle parameters with `Depends` correctly (`FAST003`) ([#15364](https://github.com/astral-sh/ruff/pull/15364))
|
||||
- \[`flake8-pytest-style`\] Implement pytest.warns diagnostics (`PT029`, `PT030`, `PT031`) ([#15444](https://github.com/astral-sh/ruff/pull/15444))
|
||||
- \[`flake8-pytest-style`\] Test function parameters with default arguments (`PT028`) ([#15449](https://github.com/astral-sh/ruff/pull/15449))
|
||||
- \[`flake8-type-checking`\] Avoid false positives for `|` in `TC008` ([#15201](https://github.com/astral-sh/ruff/pull/15201))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-todos`\] Allow VSCode GitHub PR extension style links in `missing-todo-link` (`TD003`) ([#15519](https://github.com/astral-sh/ruff/pull/15519))
|
||||
- \[`pyflakes`\] Show syntax error message for `F722` ([#15523](https://github.com/astral-sh/ruff/pull/15523))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix curly bracket spacing around f-string expressions containing curly braces ([#15471](https://github.com/astral-sh/ruff/pull/15471))
|
||||
- Fix joining of f-strings with different quotes when using quote style `Preserve` ([#15524](https://github.com/astral-sh/ruff/pull/15524))
|
||||
|
||||
### Server
|
||||
|
||||
- Avoid indexing the same workspace multiple times ([#15495](https://github.com/astral-sh/ruff/pull/15495))
|
||||
- Display context for `ruff.configuration` errors ([#15452](https://github.com/astral-sh/ruff/pull/15452))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Remove `flatten` to improve deserialization error messages ([#15414](https://github.com/astral-sh/ruff/pull/15414))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Parse triple-quoted string annotations as if parenthesized ([#15387](https://github.com/astral-sh/ruff/pull/15387))
|
||||
- \[`fastapi`\] Update `Annotated` fixes (`FAST002`) ([#15462](https://github.com/astral-sh/ruff/pull/15462))
|
||||
- \[`flake8-bandit`\] Check for `builtins` instead of `builtin` (`S102`, `PTH123`) ([#15443](https://github.com/astral-sh/ruff/pull/15443))
|
||||
- \[`flake8-pathlib`\] Fix `--select` for `os-path-dirname` (`PTH120`) ([#15446](https://github.com/astral-sh/ruff/pull/15446))
|
||||
- \[`ruff`\] Fix false positive on global keyword (`RUF052`) ([#15235](https://github.com/astral-sh/ruff/pull/15235))
|
||||
|
||||
## 0.9.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Argument `fail_stop` in DAG has been renamed to `fail_fast` (`AIR302`) ([#15633](https://github.com/astral-sh/ruff/pull/15633))
|
||||
- \[`airflow`\] Extend `AIR303` with more symbols ([#15611](https://github.com/astral-sh/ruff/pull/15611))
|
||||
- \[`flake8-bandit`\] Report all references to suspicious functions (`S3`) ([#15541](https://github.com/astral-sh/ruff/pull/15541))
|
||||
- \[`flake8-pytest-style`\] Do not emit diagnostics for empty `for` loops (`PT012`, `PT031`) ([#15542](https://github.com/astral-sh/ruff/pull/15542))
|
||||
- \[`flake8-simplify`\] Avoid double negations (`SIM103`) ([#15562](https://github.com/astral-sh/ruff/pull/15562))
|
||||
- \[`pyflakes`\] Fix infinite loop with unused local import in `__init__.py` (`F401`) ([#15517](https://github.com/astral-sh/ruff/pull/15517))
|
||||
- \[`pylint`\] Do not report methods with only one `EM101`-compatible `raise` (`PLR6301`) ([#15507](https://github.com/astral-sh/ruff/pull/15507))
|
||||
- \[`pylint`\] Implement `redefined-slots-in-subclass` (`W0244`) ([#9640](https://github.com/astral-sh/ruff/pull/9640))
|
||||
- \[`pyupgrade`\] Add rules to use PEP 695 generics in classes and functions (`UP046`, `UP047`) ([#15565](https://github.com/astral-sh/ruff/pull/15565), [#15659](https://github.com/astral-sh/ruff/pull/15659))
|
||||
- \[`refurb`\] Implement `for-loop-writes` (`FURB122`) ([#10630](https://github.com/astral-sh/ruff/pull/10630))
|
||||
- \[`ruff`\] Implement `needless-else` clause (`RUF047`) ([#15051](https://github.com/astral-sh/ruff/pull/15051))
|
||||
- \[`ruff`\] Implement `starmap-zip` (`RUF058`) ([#15483](https://github.com/astral-sh/ruff/pull/15483))
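
A small sketch of the equivalence the rule relies on (the function names are illustrative):

```python
from itertools import starmap


def pairwise_sums(xs, ys):
    # RUF058: starmap over zip(...) is equivalent to map with multiple
    # iterables, without building intermediate tuples.
    return list(starmap(lambda x, y: x + y, zip(xs, ys)))


def pairwise_sums_fixed(xs, ys):
    return list(map(lambda x, y: x + y, xs, ys))
```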
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Do not raise an error if a keyword argument is present and the target Python version is 3.9 or earlier (`B903`) ([#15549](https://github.com/astral-sh/ruff/pull/15549))
- \[`flake8-comprehensions`\] Strip parentheses around generators in `unnecessary-generator-set` (`C401`) ([#15553](https://github.com/astral-sh/ruff/pull/15553))
|
||||
- \[`flake8-pytest-style`\] Rewrite references to `.exception` (`PT027`) ([#15680](https://github.com/astral-sh/ruff/pull/15680))
|
||||
- \[`flake8-simplify`\] Mark fixes as unsafe (`SIM201`, `SIM202`) ([#15626](https://github.com/astral-sh/ruff/pull/15626))
|
||||
- \[`flake8-type-checking`\] Fix some safe fixes being labeled unsafe (`TC006`,`TC008`) ([#15638](https://github.com/astral-sh/ruff/pull/15638))
|
||||
- \[`isort`\] Omit trailing whitespace in `unsorted-imports` (`I001`) ([#15518](https://github.com/astral-sh/ruff/pull/15518))
|
||||
- \[`pydoclint`\] Allow ignoring one line docstrings for `DOC` rules ([#13302](https://github.com/astral-sh/ruff/pull/13302))
|
||||
- \[`pyflakes`\] Apply redefinition fixes by source code order (`F811`) ([#15575](https://github.com/astral-sh/ruff/pull/15575))
|
||||
- \[`pyflakes`\] Avoid removing too many imports in `redefined-while-unused` (`F811`) ([#15585](https://github.com/astral-sh/ruff/pull/15585))
|
||||
- \[`pyflakes`\] Group redefinition fixes by source statement (`F811`) ([#15574](https://github.com/astral-sh/ruff/pull/15574))
|
||||
- \[`pylint`\] Include name of base class in message for `redefined-slots-in-subclass` (`W0244`) ([#15559](https://github.com/astral-sh/ruff/pull/15559))
|
||||
- \[`ruff`\] Update fix for `RUF055` to use `var == value` ([#15605](https://github.com/astral-sh/ruff/pull/15605))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix bracket spacing for single-element tuples in f-string expressions ([#15537](https://github.com/astral-sh/ruff/pull/15537))
|
||||
- Fix unstable f-string formatting for expressions containing a trailing comma ([#15545](https://github.com/astral-sh/ruff/pull/15545))
|
||||
|
||||
### Performance
|
||||
|
||||
- Avoid quadratic membership check in import fixes ([#15576](https://github.com/astral-sh/ruff/pull/15576))
|
||||
|
||||
### Server
|
||||
|
||||
- Allow `unsafe-fixes` settings for code actions ([#15666](https://github.com/astral-sh/ruff/pull/15666))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-bandit`\] Add missing single-line/dotall regex flag (`S608`) ([#15654](https://github.com/astral-sh/ruff/pull/15654))
|
||||
- \[`flake8-import-conventions`\] Fix infinite loop between `ICN001` and `I002` (`ICN001`) ([#15480](https://github.com/astral-sh/ruff/pull/15480))
|
||||
- \[`flake8-simplify`\] Do not emit diagnostics for expressions inside string type annotations (`SIM222`, `SIM223`) ([#15405](https://github.com/astral-sh/ruff/pull/15405))
|
||||
- \[`pyflakes`\] Treat arguments passed to the `default=` parameter of `TypeVar` as type expressions (`F821`) ([#15679](https://github.com/astral-sh/ruff/pull/15679))
|
||||
- \[`pyupgrade`\] Avoid syntax error when the iterable is a non-parenthesized tuple (`UP028`) ([#15543](https://github.com/astral-sh/ruff/pull/15543))
|
||||
- \[`ruff`\] Exempt `NewType` calls where the original type is immutable (`RUF009`) ([#15588](https://github.com/astral-sh/ruff/pull/15588))
|
||||
- Preserve raw string prefix and escapes in all codegen fixes ([#15694](https://github.com/astral-sh/ruff/pull/15694))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Generate documentation redirects for lowercase rule codes ([#15564](https://github.com/astral-sh/ruff/pull/15564))
|
||||
- `TRY300`: Add some extra notes on not catching exceptions you didn't expect ([#15036](https://github.com/astral-sh/ruff/pull/15036))
|
||||
|
||||
## 0.9.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Extend airflow context parameter check for `BaseOperator.execute` (`AIR302`) ([#15713](https://github.com/astral-sh/ruff/pull/15713))
|
||||
- \[`airflow`\] Update `AIR302` to check for deprecated context keys ([#15144](https://github.com/astral-sh/ruff/pull/15144))
|
||||
- \[`flake8-bandit`\] Permit suspicious imports within stub files (`S4`) ([#15822](https://github.com/astral-sh/ruff/pull/15822))
|
||||
- \[`pylint`\] Do not trigger `PLR6201` on empty collections ([#15732](https://github.com/astral-sh/ruff/pull/15732))
|
||||
- \[`refurb`\] Do not emit diagnostic when loop variables are used outside loop body (`FURB122`) ([#15757](https://github.com/astral-sh/ruff/pull/15757))
|
||||
- \[`ruff`\] Add support for more `re` patterns (`RUF055`) ([#15764](https://github.com/astral-sh/ruff/pull/15764))
|
||||
- \[`ruff`\] Check for shadowed `map` before suggesting fix (`RUF058`) ([#15790](https://github.com/astral-sh/ruff/pull/15790))
|
||||
- \[`ruff`\] Do not emit diagnostic when all arguments to `zip()` are variadic (`RUF058`) ([#15744](https://github.com/astral-sh/ruff/pull/15744))
|
||||
- \[`ruff`\] Parenthesize fix when argument spans multiple lines for `unnecessary-round` (`RUF057`) ([#15703](https://github.com/astral-sh/ruff/pull/15703))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Preserve quote style in generated code ([#15726](https://github.com/astral-sh/ruff/pull/15726), [#15778](https://github.com/astral-sh/ruff/pull/15778), [#15794](https://github.com/astral-sh/ruff/pull/15794))
|
||||
- \[`flake8-bugbear`\] Exempt `NewType` calls where the original type is immutable (`B008`) ([#15765](https://github.com/astral-sh/ruff/pull/15765))
|
||||
- \[`pylint`\] Honor banned top-level imports by `TID253` in `PLC0415`. ([#15628](https://github.com/astral-sh/ruff/pull/15628))
|
||||
- \[`pyupgrade`\] Ignore `is_typeddict` and `TypedDict` for `deprecated-import` (`UP035`) ([#15800](https://github.com/astral-sh/ruff/pull/15800))
|
||||
|
||||
### CLI
|
||||
|
||||
- Fix formatter warning message for `flake8-quotes` option ([#15788](https://github.com/astral-sh/ruff/pull/15788))
|
||||
- Implement tab autocomplete for `ruff config` ([#15603](https://github.com/astral-sh/ruff/pull/15603))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-comprehensions`\] Do not emit `unnecessary-map` diagnostic when lambda has different arity (`C417`) ([#15802](https://github.com/astral-sh/ruff/pull/15802))
|
||||
- \[`flake8-comprehensions`\] Parenthesize `sorted` when needed for `unnecessary-call-around-sorted` (`C413`) ([#15825](https://github.com/astral-sh/ruff/pull/15825))
|
||||
- \[`pyupgrade`\] Handle end-of-line comments for `quoted-annotation` (`UP037`) ([#15824](https://github.com/astral-sh/ruff/pull/15824))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add missing config docstrings ([#15803](https://github.com/astral-sh/ruff/pull/15803))
|
||||
- Add references to `trio.run_process` and `anyio.run_process` ([#15761](https://github.com/astral-sh/ruff/pull/15761))
|
||||
- Use `uv init --lib` in tutorial ([#15718](https://github.com/astral-sh/ruff/pull/15718))
|
||||
|
||||
## 0.9.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- Recognize all symbols named `TYPE_CHECKING` for `in_type_checking_block` ([#15719](https://github.com/astral-sh/ruff/pull/15719))
|
||||
- \[`flake8-comprehensions`\] Handle builtins at top of file correctly for `unnecessary-dict-comprehension-for-iterable` (`C420`) ([#15837](https://github.com/astral-sh/ruff/pull/15837))
|
||||
- \[`flake8-logging`\] `.exception()` and `exc_info=` outside exception handlers (`LOG004`, `LOG014`) ([#15799](https://github.com/astral-sh/ruff/pull/15799))
|
||||
- \[`flake8-pyi`\] Fix incorrect behaviour of `custom-typevar-return-type` preview-mode autofix if `typing` was already imported (`PYI019`) ([#15853](https://github.com/astral-sh/ruff/pull/15853))
|
||||
- \[`flake8-pyi`\] Fix more complex cases (`PYI019`) ([#15821](https://github.com/astral-sh/ruff/pull/15821))
|
||||
- \[`flake8-pyi`\] Make `PYI019` autofixable for `.py` files in preview mode as well as stubs ([#15889](https://github.com/astral-sh/ruff/pull/15889))
|
||||
- \[`flake8-pyi`\] Remove type parameter correctly when it is the last (`PYI019`) ([#15854](https://github.com/astral-sh/ruff/pull/15854))
|
||||
- \[`pylint`\] Fix missing parens in unsafe fix for `unnecessary-dunder-call` (`PLC2801`) ([#15762](https://github.com/astral-sh/ruff/pull/15762))
|
||||
- \[`pyupgrade`\] Better messages and diagnostic range (`UP015`) ([#15872](https://github.com/astral-sh/ruff/pull/15872))
|
||||
- \[`pyupgrade`\] Rename private type parameters in PEP 695 generics (`UP049`) ([#15862](https://github.com/astral-sh/ruff/pull/15862))
|
||||
- \[`refurb`\] Also report non-name expressions (`FURB169`) ([#15905](https://github.com/astral-sh/ruff/pull/15905))
|
||||
- \[`refurb`\] Mark fix as unsafe if there are comments (`FURB171`) ([#15832](https://github.com/astral-sh/ruff/pull/15832))
|
||||
- \[`ruff`\] Classes with mixed type variable style (`RUF053`) ([#15841](https://github.com/astral-sh/ruff/pull/15841))
|
||||
- \[`airflow`\] `BashOperator` has been moved to `airflow.providers.standard.operators.bash.BashOperator` (`AIR302`) ([#15922](https://github.com/astral-sh/ruff/pull/15922))
|
||||
- \[`flake8-pyi`\] Add autofix for unused-private-type-var (`PYI018`) ([#15999](https://github.com/astral-sh/ruff/pull/15999))
|
||||
- \[`flake8-pyi`\] Significantly improve accuracy of `PYI019` if preview mode is enabled ([#15888](https://github.com/astral-sh/ruff/pull/15888))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Preserve triple quotes and prefixes for strings ([#15818](https://github.com/astral-sh/ruff/pull/15818))
|
||||
- \[`flake8-comprehensions`\] Skip when `TypeError` present from too many (kw)args for `C410`,`C411`, and `C418` ([#15838](https://github.com/astral-sh/ruff/pull/15838))
|
||||
- \[`flake8-pyi`\] Rename `PYI019` and improve its diagnostic message ([#15885](https://github.com/astral-sh/ruff/pull/15885))
|
||||
- \[`pep8-naming`\] Ignore `@override` methods (`N803`) ([#15954](https://github.com/astral-sh/ruff/pull/15954))
|
||||
- \[`pyupgrade`\] Reuse replacement logic from `UP046` and `UP047` to preserve more comments (`UP040`) ([#15840](https://github.com/astral-sh/ruff/pull/15840))
|
||||
- \[`ruff`\] Analyze deferred annotations before enforcing `mutable-(data)class-default` and `function-call-in-dataclass-default-argument` (`RUF008`,`RUF009`,`RUF012`) ([#15921](https://github.com/astral-sh/ruff/pull/15921))
|
||||
- \[`pycodestyle`\] Exempt `sys.path += ...` calls (`E402`) ([#15980](https://github.com/astral-sh/ruff/pull/15980))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Config error only when `flake8-import-conventions` alias conflicts with `isort.required-imports` bound name ([#15918](https://github.com/astral-sh/ruff/pull/15918))
|
||||
- Workaround Even Better TOML crash related to `allOf` ([#15992](https://github.com/astral-sh/ruff/pull/15992))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-comprehensions`\] Unnecessary `list` comprehension (rewrite as a `set` comprehension) (`C403`) - Handle extraneous parentheses around list comprehension ([#15877](https://github.com/astral-sh/ruff/pull/15877))
|
||||
- \[`flake8-comprehensions`\] Handle trailing comma in fixes for `unnecessary-generator-list/set` (`C400`,`C401`) ([#15929](https://github.com/astral-sh/ruff/pull/15929))
|
||||
- \[`flake8-pyi`\] Fix several correctness issues with `custom-type-var-return-type` (`PYI019`) ([#15851](https://github.com/astral-sh/ruff/pull/15851))
|
||||
- \[`pep8-naming`\] Consider any number of leading underscore for `N801` ([#15988](https://github.com/astral-sh/ruff/pull/15988))
|
||||
- \[`pyflakes`\] Visit forward annotations in `TypeAliasType` as types (`F401`) ([#15829](https://github.com/astral-sh/ruff/pull/15829))
|
||||
- \[`pylint`\] Correct min/max auto-fix and suggestion for (`PL1730`) ([#15930](https://github.com/astral-sh/ruff/pull/15930))
|
||||
- \[`refurb`\] Handle unparenthesized tuples correctly (`FURB122`, `FURB142`) ([#15953](https://github.com/astral-sh/ruff/pull/15953))
|
||||
- \[`refurb`\] Avoid `None | None` as well as better detection and fix (`FURB168`) ([#15779](https://github.com/astral-sh/ruff/pull/15779))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add deprecation warning for `ruff-lsp` related settings ([#15850](https://github.com/astral-sh/ruff/pull/15850))
|
||||
- Docs (`linter.md`): clarify that Python files are always searched for in subdirectories ([#15882](https://github.com/astral-sh/ruff/pull/15882))
|
||||
- Fix a typo in `non_pep695_generic_class.rs` ([#15946](https://github.com/astral-sh/ruff/pull/15946))
|
||||
- Improve Docs: Pylint subcategories' codes ([#15909](https://github.com/astral-sh/ruff/pull/15909))
|
||||
- Remove non-existing `lint.extendIgnore` editor setting ([#15844](https://github.com/astral-sh/ruff/pull/15844))
|
||||
- Update black deviations ([#15928](https://github.com/astral-sh/ruff/pull/15928))
|
||||
- Mention `UP049` in `UP046` and `UP047`, add `See also` section to `UP040` ([#15956](https://github.com/astral-sh/ruff/pull/15956))
|
||||
- Add instance variable examples to `RUF012` ([#15982](https://github.com/astral-sh/ruff/pull/15982))
|
||||
- Explain precedence for `ignore` and `select` config ([#15883](https://github.com/astral-sh/ruff/pull/15883))
|
||||
|
||||
## 0.9.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Add `external_task.{ExternalTaskMarker, ExternalTaskSensor}` for `AIR302` ([#16014](https://github.com/astral-sh/ruff/pull/16014))
|
||||
- \[`flake8-builtins`\] Make strict module name comparison optional (`A005`) ([#15951](https://github.com/astral-sh/ruff/pull/15951))
|
||||
- \[`flake8-pyi`\] Extend fix to Python \<= 3.9 for `redundant-none-literal` (`PYI061`) ([#16044](https://github.com/astral-sh/ruff/pull/16044))
|
||||
- \[`pylint`\] Also report when the object isn't a literal (`PLE1310`) ([#15985](https://github.com/astral-sh/ruff/pull/15985))
|
||||
- \[`ruff`\] Implement `indented-form-feed` (`RUF054`) ([#16049](https://github.com/astral-sh/ruff/pull/16049))
|
||||
- \[`ruff`\] Skip type definitions for `missing-f-string-syntax` (`RUF027`) ([#16054](https://github.com/astral-sh/ruff/pull/16054))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-annotations`\] Correct syntax for `typing.Union` in suggested return type fixes for `ANN20x` rules ([#16025](https://github.com/astral-sh/ruff/pull/16025))
|
||||
- \[`flake8-builtins`\] Match upstream module name comparison (`A005`) ([#16006](https://github.com/astral-sh/ruff/pull/16006))
|
||||
- \[`flake8-comprehensions`\] Detect overshadowed `list`/`set`/`dict`, ignore variadics and named expressions (`C417`) ([#15955](https://github.com/astral-sh/ruff/pull/15955))
|
||||
- \[`flake8-pie`\] Remove following comma correctly when the unpacked dictionary is empty (`PIE800`) ([#16008](https://github.com/astral-sh/ruff/pull/16008))
|
||||
- \[`flake8-simplify`\] Only trigger `SIM401` on known dictionaries ([#15995](https://github.com/astral-sh/ruff/pull/15995))
|
||||
- \[`pylint`\] Do not report calls when object type and argument type mismatch, remove custom escape handling logic (`PLE1310`) ([#15984](https://github.com/astral-sh/ruff/pull/15984))
|
||||
- \[`pyupgrade`\] Comments within parenthesized value ranges should not affect applicability (`UP040`) ([#16027](https://github.com/astral-sh/ruff/pull/16027))
|
||||
- \[`pyupgrade`\] Don't introduce invalid syntax when upgrading old-style type aliases with parenthesized multiline values (`UP040`) ([#16026](https://github.com/astral-sh/ruff/pull/16026))
|
||||
- \[`pyupgrade`\] Ensure we do not rename two type parameters to the same name (`UP049`) ([#16038](https://github.com/astral-sh/ruff/pull/16038))
|
||||
- \[`pyupgrade`\] \[`ruff`\] Don't apply renamings if the new name is shadowed in a scope of one of the references to the binding (`UP049`, `RUF052`) ([#16032](https://github.com/astral-sh/ruff/pull/16032))
|
||||
- \[`ruff`\] Update `RUF009` to behave similar to `B008` and ignore attributes with immutable types ([#16048](https://github.com/astral-sh/ruff/pull/16048))
|
||||
|
||||
### Server
|
||||
|
||||
- Root exclusions in the server to project root ([#16043](https://github.com/astral-sh/ruff/pull/16043))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-datetime`\] Ignore `.replace()` calls while looking for `.astimezone` ([#16050](https://github.com/astral-sh/ruff/pull/16050))
|
||||
- \[`flake8-type-checking`\] Avoid `TC004` false positive where the runtime definition is provided by `__getattr__` ([#16052](https://github.com/astral-sh/ruff/pull/16052))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Improve `ruff-lsp` migration document ([#16072](https://github.com/astral-sh/ruff/pull/16072))
|
||||
- Undeprecate `ruff.nativeServer` ([#16039](https://github.com/astral-sh/ruff/pull/16039))
|
||||
|
||||
## 0.9.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- Consider `__new__` methods as special function type for enforcing class method or static method rules ([#13305](https://github.com/astral-sh/ruff/pull/13305))
|
||||
- \[`airflow`\] Improve the internal logic to differentiate deprecated symbols (`AIR303`) ([#16013](https://github.com/astral-sh/ruff/pull/16013))
|
||||
- \[`refurb`\] Manual timezone monkeypatching (`FURB162`) ([#16113](https://github.com/astral-sh/ruff/pull/16113))
|
||||
- \[`ruff`\] Implicit class variable in dataclass (`RUF045`) ([#14349](https://github.com/astral-sh/ruff/pull/14349))
|
||||
- \[`ruff`\] Skip singleton starred expressions for `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#16083](https://github.com/astral-sh/ruff/pull/16083))
|
||||
- \[`refurb`\] Check for subclasses includes subscript expressions (`FURB189`) ([#16155](https://github.com/astral-sh/ruff/pull/16155))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-debugger`\] Also flag `sys.breakpointhook` and `sys.__breakpointhook__` (`T100`) ([#16191](https://github.com/astral-sh/ruff/pull/16191))
|
||||
- \[`pycodestyle`\] Exempt `site.addsitedir(...)` calls (`E402`) ([#16251](https://github.com/astral-sh/ruff/pull/16251))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix unstable formatting of trailing end-of-line comments of parenthesized attribute values ([#16187](https://github.com/astral-sh/ruff/pull/16187))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix handling of requests received after shutdown message ([#16262](https://github.com/astral-sh/ruff/pull/16262))
|
||||
- Ignore `source.organizeImports.ruff` and `source.fixAll.ruff` code actions for a notebook cell ([#16154](https://github.com/astral-sh/ruff/pull/16154))
|
||||
- Include document specific debug info for `ruff.printDebugInformation` ([#16215](https://github.com/astral-sh/ruff/pull/16215))
|
||||
- Update server to return the debug info as string with `ruff.printDebugInformation` ([#16214](https://github.com/astral-sh/ruff/pull/16214))
|
||||
|
||||
### CLI
|
||||
|
||||
- Warn on invalid `noqa` even when there are no diagnostics ([#16178](https://github.com/astral-sh/ruff/pull/16178))
|
||||
- Better error messages while loading configuration `extend`s ([#15658](https://github.com/astral-sh/ruff/pull/15658))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-comprehensions`\] Handle trailing comma in `C403` fix ([#16110](https://github.com/astral-sh/ruff/pull/16110))
|
||||
- \[`flake8-pyi`\] Avoid flagging `custom-typevar-for-self` on metaclass methods (`PYI019`) ([#16141](https://github.com/astral-sh/ruff/pull/16141))
|
||||
- \[`pydocstyle`\] Handle arguments with the same names as sections (`D417`) ([#16011](https://github.com/astral-sh/ruff/pull/16011))
|
||||
- \[`pylint`\] Correct ordering of arguments in fix for `if-stmt-min-max` (`PLR1730`) ([#16080](https://github.com/astral-sh/ruff/pull/16080))
|
||||
- \[`pylint`\] Do not offer fix for raw strings (`PLE251`) ([#16132](https://github.com/astral-sh/ruff/pull/16132))
|
||||
- \[`pyupgrade`\] Do not upgrade functional `TypedDicts` with private field names to the class-based syntax (`UP013`) ([#16219](https://github.com/astral-sh/ruff/pull/16219))
|
||||
- \[`pyupgrade`\] Handle micro version numbers correctly (`UP036`) ([#16091](https://github.com/astral-sh/ruff/pull/16091))
|
||||
- \[`pyupgrade`\] Unwrap unary expressions correctly (`UP018`) ([#15919](https://github.com/astral-sh/ruff/pull/15919))
|
||||
- \[`refurb`\] Correctly handle lengths of literal strings in `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#16237](https://github.com/astral-sh/ruff/pull/16237))
|
||||
- \[`ruff`\] Skip `RUF001` diagnostics when visiting string type definitions ([#16122](https://github.com/astral-sh/ruff/pull/16122))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add FAQ entry for `source.*` code actions in Notebook ([#16212](https://github.com/astral-sh/ruff/pull/16212))
|
||||
- Add `SECURITY.md` ([#16224](https://github.com/astral-sh/ruff/pull/16224))
|
||||
|
||||
## 0.9.8
|
||||
|
||||
### Preview features
|
||||
|
||||
- Start detecting version-related syntax errors in the parser ([#16090](https://github.com/astral-sh/ruff/pull/16090))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pylint`\] Mark fix unsafe (`PLW1507`) ([#16343](https://github.com/astral-sh/ruff/pull/16343))
|
||||
- \[`pylint`\] Catch `case np.nan`/`case math.nan` in `match` statements (`PLW0177`) ([#16378](https://github.com/astral-sh/ruff/pull/16378))
|
||||
- \[`ruff`\] Add more Pydantic models variants to the list of default copy semantics (`RUF012`) ([#16291](https://github.com/astral-sh/ruff/pull/16291))
|
||||
|
||||
### Server
|
||||
|
||||
- Avoid indexing the project if `configurationPreference` is `editorOnly` ([#16381](https://github.com/astral-sh/ruff/pull/16381))
|
||||
- Avoid unnecessary info at non-trace server log level ([#16389](https://github.com/astral-sh/ruff/pull/16389))
|
||||
- Expand `ruff.configuration` to allow inline config ([#16296](https://github.com/astral-sh/ruff/pull/16296))
|
||||
- Notify users for invalid client settings ([#16361](https://github.com/astral-sh/ruff/pull/16361))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `per-file-target-version` option ([#16257](https://github.com/astral-sh/ruff/pull/16257))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`refurb`\] Do not consider docstring(s) (`FURB156`) ([#16391](https://github.com/astral-sh/ruff/pull/16391))
|
||||
- \[`flake8-self`\] Ignore attribute accesses on instance-like variables (`SLF001`) ([#16149](https://github.com/astral-sh/ruff/pull/16149))
|
||||
- \[`pylint`\] Fix false positives, add missing methods, and support positional-only parameters (`PLE0302`) ([#16263](https://github.com/astral-sh/ruff/pull/16263))
|
||||
- \[`flake8-pyi`\] Mark `PYI030` fix unsafe when comments are deleted ([#16322](https://github.com/astral-sh/ruff/pull/16322))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix example for `S611` ([#16316](https://github.com/astral-sh/ruff/pull/16316))
|
||||
- Normalize inconsistent markdown headings in docstrings ([#16364](https://github.com/astral-sh/ruff/pull/16364))
|
||||
- Document MSRV policy ([#16384](https://github.com/astral-sh/ruff/pull/16384))
|
||||
|
||||
## 0.9.9
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix caching of unsupported-syntax errors ([#16425](https://github.com/astral-sh/ruff/pull/16425))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Only show unsupported-syntax errors in editors when preview mode is enabled ([#16429](https://github.com/astral-sh/ruff/pull/16429))
|
||||
|
||||
## 0.9.10
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`ruff`\] Add new rule `RUF059`: Unused unpacked assignment ([#16449](https://github.com/astral-sh/ruff/pull/16449))
|
||||
- \[`syntax-errors`\] Detect assignment expressions before Python 3.8 ([#16383](https://github.com/astral-sh/ruff/pull/16383))
|
||||
- \[`syntax-errors`\] Named expressions in decorators before Python 3.9 ([#16386](https://github.com/astral-sh/ruff/pull/16386))
|
||||
- \[`syntax-errors`\] Parenthesized keyword argument names after Python 3.8 ([#16482](https://github.com/astral-sh/ruff/pull/16482))
|
||||
- \[`syntax-errors`\] Positional-only parameters before Python 3.8 ([#16481](https://github.com/astral-sh/ruff/pull/16481))
|
||||
- \[`syntax-errors`\] Tuple unpacking in `return` and `yield` before Python 3.8 ([#16485](https://github.com/astral-sh/ruff/pull/16485))
|
||||
- \[`syntax-errors`\] Type parameter defaults before Python 3.13 ([#16447](https://github.com/astral-sh/ruff/pull/16447))
|
||||
- \[`syntax-errors`\] Type parameter lists before Python 3.12 ([#16479](https://github.com/astral-sh/ruff/pull/16479))
|
||||
- \[`syntax-errors`\] `except*` before Python 3.11 ([#16446](https://github.com/astral-sh/ruff/pull/16446))
|
||||
- \[`syntax-errors`\] `type` statements before Python 3.12 ([#16478](https://github.com/astral-sh/ruff/pull/16478))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Escape template filenames in glob patterns in configuration ([#16407](https://github.com/astral-sh/ruff/pull/16407))
|
||||
- \[`flake8-simplify`\] Exempt unittest context methods for `SIM115` rule ([#16439](https://github.com/astral-sh/ruff/pull/16439))
|
||||
- Formatter: Fix syntax error location in notebooks ([#16499](https://github.com/astral-sh/ruff/pull/16499))
|
||||
- \[`pyupgrade`\] Do not offer fix when at least one target is `global`/`nonlocal` (`UP028`) ([#16451](https://github.com/astral-sh/ruff/pull/16451))
|
||||
- \[`flake8-builtins`\] Ignore variables matching module attribute names (`A001`) ([#16454](https://github.com/astral-sh/ruff/pull/16454))
|
||||
- \[`pylint`\] Convert `code` keyword argument to a positional argument in fix for (`PLR1722`) ([#16424](https://github.com/astral-sh/ruff/pull/16424))
|
||||
|
||||
### CLI
|
||||
|
||||
- Move rule code from `description` to `check_name` in GitLab output serializer ([#16437](https://github.com/astral-sh/ruff/pull/16437))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`pydocstyle`\] Clarify that `D417` only checks docstrings with an arguments section ([#16494](https://github.com/astral-sh/ruff/pull/16494))
|
||||
37
clippy.toml
@@ -1,26 +1,21 @@
|
||||
doc-valid-idents = [
|
||||
"..",
|
||||
"CodeQL",
|
||||
"CPython",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"SNMPv1",
|
||||
"SNMPv2",
|
||||
"SNMPv3",
|
||||
"PyFlakes",
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
]
|
||||
|
||||
ignore-interior-mutability = [
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
]
|
||||
|
||||
40
crates/red_knot/Cargo.toml
Normal file
@@ -0,0 +1,40 @@
|
||||
[package]
|
||||
name = "red_knot"
|
||||
version = "0.0.0"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
documentation.workspace = true
|
||||
repository.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
red_knot_workspace = { workspace = true }
|
||||
red_knot_server = { workspace = true }
|
||||
|
||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
clap = { workspace = true, features = ["wrap_help"] }
|
||||
colored = { workspace = true }
|
||||
countme = { workspace = true, features = ["enable"] }
|
||||
crossbeam = { workspace = true }
|
||||
ctrlc = { version = "3.4.4" }
|
||||
rayon = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tracing = { workspace = true, features = ["release_max_level_debug"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
||||
tracing-flame = { workspace = true }
|
||||
tracing-tree = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
filetime = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
(binary image: 40 KiB before, 40 KiB after)
123
crates/red_knot/docs/tracing.md
Normal file
@@ -0,0 +1,123 @@
|
||||
# Tracing
|
||||
|
||||
Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
|
||||
Note that tracing messages with severity `debug` or greater are user-facing and should be phrased accordingly.
|
||||
Tracing spans are only shown when using `-vvv`.
|
||||
|
||||
## Verbosity levels
|
||||
|
||||
The CLI supports different verbosity levels.
|
||||
|
||||
- default: Only show errors and warnings.
|
||||
- `-v` activates `info!`: Shows generally useful information such as the paths of configuration files or the detected platform. It's not a lot of messages, so it's something you might enable by default in CI; `cargo build`, for example, shows you which packages are fresh at this level.
|
||||
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files you'll get pages and pages of output, but each line is linked to a specific action or state change.
|
||||
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level you're logging everything. Most of this output is wasted, and it's really slow; we dump, for example, the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating (see the example below).
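
For example, a typical debugging invocation could look like this (a sketch only; it assumes the CLI binary is invoked as `red_knot`, as in the profiling example further down, and that the project lives in `../test`):

```bash
# Run a check with debug-level messages and timestamps (`-vv`).
red_knot --current-directory=../test -vv

# Same check with trace-level messages and tracing spans (`-vvv`, debug builds only).
red_knot --current-directory=../test -vvv
```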
|
||||
|
||||
## `RED_KNOT_LOG`
|
||||
|
||||
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
|
||||
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
|
||||
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
|
||||
|
||||
### Examples
|
||||
|
||||
#### Show all debug messages
|
||||
|
||||
Shows debug messages from all crates.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG=debug
|
||||
```
|
||||
|
||||
#### Show salsa query execution messages
|
||||
|
||||
Show the salsa `execute: my_query` messages in addition to all Red Knot messages.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
|
||||
```
|
||||
|
||||
#### Show typing traces
|
||||
|
||||
Only show traces for the `red_knot_python_semantic::types` module.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG="red_knot_python_semantic::types"
|
||||
```
|
||||
|
||||
Note: Ensure that you use `-vvv` to see tracing spans.
|
||||
|
||||
#### Show messages for a single file
|
||||
|
||||
Shows all messages that are inside of a span for a specific file.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
|
||||
```
|
||||
|
||||
**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
|
||||
whether one of its children has the file `x.py`.
|
||||
|
||||
**Note**: Salsa currently logs entire memoized values, in our case the source text and the parsed AST.
|
||||
This very quickly leads to extremely long outputs.
|
||||
|
||||
## Tracing and Salsa
|
||||
|
||||
Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error` because it isn't guaranteed
|
||||
that the query will execute again after restoring from a persistent cache, in which case the user won't see the message.
|
||||
|
||||
For example, don't use `tracing` to tell the user that generating a lint violation failed,
|
||||
because the message would only be shown when linting the file the first time, but not on subsequent analysis
|
||||
runs or when restoring from a persistent cache. This can be confusing for users because they
|
||||
don't understand why a specific lint violation isn't raised. Instead, change your
|
||||
query to return the failure as part of the query's result or use a Salsa accumulator, as sketched below.
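
The following is a minimal sketch of the first option, surfacing the failure through the query's result instead of a log line. It is illustrative only: `CheckOutcome`, `check_file`, and `run_lint_rules` are hypothetical names, not part of Red Knot's actual API.

```rust
/// Hypothetical result type: the failure travels with the query result,
/// so it is visible on every run, including runs served from a cache.
#[derive(Debug, Clone)]
pub enum CheckOutcome {
    /// The rules ran; here are the diagnostics (possibly empty).
    Diagnostics(Vec<String>),
    /// A rule could not run at all, reported here instead of via `tracing`.
    RuleFailed { rule: &'static str, reason: String },
}

/// Hypothetical query body: map an internal error into the result instead of logging it.
pub fn check_file(source: &str) -> CheckOutcome {
    match run_lint_rules(source) {
        Ok(diagnostics) => CheckOutcome::Diagnostics(diagnostics),
        Err(reason) => CheckOutcome::RuleFailed {
            rule: "example-rule",
            reason,
        },
    }
}

/// Placeholder for the real rule implementations.
fn run_lint_rules(_source: &str) -> Result<Vec<String>, String> {
    Ok(Vec::new())
}
```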
|
||||
|
||||
## Tracing in tests
|
||||
|
||||
You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.
|
||||
|
||||
```rust
|
||||
use ruff_db::testing::setup_logging;
|
||||
|
||||
#[test]
|
||||
fn test() {
|
||||
let _logging = setup_logging();
|
||||
|
||||
tracing::info!("This message will be printed to stderr");
|
||||
}
|
||||
```
|
||||
|
||||
Note: Most test runners capture stderr and only show its output when a test fails.
|
||||
|
||||
Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
|
||||
called **once**.
|
||||
|
||||
## Release builds
|
||||
|
||||
`trace!` events are removed in release builds.
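
This is enforced by a Cargo feature on the `tracing` dependency (see the crate's `Cargo.toml` above), which statically caps the maximum level in release builds so that `trace!` call sites compile away:

```toml
# From crates/red_knot/Cargo.toml: cap the statically enabled level at `debug`
# in release builds, so `trace!` events are compiled out entirely.
tracing = { workspace = true, features = ["release_max_level_debug"] }
```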
|
||||
|
||||
## Profiling
|
||||
|
||||
Red Knot writes a folded stack trace named `tracing.folded` to the current directory when the `RED_KNOT_LOG_PROFILE` environment variable is set to `1` or `true`.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
|
||||
```
|
||||
|
||||
You can convert the textual representation into a visual one using `inferno`.
|
||||
|
||||
```shell
|
||||
cargo install inferno
|
||||
```
|
||||
|
||||
```shell
|
||||
# flamegraph
|
||||
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg
|
||||
|
||||
# flamechart
|
||||
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
|
||||
```
|
||||
|
||||

|
||||
|
||||
See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
|
||||
254
crates/red_knot/src/logging.rs
Normal file
@@ -0,0 +1,254 @@
|
||||
//! Sets up logging for Red Knot
|
||||
|
||||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use std::fmt;
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use tracing::{Event, Subscriber};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::fmt::format::Writer;
|
||||
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
|
||||
use tracing_subscriber::registry::LookupSpan;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
/// Logging flags to `#[command(flatten)]` into your CLI
|
||||
#[derive(clap::Args, Debug, Clone, Default)]
|
||||
#[command(about = None, long_about = None)]
|
||||
pub(crate) struct Verbosity {
|
||||
#[arg(
|
||||
long,
|
||||
short = 'v',
|
||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
)]
|
||||
verbose: u8,
|
||||
}
|
||||
|
||||
impl Verbosity {
|
||||
/// Returns the verbosity level based on the number of `-v` flags.
|
||||
///
|
||||
/// Returns `None` if the user did not specify any verbosity flags.
|
||||
pub(crate) fn level(&self) -> VerbosityLevel {
|
||||
match self.verbose {
|
||||
0 => VerbosityLevel::Default,
|
||||
1 => VerbosityLevel::Verbose,
|
||||
2 => VerbosityLevel::ExtraVerbose,
|
||||
_ => VerbosityLevel::Trace,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub(crate) enum VerbosityLevel {
|
||||
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
|
||||
Default,
|
||||
|
||||
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
|
||||
/// Corresponds to `-v`.
|
||||
Verbose,
|
||||
|
||||
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
|
||||
/// Corresponds to `-vv`
|
||||
ExtraVerbose,
|
||||
|
||||
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
|
||||
Trace,
|
||||
}
|
||||
|
||||
impl VerbosityLevel {
|
||||
const fn level_filter(self) -> LevelFilter {
|
||||
match self {
|
||||
VerbosityLevel::Default => LevelFilter::WARN,
|
||||
VerbosityLevel::Verbose => LevelFilter::INFO,
|
||||
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
|
||||
VerbosityLevel::Trace => LevelFilter::TRACE,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) const fn is_trace(self) -> bool {
|
||||
matches!(self, VerbosityLevel::Trace)
|
||||
}
|
||||
|
||||
pub(crate) const fn is_extra_verbose(self) -> bool {
|
||||
matches!(self, VerbosityLevel::ExtraVerbose)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
|
||||
use tracing_subscriber::prelude::*;
|
||||
|
||||
// The `RED_KNOT_LOG` environment variable overrides the default log level.
|
||||
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
|
||||
EnvFilter::builder()
|
||||
.parse(log_env_variable)
|
||||
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
|
||||
} else {
|
||||
match level {
|
||||
VerbosityLevel::Default => {
|
||||
// Show warning traces
|
||||
EnvFilter::default().add_directive(LevelFilter::WARN.into())
|
||||
}
|
||||
level => {
|
||||
let level_filter = level.level_filter();
|
||||
|
||||
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
|
||||
let filter = EnvFilter::default().add_directive(
|
||||
format!("red_knot={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
);
|
||||
|
||||
filter.add_directive(
|
||||
format!("ruff={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (profiling_layer, guard) = setup_profile();
|
||||
|
||||
let registry = tracing_subscriber::registry()
|
||||
.with(filter)
|
||||
.with(profiling_layer);
|
||||
|
||||
if level.is_trace() {
|
||||
let subscriber = registry.with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(std::io::stderr)
|
||||
.with_timer(tracing_tree::time::Uptime::default()),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
} else {
|
||||
let subscriber = registry.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.event_format(RedKnotFormat {
|
||||
display_level: true,
|
||||
display_timestamp: level.is_extra_verbose(),
|
||||
show_spans: false,
|
||||
})
|
||||
.with_writer(std::io::stderr),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
}
|
||||
|
||||
Ok(TracingGuard {
|
||||
_flame_guard: guard,
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn setup_profile<S>() -> (
|
||||
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
|
||||
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
)
|
||||
where
|
||||
S: Subscriber + for<'span> LookupSpan<'span>,
|
||||
{
|
||||
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
|
||||
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
|
||||
.expect("Flame layer to be created");
|
||||
(Some(layer), Some(guard))
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TracingGuard {
|
||||
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
}
|
||||
|
||||
struct RedKnotFormat {
|
||||
display_timestamp: bool,
|
||||
display_level: bool,
|
||||
show_spans: bool,
|
||||
}
|
||||
|
||||
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
|
||||
impl<S, N> FormatEvent<S, N> for RedKnotFormat
|
||||
where
|
||||
S: Subscriber + for<'a> LookupSpan<'a>,
|
||||
N: for<'a> FormatFields<'a> + 'static,
|
||||
{
|
||||
fn format_event(
|
||||
&self,
|
||||
ctx: &FmtContext<'_, S, N>,
|
||||
mut writer: Writer<'_>,
|
||||
event: &Event<'_>,
|
||||
) -> fmt::Result {
|
||||
let meta = event.metadata();
|
||||
let ansi = writer.has_ansi_escapes();
|
||||
|
||||
if self.display_timestamp {
|
||||
let timestamp = chrono::Local::now()
|
||||
.format("%Y-%m-%d %H:%M:%S.%f")
|
||||
.to_string();
|
||||
if ansi {
|
||||
write!(writer, "{} ", timestamp.dimmed())?;
|
||||
} else {
|
||||
write!(
|
||||
writer,
|
||||
"{} ",
|
||||
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.display_level {
|
||||
let level = meta.level();
|
||||
// Same colors as tracing
|
||||
if ansi {
|
||||
let formatted_level = level.to_string();
|
||||
match *level {
|
||||
tracing::Level::TRACE => {
|
||||
write!(writer, "{} ", formatted_level.purple().bold())?;
|
||||
}
|
||||
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
|
||||
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
|
||||
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
|
||||
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
|
||||
}
|
||||
} else {
|
||||
write!(writer, "{level} ")?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.show_spans {
|
||||
let span = event.parent();
|
||||
let mut seen = false;
|
||||
|
||||
let span = span
|
||||
.and_then(|id| ctx.span(id))
|
||||
.or_else(|| ctx.lookup_current());
|
||||
|
||||
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
|
||||
|
||||
for span in scope {
|
||||
seen = true;
|
||||
if ansi {
|
||||
write!(writer, "{}:", span.metadata().name().bold())?;
|
||||
} else {
|
||||
write!(writer, "{}:", span.metadata().name())?;
|
||||
}
|
||||
}
|
||||
|
||||
if seen {
|
||||
writer.write_char(' ')?;
|
||||
}
|
||||
}
|
||||
|
||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
||||
|
||||
writeln!(writer)
|
||||
}
|
||||
}
|
||||
374
crates/red_knot/src/main.rs
Normal file
@@ -0,0 +1,374 @@
|
||||
use std::process::{ExitCode, Termination};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use clap::Parser;
|
||||
use colored::Colorize;
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
|
||||
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
|
||||
use red_knot_server::run_server;
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::site_packages::VirtualEnvironment;
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::WorkspaceWatcher;
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
|
||||
use target_version::TargetVersion;
|
||||
|
||||
use crate::logging::{setup_tracing, Verbosity};
|
||||
|
||||
mod logging;
|
||||
mod target_version;
|
||||
mod verbosity;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
name = "red-knot",
|
||||
about = "An extremely fast Python type checker."
|
||||
)]
|
||||
#[command(version)]
|
||||
struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Option<Command>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Changes the current working directory.",
|
||||
long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
current_directory: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Path to the virtual environment the project uses",
|
||||
long_help = "\
|
||||
Path to the virtual environment the project uses. \
|
||||
If provided, red-knot will use the `site-packages` directory of this virtual environment \
|
||||
to resolve type information for the project's third-party dependencies.",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
venv_path: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "DIRECTORY",
|
||||
help = "Custom directory to use for stdlib typeshed stubs"
|
||||
)]
|
||||
custom_typeshed_dir: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "PATH",
|
||||
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
|
||||
)]
|
||||
extra_search_path: Vec<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Python version to assume when resolving types",
|
||||
default_value_t = TargetVersion::default(),
|
||||
value_name="VERSION")
|
||||
]
|
||||
target_version: TargetVersion,
|
||||
|
||||
#[clap(flatten)]
|
||||
verbosity: Verbosity,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Run in watch mode by re-running whenever files change",
|
||||
short = 'W'
|
||||
)]
|
||||
watch: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum Command {
|
||||
/// Start the language server
|
||||
Server,
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||
pub fn main() -> ExitStatus {
|
||||
run().unwrap_or_else(|error| {
|
||||
use std::io::Write;
|
||||
|
||||
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
|
||||
let mut stderr = std::io::stderr().lock();
|
||||
|
||||
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
|
||||
// some reason (e.g. failed to resolve the configuration)
|
||||
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
|
||||
// Currently we generally only see one error, but e.g. with io errors when resolving
|
||||
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
|
||||
// "failed to read file: subdir/pyproject.toml")
|
||||
for cause in error.chain() {
|
||||
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
||||
}
|
||||
|
||||
ExitStatus::Error
|
||||
})
|
||||
}
|
||||
|
||||
fn run() -> anyhow::Result<ExitStatus> {
|
||||
let Args {
|
||||
command,
|
||||
current_directory,
|
||||
custom_typeshed_dir,
|
||||
extra_search_path: extra_paths,
|
||||
venv_path,
|
||||
target_version,
|
||||
verbosity,
|
||||
watch,
|
||||
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
|
||||
|
||||
if matches!(command, Some(Command::Server)) {
|
||||
return run_server().map(|()| ExitStatus::Success);
|
||||
}
|
||||
|
||||
let verbosity = verbosity.level();
|
||||
countme::enable(verbosity.is_trace());
|
||||
let _guard = setup_tracing(verbosity)?;
|
||||
|
||||
// The base path that all CLI arguments are relative to.
|
||||
let cli_base_path = {
|
||||
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
||||
SystemPathBuf::from_path_buf(cwd)
|
||||
.map_err(|path| {
|
||||
anyhow!(
|
||||
"The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
|
||||
path.display()
|
||||
)
|
||||
})?
|
||||
};
|
||||
|
||||
let cwd = current_directory
|
||||
.map(|cwd| {
|
||||
if cwd.as_std_path().is_dir() {
|
||||
Ok(SystemPath::absolute(&cwd, &cli_base_path))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Provided current-directory path '{cwd}' is not a directory."
|
||||
))
|
||||
}
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_else(|| cli_base_path.clone());
|
||||
|
||||
let system = OsSystem::new(cwd.clone());
|
||||
let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
|
||||
|
||||
// TODO: Verify the remaining search path settings eagerly.
|
||||
let site_packages = venv_path
|
||||
.map(|path| {
|
||||
VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
|
||||
.and_then(|venv| venv.site_packages_directories(&system))
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
// TODO: Respect the settings from the workspace metadata when resolving the program settings.
|
||||
let program_settings = ProgramSettings {
|
||||
target_version: target_version.into(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths,
|
||||
src_root: workspace_metadata.root().to_path_buf(),
|
||||
custom_typeshed: custom_typeshed_dir,
|
||||
site_packages,
|
||||
},
|
||||
};
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
// cache and load the cache if it exists.
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
ctrlc::set_handler(move || {
|
||||
let mut lock = main_loop_cancellation_token.lock().unwrap();
|
||||
|
||||
if let Some(token) = lock.take() {
|
||||
token.stop();
|
||||
}
|
||||
})?;
|
||||
|
||||
let exit_status = if watch {
|
||||
main_loop.watch(&mut db)?
|
||||
} else {
|
||||
main_loop.run(&mut db)
|
||||
};
|
||||
|
||||
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
||||
|
||||
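// Deliberately leak the database instead of dropping it: the process exits immediately afterwards, so running its (potentially expensive) destructor here buys nothing.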
std::mem::forget(db);
|
||||
|
||||
Ok(exit_status)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum ExitStatus {
|
||||
/// Checking was successful and there were no errors.
|
||||
Success = 0,
|
||||
|
||||
/// Checking was successful but there were errors.
|
||||
Failure = 1,
|
||||
|
||||
/// Checking failed.
|
||||
Error = 2,
|
||||
}
|
||||
|
||||
impl Termination for ExitStatus {
|
||||
fn report(self) -> ExitCode {
|
||||
ExitCode::from(self as u8)
|
||||
}
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
/// Sender that can be used to send messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
|
||||
/// Receiver for the messages sent **to** the main loop.
|
||||
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
|
||||
/// The file system watcher, if running in watch mode.
|
||||
watcher: Option<WorkspaceWatcher>,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new() -> (Self, MainLoopCancellationToken) {
|
||||
let (sender, receiver) = crossbeam_channel::bounded(10);
|
||||
|
||||
(
|
||||
Self {
|
||||
sender: sender.clone(),
|
||||
receiver,
|
||||
watcher: None,
|
||||
},
|
||||
MainLoopCancellationToken { sender },
|
||||
)
|
||||
}
|
||||
|
||||
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
|
||||
tracing::debug!("Starting watch mode");
|
||||
let sender = self.sender.clone();
|
||||
let watcher = watch::directory_watcher(move |event| {
|
||||
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
|
||||
})?;
|
||||
|
||||
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
|
||||
|
||||
self.run(db);
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
||||
fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
|
||||
let result = self.main_loop(db);
|
||||
|
||||
tracing::debug!("Exiting main loop");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
// Schedule the first check.
|
||||
tracing::debug!("Starting main loop");
|
||||
|
||||
let mut revision = 0u64;
|
||||
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
MainLoopMessage::CheckWorkspace => {
|
||||
let db = db.snapshot();
|
||||
let sender = self.sender.clone();
|
||||
|
||||
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || {
|
||||
if let Ok(result) = db.check() {
|
||||
// Send the result back to the main loop for printing.
|
||||
sender
|
||||
.send(MainLoopMessage::CheckCompleted { result, revision })
|
||||
.unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
MainLoopMessage::CheckCompleted {
|
||||
result,
|
||||
revision: check_revision,
|
||||
} => {
|
||||
let has_diagnostics = !result.is_empty();
|
||||
if check_revision == revision {
|
||||
for diagnostic in result {
|
||||
tracing::error!("{}", diagnostic);
|
||||
}
|
||||
} else {
|
||||
tracing::debug!(
|
||||
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
|
||||
);
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return if has_diagnostics {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
};
|
||||
}
|
||||
|
||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||
}
|
||||
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
revision += 1;
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
db.apply_changes(changes);
|
||||
if let Some(watcher) = self.watcher.as_mut() {
|
||||
watcher.update(db);
|
||||
}
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
// Cancel any pending queries and wait for them to complete.
|
||||
// TODO: Don't use Salsa internal APIs
|
||||
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||
let _ = db.zalsa_mut();
|
||||
return ExitStatus::Success;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Waiting for next main loop message.");
|
||||
}
|
||||
|
||||
ExitStatus::Success
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MainLoopCancellationToken {
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
}
|
||||
|
||||
impl MainLoopCancellationToken {
|
||||
fn stop(self) {
|
||||
self.sender.send(MainLoopMessage::Exit).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckWorkspace,
|
||||
CheckCompleted { result: Vec<String>, revision: u64 },
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
Exit,
|
||||
}
|
||||
48
crates/red_knot/src/target_version.rs
Normal file
@@ -0,0 +1,48 @@
|
||||
/// Enumeration of all supported Python versions
|
||||
///
|
||||
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub enum TargetVersion {
|
||||
Py37,
|
||||
#[default]
|
||||
Py38,
|
||||
Py39,
|
||||
Py310,
|
||||
Py311,
|
||||
Py312,
|
||||
Py313,
|
||||
}
|
||||
|
||||
impl TargetVersion {
|
||||
const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Py37 => "py37",
|
||||
Self::Py38 => "py38",
|
||||
Self::Py39 => "py39",
|
||||
Self::Py310 => "py310",
|
||||
Self::Py311 => "py311",
|
||||
Self::Py312 => "py312",
|
||||
Self::Py313 => "py313",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for TargetVersion {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
|
||||
fn from(value: TargetVersion) -> Self {
|
||||
match value {
|
||||
TargetVersion::Py37 => Self::PY37,
|
||||
TargetVersion::Py38 => Self::PY38,
|
||||
TargetVersion::Py39 => Self::PY39,
|
||||
TargetVersion::Py310 => Self::PY310,
|
||||
TargetVersion::Py311 => Self::PY311,
|
||||
TargetVersion::Py312 => Self::PY312,
|
||||
TargetVersion::Py313 => Self::PY313,
|
||||
}
|
||||
}
|
||||
}
|
||||
1
crates/red_knot/src/verbosity.rs
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
1239
crates/red_knot/tests/file_watching.rs
Normal file
File diff suppressed because it is too large
52
crates/red_knot_python_semantic/Cargo.toml
Normal file
@@ -0,0 +1,52 @@
|
||||
[package]
|
||||
name = "red_knot_python_semantic"
|
||||
version = "0.0.0"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
ruff_db = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_stdlib = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
camino = { workspace = true }
|
||||
compact_str = { workspace = true }
|
||||
countme = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
ordermap = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
static_assertions = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
path-slash = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
zip = { workspace = true, features = ["zstd", "deflate"] }
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_db = { workspace = true, features = ["os", "testing"] }
|
||||
ruff_python_parser = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
insta = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
9
crates/red_knot_python_semantic/README.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Red Knot
|
||||
|
||||
Semantic analysis for the red-knot project.
|
||||
|
||||
## Vendored types for the stdlib
|
||||
|
||||
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
|
||||
|
||||
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
|
||||
87
crates/red_knot_python_semantic/build.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
//! Build script to package our vendored typeshed files
|
||||
//! into a zip archive that can be included in the Ruff binary.
|
||||
//!
|
||||
//! This script should be automatically run at build time
|
||||
//! whenever the script itself changes, or whenever any files
|
||||
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.
|
||||
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
|
||||
use path_slash::PathExt;
|
||||
use zip::result::ZipResult;
|
||||
use zip::write::{FileOptions, ZipWriter};
|
||||
use zip::CompressionMethod;
|
||||
|
||||
const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
|
||||
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
|
||||
|
||||
/// Recursively zip the contents of an entire directory.
|
||||
///
|
||||
/// This routine is adapted from a recipe at
|
||||
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
|
||||
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
|
||||
let mut zip = ZipWriter::new(writer);
|
||||
|
||||
// Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
|
||||
// [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
|
||||
// by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
|
||||
// (WASM itself is already slower than a native build for a specific platform).
|
||||
// We can't use `#[cfg(...)]` here because the target-arch in a build script is the
|
||||
// architecture of the system running the build script and not the architecture of the build-target.
|
||||
// That's why we use the `TARGET` environment variable here.
|
||||
let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
|
||||
CompressionMethod::Deflated
|
||||
} else {
|
||||
CompressionMethod::Zstd
|
||||
};
|
||||
|
||||
let options = FileOptions::default()
|
||||
.compression_method(method)
|
||||
.unix_permissions(0o644);
|
||||
|
||||
for entry in walkdir::WalkDir::new(directory_path) {
|
||||
let dir_entry = entry.unwrap();
|
||||
let absolute_path = dir_entry.path();
|
||||
let normalized_relative_path = absolute_path
|
||||
.strip_prefix(Path::new(directory_path))
|
||||
.unwrap()
|
||||
.to_slash()
|
||||
.expect("Unexpected non-utf8 typeshed path!");
|
||||
|
||||
// Write file or directory explicitly
|
||||
// Some unzip tools unzip files with directory paths correctly, some do not!
|
||||
if absolute_path.is_file() {
|
||||
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
|
||||
zip.start_file(normalized_relative_path, options)?;
|
||||
let mut f = File::open(absolute_path)?;
|
||||
std::io::copy(&mut f, &mut zip).unwrap();
|
||||
} else if !normalized_relative_path.is_empty() {
|
||||
// Only if not root! Avoids path spec / warning
|
||||
// and mapname conversion failed error on unzip
|
||||
println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
|
||||
zip.add_directory(normalized_relative_path, options)?;
|
||||
}
|
||||
}
|
||||
zip.finish()
|
||||
}
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
|
||||
assert!(
|
||||
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
|
||||
"Where is typeshed?"
|
||||
);
|
||||
let out_dir = std::env::var("OUT_DIR").unwrap();
|
||||
|
||||
// N.B. Deliberately using `format!()` instead of `Path::join()` here,
|
||||
// so that we use `/` as a path separator on all platforms.
|
||||
// That enables us to load the typeshed zip at compile time in `module.rs`
|
||||
// (otherwise we'd have to dynamically determine the exact path to the typeshed zip
|
||||
// based on the default path separator for the specific platform we're on,
|
||||
// which can't be done at compile time.)
|
||||
let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");
|
||||
|
||||
let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
|
||||
zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
|
||||
}
|
||||
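As a side note, a minimal sketch of how a build-script output like this is typically consumed at compile time; the actual consumer in `module.rs` is not part of this diff, so the constant name below is illustrative only:

```rust
// Illustrative only: embed the zip produced by the build script above.
// `OUT_DIR` and the "/zipped_typeshed.zip" suffix match the build script;
// the constant name is an assumption, not taken from this diff.
const ZIPPED_TYPESHED: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
```

This is also why the build script insists on `/` as the separator: `include_bytes!` needs a single path literal that is valid at compile time on every host.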
163 crates/red_knot_python_semantic/src/ast_node_ref.rs Normal file
@@ -0,0 +1,163 @@
|
||||
use std::hash::Hash;
|
||||
use std::ops::Deref;
|
||||
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
|
||||
/// Ref-counted owned reference to an AST node.
|
||||
///
|
||||
/// The type holds an owned reference to the node's ref-counted [`ParsedModule`].
|
||||
/// Holding on to the node's [`ParsedModule`] guarantees that the reference to the
|
||||
/// node must still be valid.
|
||||
///
|
||||
/// Holding on to any [`AstNodeRef`] prevents the [`ParsedModule`] from being released.
|
||||
///
|
||||
/// ## Equality
|
||||
/// Two `AstNodeRef` are considered equal if their wrapped nodes are equal.
|
||||
#[derive(Clone)]
|
||||
pub struct AstNodeRef<T> {
|
||||
/// Owned reference to the node's [`ParsedModule`].
|
||||
///
|
||||
/// The node's reference is guaranteed to remain valid as long as it's enclosing
|
||||
/// [`ParsedModule`] is alive.
|
||||
_parsed: ParsedModule,
|
||||
|
||||
/// Pointer to the referenced node.
|
||||
node: std::ptr::NonNull<T>,
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
impl<T> AstNodeRef<T> {
|
||||
/// Creates a new `AstNodeRef` that reference `node`. The `parsed` is the [`ParsedModule`] to
|
||||
/// which the `AstNodeRef` belongs.
|
||||
///
|
||||
/// ## Safety
|
||||
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
|
||||
/// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
|
||||
/// the invariant `node belongs to parsed` is upheld.
|
||||
|
||||
pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
|
||||
Self {
|
||||
_parsed: parsed,
|
||||
node: std::ptr::NonNull::from(node),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the wrapped node.
|
||||
pub fn node(&self) -> &T {
|
||||
// SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
|
||||
// alive and not moved.
|
||||
unsafe { self.node.as_ref() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for AstNodeRef<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.node()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::fmt::Debug for AstNodeRef<T>
|
||||
where
|
||||
T: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("AstNodeRef").field(&self.node()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PartialEq for AstNodeRef<T>
|
||||
where
|
||||
T: PartialEq,
|
||||
{
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.node().eq(other.node())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Eq for AstNodeRef<T> where T: Eq {}
|
||||
|
||||
impl<T> Hash for AstNodeRef<T>
|
||||
where
|
||||
T: Hash,
|
||||
{
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
self.node().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
unsafe impl<T> Send for AstNodeRef<T> where T: Send {}
|
||||
#[allow(unsafe_code)]
|
||||
unsafe impl<T> Sync for AstNodeRef<T> where T: Sync {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_parser::parse_unchecked_source;
|
||||
|
||||
#[test]
|
||||
#[allow(unsafe_code)]
|
||||
fn equality() {
|
||||
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
|
||||
let parsed = ParsedModule::new(parsed_raw.clone());
|
||||
|
||||
let stmt = &parsed.syntax().body[0];
|
||||
|
||||
let node1 = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
let node2 = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
|
||||
assert_eq!(node1, node2);
|
||||
|
||||
// Compare from different trees
|
||||
let cloned = ParsedModule::new(parsed_raw);
|
||||
let stmt_cloned = &cloned.syntax().body[0];
|
||||
let cloned_node = unsafe { AstNodeRef::new(cloned.clone(), stmt_cloned) };
|
||||
|
||||
assert_eq!(node1, cloned_node);
|
||||
|
||||
let other_raw = parse_unchecked_source("2 + 2", PySourceType::Python);
|
||||
let other = ParsedModule::new(other_raw);
|
||||
|
||||
let other_stmt = &other.syntax().body[0];
|
||||
let other_node = unsafe { AstNodeRef::new(other.clone(), other_stmt) };
|
||||
|
||||
assert_ne!(node1, other_node);
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
#[test]
|
||||
fn inequality() {
|
||||
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
|
||||
let parsed = ParsedModule::new(parsed_raw.clone());
|
||||
|
||||
let stmt = &parsed.syntax().body[0];
|
||||
let node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
|
||||
let other_raw = parse_unchecked_source("2 + 2", PySourceType::Python);
|
||||
let other = ParsedModule::new(other_raw);
|
||||
|
||||
let other_stmt = &other.syntax().body[0];
|
||||
let other_node = unsafe { AstNodeRef::new(other.clone(), other_stmt) };
|
||||
|
||||
assert_ne!(node, other_node);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(unsafe_code)]
|
||||
fn debug() {
|
||||
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
|
||||
let parsed = ParsedModule::new(parsed_raw.clone());
|
||||
|
||||
let stmt = &parsed.syntax().body[0];
|
||||
|
||||
let stmt_node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
|
||||
let debug = format!("{stmt_node:?}");
|
||||
|
||||
assert_eq!(debug, format!("AstNodeRef({stmt:?})"));
|
||||
}
|
||||
}
|
||||
16 crates/red_knot_python_semantic/src/builtins.rs Normal file
@@ -0,0 +1,16 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db, builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
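A brief usage sketch for the query above, assuming a `db` value implementing this crate's `Db` trait; the surrounding lookup logic is elided and the function name is hypothetical:

```rust
// Sketch: resolve the builtins scope before falling back to it during name lookup.
// `lookup_in_builtins` is a hypothetical caller, not part of this diff.
fn lookup_in_builtins(db: &dyn Db, _name: &str) {
    if let Some(_scope) = builtins_scope(db) {
        // ... look the symbol up in the builtins scope ...
    } else {
        // A custom typeshed without `builtins.pyi` yields no builtins scope.
    }
}
```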
112 crates/red_knot_python_semantic/src/db.rs Normal file
@@ -0,0 +1,112 @@
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};

/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
    fn is_file_open(&self, file: File) -> bool;
}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync::Arc;

    use crate::module_resolver::vendored_typeshed_stubs;
    use ruff_db::files::{File, Files};
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Upcast};

    use super::Db;

    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        files: Files,
        system: TestSystem,
        vendored: VendoredFileSystem,
        events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
    }

    impl TestDb {
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().clone(),
                events: std::sync::Arc::default(),
                files: Files::default(),
            }
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    #[salsa::db]
    impl SourceDb for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

    impl Upcast<dyn SourceDb> for TestDb {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
            self
        }
    }

    #[salsa::db]
    impl Db for TestDb {
        fn is_file_open(&self, file: File) -> bool {
            !file.path(self).is_vendored_path()
        }
    }

    #[salsa::db]
    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
            let event = event();
            tracing::trace!("event: {event:?}");
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }
}
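A hedged sketch of how the captured event log might be used in a test. `TestDb` and `take_salsa_events` come from the module above; `salsa::Event` is `Debug` (see `salsa_event` above), but the `"WillExecute"` event-kind name is an assumption about salsa's event variants, and the query being run is elided:

```rust
// Sketch only: count how many queries executed between two points in a test.
let mut db = TestDb::new();
// ... write files through the test system and run a query here ...
let events = db.take_salsa_events();
// Matching on the debug output is a shortcut; the exact event shape depends
// on the salsa version in use.
let executed = events
    .iter()
    .filter(|event| format!("{event:?}").contains("WillExecute"))
    .count();
println!("queries executed: {executed}");
```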
24 crates/red_knot_python_semantic/src/lib.rs Normal file
@@ -0,0 +1,24 @@
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
mod node_key;
mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub mod types;

type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;
216 crates/red_knot_python_semantic/src/module_name.rs Normal file
@@ -0,0 +1,216 @@
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
|
||||
use compact_str::{CompactString, ToCompactString};
|
||||
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
|
||||
/// A module name, e.g. `foo.bar`.
|
||||
///
|
||||
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
|
||||
pub struct ModuleName(compact_str::CompactString);
|
||||
|
||||
impl ModuleName {
|
||||
/// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
|
||||
/// module name and `None` otherwise.
|
||||
///
|
||||
/// The module name is invalid if:
|
||||
///
|
||||
/// * The name is empty
|
||||
/// * The name is relative
|
||||
/// * The name ends with a `.`
|
||||
/// * The name contains a sequence of multiple dots
|
||||
/// * A component of a name (the part between two dots) isn't a valid python identifier.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn new(name: &str) -> Option<Self> {
|
||||
Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
|
||||
}
|
||||
|
||||
/// Creates a new module name for `name` where `name` is a static string.
|
||||
/// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
|
||||
///
|
||||
/// The module name is invalid if:
|
||||
///
|
||||
/// * The name is empty
|
||||
/// * The name is relative
|
||||
/// * The name ends with a `.`
|
||||
/// * The name contains a sequence of multiple dots
|
||||
/// * A component of a name (the part between two dots) isn't a valid python identifier.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
|
||||
/// assert_eq!(ModuleName::new_static(""), None);
|
||||
/// assert_eq!(ModuleName::new_static("..foo"), None);
|
||||
/// assert_eq!(ModuleName::new_static(".foo"), None);
|
||||
/// assert_eq!(ModuleName::new_static("foo."), None);
|
||||
/// assert_eq!(ModuleName::new_static("foo..bar"), None);
|
||||
/// assert_eq!(ModuleName::new_static("2000"), None);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn new_static(name: &'static str) -> Option<Self> {
|
||||
Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn is_valid_name(name: &str) -> bool {
|
||||
!name.is_empty() && name.split('.').all(is_identifier)
|
||||
}
|
||||
|
||||
/// An iterator over the components of the module name:
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
|
||||
self.0.split('.')
|
||||
}
|
||||
|
||||
/// The name of this module's immediate parent, if it has a parent.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
|
||||
/// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn parent(&self) -> Option<ModuleName> {
|
||||
let (parent, _) = self.0.rsplit_once('.')?;
|
||||
Some(Self(parent.to_compact_string()))
|
||||
}
|
||||
|
||||
/// Returns `true` if the name starts with `other`.
|
||||
///
|
||||
/// This is equivalent to checking if `self` is a sub-module of `other`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
|
||||
///
|
||||
/// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
|
||||
/// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn starts_with(&self, other: &ModuleName) -> bool {
|
||||
let mut self_components = self.components();
|
||||
let other_components = other.components();
|
||||
|
||||
for other_component in other_components {
|
||||
if self_components.next() != Some(other_component) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
#[inline]
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
|
||||
/// Construct a [`ModuleName`] from a sequence of parts.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
|
||||
/// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
|
||||
/// assert_eq!(&*ModuleName::from_components(["a", "b", "c"]).unwrap(), "a.b.c");
|
||||
///
|
||||
/// assert_eq!(ModuleName::from_components(["a-b"]), None);
|
||||
/// assert_eq!(ModuleName::from_components(["a", "a-b"]), None);
|
||||
/// assert_eq!(ModuleName::from_components(["a", "b", "a-b-c"]), None);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn from_components<'a>(components: impl IntoIterator<Item = &'a str>) -> Option<Self> {
|
||||
let mut components = components.into_iter();
|
||||
let first_part = components.next()?;
|
||||
if !is_identifier(first_part) {
|
||||
return None;
|
||||
}
|
||||
let name = if let Some(second_part) = components.next() {
|
||||
if !is_identifier(second_part) {
|
||||
return None;
|
||||
}
|
||||
let mut name = format!("{first_part}.{second_part}");
|
||||
for part in components {
|
||||
if !is_identifier(part) {
|
||||
return None;
|
||||
}
|
||||
name.push('.');
|
||||
name.push_str(part);
|
||||
}
|
||||
CompactString::from(&name)
|
||||
} else {
|
||||
CompactString::from(first_part)
|
||||
};
|
||||
Some(Self(name))
|
||||
}
|
||||
|
||||
/// Extend `self` with the components of `other`
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// let mut module_name = ModuleName::new_static("foo").unwrap();
|
||||
/// module_name.extend(&ModuleName::new_static("bar").unwrap());
|
||||
/// assert_eq!(&module_name, "foo.bar");
|
||||
/// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
|
||||
/// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
|
||||
/// ```
|
||||
pub fn extend(&mut self, other: &ModuleName) {
|
||||
self.0.push('.');
|
||||
self.0.push_str(other);
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ModuleName {
|
||||
type Target = str;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<str> for ModuleName {
|
||||
fn eq(&self, other: &str) -> bool {
|
||||
self.as_str() == other
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<ModuleName> for str {
|
||||
fn eq(&self, other: &ModuleName) -> bool {
|
||||
self == other.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ModuleName {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
47 crates/red_knot_python_semantic/src/module_resolver/mod.rs Normal file
@@ -0,0 +1,47 @@
use std::iter::FusedIterator;

pub(crate) use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;

use crate::module_resolver::resolver::search_paths;
use crate::Db;
use resolver::SearchPathIterator;

mod module;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

/// Returns an iterator over all search paths pointing to a system path
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
    SystemModuleSearchPathsIter {
        inner: search_paths(db),
    }
}

pub struct SystemModuleSearchPathsIter<'db> {
    inner: SearchPathIterator<'db>,
}

impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
    type Item = &'db SystemPath;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let next = self.inner.next()?;

            if let Some(system_path) = next.as_system_path() {
                return Some(system_path);
            }
        }
    }
}

impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
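A small usage sketch for `system_module_search_paths`, assuming the items defined in the module above are in scope and `db` implements this crate's `Db` trait; the wrapper function name is hypothetical:

```rust
// Sketch: collect the on-disk search paths the resolver would consult,
// skipping any vendored (in-binary) paths, which the iterator already filters out.
fn on_disk_search_paths(db: &dyn Db) -> Vec<&SystemPath> {
    system_module_search_paths(db).collect()
}
```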
@@ -0,0 +1,85 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_db::files::File;
|
||||
|
||||
use super::path::SearchPath;
|
||||
use crate::module_name::ModuleName;
|
||||
|
||||
/// Representation of a Python module.
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct Module {
|
||||
inner: Arc<ModuleInner>,
|
||||
}
|
||||
|
||||
impl Module {
|
||||
pub(crate) fn new(
|
||||
name: ModuleName,
|
||||
kind: ModuleKind,
|
||||
search_path: SearchPath,
|
||||
file: File,
|
||||
) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(ModuleInner {
|
||||
name,
|
||||
kind,
|
||||
search_path,
|
||||
file,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// The absolute name of the module (e.g. `foo.bar`)
|
||||
pub fn name(&self) -> &ModuleName {
|
||||
&self.inner.name
|
||||
}
|
||||
|
||||
/// The file to the source code that defines this module
|
||||
pub fn file(&self) -> File {
|
||||
self.inner.file
|
||||
}
|
||||
|
||||
/// The search path from which the module was resolved.
|
||||
pub(crate) fn search_path(&self) -> &SearchPath {
|
||||
&self.inner.search_path
|
||||
}
|
||||
|
||||
/// Determine whether this module is a single-file module or a package
|
||||
pub fn kind(&self) -> ModuleKind {
|
||||
self.inner.kind
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Module {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Module")
|
||||
.field("name", &self.name())
|
||||
.field("kind", &self.kind())
|
||||
.field("file", &self.file())
|
||||
.field("search_path", &self.search_path())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
struct ModuleInner {
|
||||
name: ModuleName,
|
||||
kind: ModuleKind,
|
||||
search_path: SearchPath,
|
||||
file: File,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub enum ModuleKind {
|
||||
/// A single-file module (e.g. `foo.py` or `foo.pyi`)
|
||||
Module,
|
||||
|
||||
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
|
||||
Package,
|
||||
}
|
||||
|
||||
impl ModuleKind {
|
||||
pub const fn is_package(self) -> bool {
|
||||
matches!(self, ModuleKind::Package)
|
||||
}
|
||||
}
|
||||
@@ -4,15 +4,16 @@ use std::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use camino::{Utf8Path, Utf8PathBuf};
|
||||
use ruff_db::files::{File, FileError, FilePath, system_path_to_file, vendored_path_to_file};
|
||||
|
||||
use ruff_db::files::{system_path_to_file, vendored_path_to_file, File, FileError};
|
||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
||||
use ruff_db::vendored::{VendoredPath, VendoredPathBuf};
|
||||
|
||||
use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult, typeshed_versions};
|
||||
use crate::Db;
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::resolver::ResolverContext;
|
||||
use crate::site_packages::SitePackagesDiscoveryError;
|
||||
|
||||
use super::state::ResolverState;
|
||||
use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult};
|
||||
|
||||
/// A path that points to a Python module.
|
||||
///
|
||||
@@ -58,16 +59,8 @@ impl ModulePath {
|
||||
self.relative_path.push(component);
|
||||
}
|
||||
|
||||
pub(crate) fn pop(&mut self) -> bool {
|
||||
self.relative_path.pop()
|
||||
}
|
||||
|
||||
pub(super) fn search_path(&self) -> &SearchPath {
|
||||
&self.search_path
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(super) fn is_directory(&self, resolver: &ResolverContext) -> bool {
|
||||
pub(crate) fn is_directory(&self, resolver: &ResolverState) -> bool {
|
||||
let ModulePath {
|
||||
search_path,
|
||||
relative_path,
|
||||
@@ -77,21 +70,21 @@ impl ModulePath {
|
||||
| SearchPathInner::FirstParty(search_path)
|
||||
| SearchPathInner::SitePackages(search_path)
|
||||
| SearchPathInner::Editable(search_path) => {
|
||||
system_path_to_file(resolver.db, search_path.join(relative_path))
|
||||
system_path_to_file(resolver.db.upcast(), search_path.join(relative_path))
|
||||
== Err(FileError::IsADirectory)
|
||||
}
|
||||
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
|
||||
match query_stdlib_version(relative_path, resolver) {
|
||||
match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => false,
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
| TypeshedVersionsQueryResult::MaybeExists => {
|
||||
system_path_to_file(resolver.db, stdlib_root.join(relative_path))
|
||||
system_path_to_file(resolver.db.upcast(), stdlib_root.join(relative_path))
|
||||
== Err(FileError::IsADirectory)
|
||||
}
|
||||
}
|
||||
}
|
||||
SearchPathInner::StandardLibraryVendored(stdlib_root) => {
|
||||
match query_stdlib_version(relative_path, resolver) {
|
||||
match query_stdlib_version(None, relative_path, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => false,
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
| TypeshedVersionsQueryResult::MaybeExists => resolver
|
||||
@@ -103,7 +96,7 @@ impl ModulePath {
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(super) fn is_regular_package(&self, resolver: &ResolverContext) -> bool {
|
||||
pub(crate) fn is_regular_package(&self, resolver: &ResolverState) -> bool {
|
||||
let ModulePath {
|
||||
search_path,
|
||||
relative_path,
|
||||
@@ -115,23 +108,23 @@ impl ModulePath {
|
||||
| SearchPathInner::SitePackages(search_path)
|
||||
| SearchPathInner::Editable(search_path) => {
|
||||
let absolute_path = search_path.join(relative_path);
|
||||
|
||||
system_path_to_file(resolver.db, absolute_path.join("__init__.py")).is_ok()
|
||||
|| system_path_to_file(resolver.db, absolute_path.join("__init__.pyi")).is_ok()
|
||||
system_path_to_file(resolver.db.upcast(), absolute_path.join("__init__.py")).is_ok()
|
||||
|| system_path_to_file(resolver.db.upcast(), absolute_path.join("__init__.py"))
|
||||
.is_ok()
|
||||
}
|
||||
SearchPathInner::StandardLibraryCustom(search_path) => {
|
||||
match query_stdlib_version(relative_path, resolver) {
|
||||
match query_stdlib_version(Some(search_path), relative_path, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => false,
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
| TypeshedVersionsQueryResult::MaybeExists => system_path_to_file(
|
||||
resolver.db,
|
||||
resolver.db.upcast(),
|
||||
search_path.join(relative_path).join("__init__.pyi"),
|
||||
)
|
||||
.is_ok(),
|
||||
}
|
||||
}
|
||||
SearchPathInner::StandardLibraryVendored(search_path) => {
|
||||
match query_stdlib_version(relative_path, resolver) {
|
||||
match query_stdlib_version(None, relative_path, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => false,
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
| TypeshedVersionsQueryResult::MaybeExists => resolver
|
||||
@@ -142,26 +135,9 @@ impl ModulePath {
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn to_system_path(&self) -> Option<SystemPathBuf> {
|
||||
let ModulePath {
|
||||
search_path,
|
||||
relative_path,
|
||||
} = self;
|
||||
match &*search_path.0 {
|
||||
SearchPathInner::Extra(search_path)
|
||||
| SearchPathInner::FirstParty(search_path)
|
||||
| SearchPathInner::SitePackages(search_path)
|
||||
| SearchPathInner::Editable(search_path) => Some(search_path.join(relative_path)),
|
||||
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
|
||||
Some(stdlib_root.join(relative_path))
|
||||
}
|
||||
SearchPathInner::StandardLibraryVendored(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(super) fn to_file(&self, resolver: &ResolverContext) -> Option<File> {
|
||||
let db = resolver.db;
|
||||
pub(crate) fn to_file(&self, resolver: &ResolverState) -> Option<File> {
|
||||
let db = resolver.db.upcast();
|
||||
let ModulePath {
|
||||
search_path,
|
||||
relative_path,
|
||||
@@ -174,7 +150,7 @@ impl ModulePath {
|
||||
system_path_to_file(db, search_path.join(relative_path)).ok()
|
||||
}
|
||||
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
|
||||
match query_stdlib_version(relative_path, resolver) {
|
||||
match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => None,
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
| TypeshedVersionsQueryResult::MaybeExists => {
|
||||
@@ -183,7 +159,7 @@ impl ModulePath {
|
||||
}
|
||||
}
|
||||
SearchPathInner::StandardLibraryVendored(stdlib_root) => {
|
||||
match query_stdlib_version(relative_path, resolver) {
|
||||
match query_stdlib_version(None, relative_path, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => None,
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
| TypeshedVersionsQueryResult::MaybeExists => {
|
||||
@@ -204,18 +180,7 @@ impl ModulePath {
|
||||
stdlib_path_to_module_name(relative_path)
|
||||
} else {
|
||||
let parent = relative_path.parent()?;
|
||||
let parent_components = parent.components().enumerate().map(|(index, component)| {
|
||||
let component = component.as_str();
|
||||
|
||||
// For stub packages, strip the `-stubs` suffix from the first component
|
||||
// because it isn't a valid module name part AND the module name is the name without the `-stubs`.
|
||||
if index == 0 {
|
||||
component.strip_suffix("-stubs").unwrap_or(component)
|
||||
} else {
|
||||
component
|
||||
}
|
||||
});
|
||||
|
||||
let parent_components = parent.components().map(|component| component.as_str());
|
||||
let skip_final_part =
|
||||
relative_path.ends_with("__init__.py") || relative_path.ends_with("__init__.pyi");
|
||||
if skip_final_part {
|
||||
@@ -308,15 +273,19 @@ fn stdlib_path_to_module_name(relative_path: &Utf8Path) -> Option<ModuleName> {
|
||||
|
||||
#[must_use]
|
||||
fn query_stdlib_version(
|
||||
custom_stdlib_root: Option<&SystemPath>,
|
||||
relative_path: &Utf8Path,
|
||||
context: &ResolverContext,
|
||||
resolver: &ResolverState,
|
||||
) -> TypeshedVersionsQueryResult {
|
||||
let Some(module_name) = stdlib_path_to_module_name(relative_path) else {
|
||||
return TypeshedVersionsQueryResult::DoesNotExist;
|
||||
};
|
||||
let ResolverContext { db, python_version } = context;
|
||||
|
||||
typeshed_versions(*db).query_module(&module_name, *python_version)
|
||||
let ResolverState {
|
||||
db,
|
||||
typeshed_versions,
|
||||
target_version,
|
||||
} = resolver;
|
||||
typeshed_versions.query_module(*db, &module_name, custom_stdlib_root, *target_version)
|
||||
}
|
||||
|
||||
/// Enumeration describing the various ways in which validation of a search path might fail.
|
||||
@@ -324,54 +293,58 @@ fn query_stdlib_version(
|
||||
/// If validation fails for a search path derived from the user settings,
|
||||
/// a message must be displayed to the user,
|
||||
/// as type checking cannot be done reliably in these circumstances.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum SearchPathValidationError {
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) enum SearchPathValidationError {
|
||||
/// The path provided by the user was not a directory
|
||||
#[error("{0} does not point to a directory")]
|
||||
NotADirectory(SystemPathBuf),
|
||||
|
||||
/// The path provided by the user is a directory,
|
||||
/// but no `stdlib/` subdirectory exists.
|
||||
/// (This is only relevant for stdlib search paths.)
|
||||
#[error("The directory at {0} has no `stdlib/` subdirectory")]
|
||||
NoStdlibSubdirectory(SystemPathBuf),
|
||||
|
||||
/// The typeshed path provided by the user is a directory,
|
||||
/// but `stdlib/VERSIONS` could not be read.
|
||||
/// but no `stdlib/VERSIONS` file exists.
|
||||
/// (This is only relevant for stdlib search paths.)
|
||||
#[error("Failed to read the custom typeshed versions file '{path}'")]
|
||||
FailedToReadVersionsFile {
|
||||
path: SystemPathBuf,
|
||||
#[source]
|
||||
error: std::io::Error,
|
||||
},
|
||||
NoVersionsFile(SystemPathBuf),
|
||||
|
||||
/// `stdlib/VERSIONS` is a directory.
|
||||
/// (This is only relevant for stdlib search paths.)
|
||||
VersionsIsADirectory(SystemPathBuf),
|
||||
|
||||
/// The path provided by the user is a directory,
|
||||
/// and a `stdlib/VERSIONS` file exists, but it fails to parse.
|
||||
/// (This is only relevant for stdlib search paths.)
|
||||
#[error(transparent)]
|
||||
VersionsParseError(TypeshedVersionsParseError),
|
||||
|
||||
/// Failed to discover the site-packages for the configured virtual environment.
|
||||
#[error("Failed to discover the site-packages directory")]
|
||||
SitePackagesDiscovery(#[source] SitePackagesDiscoveryError),
|
||||
}
|
||||
|
||||
impl From<TypeshedVersionsParseError> for SearchPathValidationError {
|
||||
fn from(value: TypeshedVersionsParseError) -> Self {
|
||||
Self::VersionsParseError(value)
|
||||
impl fmt::Display for SearchPathValidationError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::NotADirectory(path) => write!(f, "{path} does not point to a directory"),
|
||||
Self::NoStdlibSubdirectory(path) => {
|
||||
write!(f, "The directory at {path} has no `stdlib/` subdirectory")
|
||||
}
|
||||
Self::NoVersionsFile(path) => write!(f, "Expected a file at {path}/stdlib/VERSIONS"),
|
||||
Self::VersionsIsADirectory(path) => write!(f, "{path}/stdlib/VERSIONS is a directory."),
|
||||
Self::VersionsParseError(underlying_error) => underlying_error.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SitePackagesDiscoveryError> for SearchPathValidationError {
|
||||
fn from(value: SitePackagesDiscoveryError) -> Self {
|
||||
Self::SitePackagesDiscovery(value)
|
||||
impl std::error::Error for SearchPathValidationError {
|
||||
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
|
||||
if let Self::VersionsParseError(underlying_error) = self {
|
||||
Some(underlying_error)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type SearchPathResult<T> = Result<T, SearchPathValidationError>;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
enum SearchPathInner {
|
||||
Extra(SystemPathBuf),
|
||||
FirstParty(SystemPathBuf),
|
||||
@@ -406,15 +379,16 @@ enum SearchPathInner {
|
||||
/// or the "Editable" category. For the "First-party", "Site-packages"
|
||||
/// and "Standard-library" categories, however, there will always be exactly
|
||||
/// one search path from that category in any given list of search paths.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub(crate) struct SearchPath(Arc<SearchPathInner>);
|
||||
|
||||
impl SearchPath {
|
||||
fn directory_path(system: &dyn System, root: SystemPathBuf) -> SearchPathResult<SystemPathBuf> {
|
||||
if system.is_directory(&root) {
|
||||
Ok(root)
|
||||
let canonicalized = system.canonicalize_path(&root).unwrap_or(root);
|
||||
if system.is_directory(&canonicalized) {
|
||||
Ok(canonicalized)
|
||||
} else {
|
||||
Err(SearchPathValidationError::NotADirectory(root))
|
||||
Err(SearchPathValidationError::NotADirectory(canonicalized))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -433,24 +407,32 @@ impl SearchPath {
|
||||
}
|
||||
|
||||
/// Create a new standard-library search path pointing to a custom directory on disk
|
||||
pub(crate) fn custom_stdlib(
|
||||
system: &dyn System,
|
||||
typeshed: &SystemPath,
|
||||
) -> SearchPathResult<Self> {
|
||||
if !system.is_directory(typeshed) {
|
||||
pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: SystemPathBuf) -> SearchPathResult<Self> {
|
||||
let system = db.system();
|
||||
if !system.is_directory(&typeshed) {
|
||||
return Err(SearchPathValidationError::NotADirectory(
|
||||
typeshed.to_path_buf(),
|
||||
));
|
||||
}
|
||||
|
||||
let stdlib =
|
||||
Self::directory_path(system, typeshed.join("stdlib")).map_err(|err| match err {
|
||||
SearchPathValidationError::NotADirectory(_) => {
|
||||
SearchPathValidationError::NoStdlibSubdirectory(typeshed.to_path_buf())
|
||||
SearchPathValidationError::NotADirectory(path) => {
|
||||
SearchPathValidationError::NoStdlibSubdirectory(path)
|
||||
}
|
||||
err => err,
|
||||
})?;
|
||||
|
||||
let typeshed_versions =
|
||||
system_path_to_file(db.upcast(), stdlib.join("VERSIONS")).map_err(|err| match err {
|
||||
FileError::NotFound => SearchPathValidationError::NoVersionsFile(typeshed),
|
||||
FileError::IsADirectory => {
|
||||
SearchPathValidationError::VersionsIsADirectory(typeshed)
|
||||
}
|
||||
})?;
|
||||
super::typeshed::parse_typeshed_versions(db, typeshed_versions)
|
||||
.as_ref()
|
||||
.map_err(|validation_error| {
|
||||
SearchPathValidationError::VersionsParseError(validation_error.clone())
|
||||
})?;
|
||||
Ok(Self(Arc::new(SearchPathInner::StandardLibraryCustom(
|
||||
stdlib,
|
||||
))))
|
||||
@@ -504,10 +486,6 @@ impl SearchPath {
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn is_first_party(&self) -> bool {
|
||||
matches!(&*self.0, SearchPathInner::FirstParty(_))
|
||||
}
|
||||
|
||||
fn is_valid_extension(&self, extension: &str) -> bool {
|
||||
if self.is_standard_library() {
|
||||
extension == "pyi"
|
||||
@@ -640,70 +618,15 @@ impl PartialEq<SearchPath> for VendoredPathBuf {
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for SearchPath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match &*self.0 {
|
||||
SearchPathInner::Extra(system_path_buf)
|
||||
| SearchPathInner::FirstParty(system_path_buf)
|
||||
| SearchPathInner::SitePackages(system_path_buf)
|
||||
| SearchPathInner::Editable(system_path_buf)
|
||||
| SearchPathInner::StandardLibraryCustom(system_path_buf) => system_path_buf.fmt(f),
|
||||
SearchPathInner::StandardLibraryVendored(vendored_path_buf) => vendored_path_buf.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) enum SystemOrVendoredPathRef<'db> {
|
||||
System(&'db SystemPath),
|
||||
Vendored(&'db VendoredPath),
|
||||
}
|
||||
|
||||
impl<'db> SystemOrVendoredPathRef<'db> {
|
||||
pub(super) fn try_from_file(db: &'db dyn Db, file: File) -> Option<Self> {
|
||||
match file.path(db) {
|
||||
FilePath::System(system) => Some(Self::System(system)),
|
||||
FilePath::Vendored(vendored) => Some(Self::Vendored(vendored)),
|
||||
FilePath::SystemVirtual(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn file_name(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::System(system) => system.file_name(),
|
||||
Self::Vendored(vendored) => vendored.file_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn parent<'a>(&'a self) -> Option<SystemOrVendoredPathRef<'a>>
|
||||
where
|
||||
'a: 'db,
|
||||
{
|
||||
match self {
|
||||
Self::System(system) => system.parent().map(Self::System),
|
||||
Self::Vendored(vendored) => vendored.parent().map(Self::Vendored),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
SystemOrVendoredPathRef::System(system) => system.fmt(f),
|
||||
SystemOrVendoredPathRef::Vendored(vendored) => vendored.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::Db;
|
||||
use ruff_python_ast::PythonVersion;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
|
||||
|
||||
use super::*;
|
||||
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
|
||||
use crate::python_version::PythonVersion;
|
||||
|
||||
impl ModulePath {
|
||||
#[must_use]
|
||||
@@ -715,6 +638,15 @@ mod tests {
|
||||
}
|
||||
|
||||
impl SearchPath {
|
||||
#[must_use]
|
||||
pub(crate) fn is_stdlib_search_path(&self) -> bool {
|
||||
matches!(
|
||||
&*self.0,
|
||||
SearchPathInner::StandardLibraryCustom(_)
|
||||
| SearchPathInner::StandardLibraryVendored(_)
|
||||
)
|
||||
}
|
||||
|
||||
fn join(&self, component: &str) -> ModulePath {
|
||||
self.to_module_path().join(component)
|
||||
}
|
||||
@@ -725,11 +657,11 @@ mod tests {
|
||||
let TestCase {
|
||||
db, src, stdlib, ..
|
||||
} = TestCaseBuilder::new()
|
||||
.with_mocked_typeshed(MockedTypeshed::default())
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
|
||||
assert_eq!(
|
||||
SearchPath::custom_stdlib(db.system(), stdlib.parent().unwrap())
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.to_module_path()
|
||||
.with_py_extension(),
|
||||
@@ -737,7 +669,7 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
&SearchPath::custom_stdlib(db.system(), stdlib.parent().unwrap())
|
||||
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.join("foo")
|
||||
.with_pyi_extension(),
|
||||
@@ -846,9 +778,9 @@ mod tests {
|
||||
#[should_panic(expected = "Extension must be `pyi`; got `py`")]
|
||||
fn stdlib_path_invalid_join_py() {
|
||||
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
|
||||
.with_mocked_typeshed(MockedTypeshed::default())
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
SearchPath::custom_stdlib(db.system(), stdlib.parent().unwrap())
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.to_module_path()
|
||||
.push("bar.py");
|
||||
@@ -858,9 +790,9 @@ mod tests {
|
||||
#[should_panic(expected = "Extension must be `pyi`; got `rs`")]
|
||||
fn stdlib_path_invalid_join_rs() {
|
||||
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
|
||||
.with_mocked_typeshed(MockedTypeshed::default())
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
SearchPath::custom_stdlib(db.system(), stdlib.parent().unwrap())
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.to_module_path()
|
||||
.push("bar.rs");
|
||||
@@ -889,10 +821,10 @@ mod tests {
|
||||
#[test]
|
||||
fn relativize_stdlib_path_errors() {
|
||||
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
|
||||
.with_mocked_typeshed(MockedTypeshed::default())
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
|
||||
let root = SearchPath::custom_stdlib(db.system(), stdlib.parent().unwrap()).unwrap();
|
||||
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
|
||||
|
||||
// Must have a `.pyi` extension or no extension:
|
||||
let bad_absolute_path = SystemPath::new("foo/stdlib/x.py");
|
||||
@@ -934,13 +866,14 @@ mod tests {
|
||||
|
||||
fn typeshed_test_case(
|
||||
typeshed: MockedTypeshed,
|
||||
python_version: PythonVersion,
|
||||
target_version: PythonVersion,
|
||||
) -> (TestDb, SearchPath) {
|
||||
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
|
||||
.with_mocked_typeshed(typeshed)
|
||||
.with_python_version(python_version)
|
||||
.with_custom_typeshed(typeshed)
|
||||
.with_target_version(target_version)
|
||||
.build();
|
||||
let stdlib = SearchPath::custom_stdlib(db.system(), stdlib.parent().unwrap()).unwrap();
|
||||
let stdlib =
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
|
||||
(db, stdlib)
|
||||
}
|
||||
|
||||
@@ -965,19 +898,17 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let asyncio_regular_package = stdlib_path.join("asyncio");
|
||||
assert!(asyncio_regular_package.is_directory(&resolver));
|
||||
assert!(asyncio_regular_package.is_regular_package(&resolver));
|
||||
// Paths to directories don't resolve to VfsFiles
|
||||
assert_eq!(asyncio_regular_package.to_file(&resolver), None);
|
||||
assert!(
|
||||
asyncio_regular_package
|
||||
.join("__init__.pyi")
|
||||
.to_file(&resolver)
|
||||
.is_some()
|
||||
);
|
||||
assert!(asyncio_regular_package
|
||||
.join("__init__.pyi")
|
||||
.to_file(&resolver)
|
||||
.is_some());
|
||||
|
||||
// The `asyncio` package exists on Python 3.8, but the `asyncio.tasks` submodule does not,
|
||||
// according to the `VERSIONS` file in our typeshed mock:
|
||||
@@ -995,7 +926,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let xml_namespace_package = stdlib_path.join("xml");
|
||||
assert!(xml_namespace_package.is_directory(&resolver));
|
||||
@@ -1017,7 +948,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let functools_module = stdlib_path.join("functools.pyi");
|
||||
assert!(functools_module.to_file(&resolver).is_some());
|
||||
@@ -1033,7 +964,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let collections_regular_package = stdlib_path.join("collections");
|
||||
assert_eq!(collections_regular_package.to_file(&resolver), None);
|
||||
@@ -1049,7 +980,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let importlib_namespace_package = stdlib_path.join("importlib");
|
||||
assert_eq!(importlib_namespace_package.to_file(&resolver), None);
|
||||
@@ -1070,7 +1001,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let non_existent = stdlib_path.join("doesnt_even_exist");
|
||||
assert_eq!(non_existent.to_file(&resolver), None);
|
||||
@@ -1098,7 +1029,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY39);
|
||||
|
||||
// Since we've set the target version to Py39,
|
||||
// `collections` should now exist as a directory, according to VERSIONS...
|
||||
@@ -1107,12 +1038,10 @@ mod tests {
|
||||
assert!(collections_regular_package.is_regular_package(&resolver));
|
||||
// (This is still `None`, as directories don't resolve to `Vfs` files)
|
||||
assert_eq!(collections_regular_package.to_file(&resolver), None);
|
||||
assert!(
|
||||
collections_regular_package
|
||||
.join("__init__.pyi")
|
||||
.to_file(&resolver)
|
||||
.is_some()
|
||||
);
|
||||
assert!(collections_regular_package
|
||||
.join("__init__.pyi")
|
||||
.to_file(&resolver)
|
||||
.is_some());
|
||||
|
||||
// ...and so should the `asyncio.tasks` submodule (though it's still not a directory):
|
||||
let asyncio_tasks_module = stdlib_path.join("asyncio/tasks.pyi");
|
||||
@@ -1129,7 +1058,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY39);
|
||||
|
||||
// The `importlib` directory now also exists
|
||||
let importlib_namespace_package = stdlib_path.join("importlib");
|
||||
@@ -1153,7 +1082,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY39);
|
||||
|
||||
// The `xml` package no longer exists on py39:
|
||||
let xml_namespace_package = stdlib_path.join("xml");
|
||||
1731 crates/red_knot_python_semantic/src/module_resolver/resolver.rs Normal file
File diff suppressed because it is too large
25 crates/red_knot_python_semantic/src/module_resolver/state.rs Normal file
@@ -0,0 +1,25 @@
use ruff_db::vendored::VendoredFileSystem;

use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::python_version::PythonVersion;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
    pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
    pub(crate) target_version: PythonVersion,
}

impl<'db> ResolverState<'db> {
    pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
        Self {
            db,
            typeshed_versions: LazyTypeshedVersions::new(),
            target_version,
        }
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
}
@@ -1,13 +1,10 @@
|
||||
use ruff_db::Db;
|
||||
use ruff_db::system::{
|
||||
DbWithTestSystem as _, DbWithWritableSystem as _, SystemPath, SystemPathBuf,
|
||||
};
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::vendored::VendoredPathBuf;
|
||||
use ruff_python_ast::PythonVersion;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::{ProgramSettings, PythonPlatform, PythonVersionSource, PythonVersionWithSource};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::ProgramSettings;
|
||||
|
||||
/// A test case for the module resolver.
|
||||
///
|
||||
@@ -21,7 +18,7 @@ pub(crate) struct TestCase<T> {
|
||||
// so this is a single directory instead of a `Vec` of directories,
|
||||
// like it is in `ruff_db::Program`.
|
||||
pub(crate) site_packages: SystemPathBuf,
|
||||
pub(crate) python_version: PythonVersion,
|
||||
pub(crate) target_version: PythonVersion,
|
||||
}
|
||||
|
||||
/// A `(file_name, file_contents)` tuple
|
||||
@@ -70,13 +67,13 @@ pub(crate) struct UnspecifiedTypeshed;
|
||||
/// ```rs
|
||||
/// let test_case = TestCaseBuilder::new()
|
||||
/// .with_src_files(...)
|
||||
/// .with_python_version(...)
|
||||
/// .with_target_version(...)
|
||||
/// .build();
|
||||
/// ```
|
||||
///
|
||||
/// For tests checking that standard-library module resolution is working
|
||||
/// correctly, you should usually create a [`MockedTypeshed`] instance
|
||||
/// and pass it to the [`TestCaseBuilder::with_mocked_typeshed`] method.
|
||||
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
|
||||
/// If you need to check something that involves the vendored typeshed stubs
|
||||
/// we include as part of the binary, you can instead use the
|
||||
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
|
||||
@@ -88,13 +85,13 @@ pub(crate) struct UnspecifiedTypeshed;
|
||||
/// const TYPESHED = MockedTypeshed { ... };
|
||||
///
|
||||
/// let test_case = resolver_test_case()
|
||||
/// .with_mocked_typeshed(TYPESHED)
|
||||
/// .with_python_version(...)
|
||||
/// .with_custom_typeshed(TYPESHED)
|
||||
/// .with_target_version(...)
|
||||
/// .build();
|
||||
///
|
||||
/// let test_case2 = resolver_test_case()
|
||||
/// .with_vendored_typeshed()
|
||||
/// .with_python_version(...)
|
||||
/// .with_target_version(...)
|
||||
/// .build();
|
||||
/// ```
|
||||
///
|
||||
@@ -103,8 +100,7 @@ pub(crate) struct UnspecifiedTypeshed;
|
||||
/// to `()`.
|
||||
pub(crate) struct TestCaseBuilder<T> {
|
||||
typeshed_option: T,
|
||||
python_version: PythonVersion,
|
||||
python_platform: PythonPlatform,
|
||||
target_version: PythonVersion,
|
||||
first_party_files: Vec<FileSpec>,
|
||||
site_packages_files: Vec<FileSpec>,
|
||||
}
|
||||
@@ -122,9 +118,9 @@ impl<T> TestCaseBuilder<T> {
|
||||
self
|
||||
}
|
||||
|
||||
/// Specify the Python version the module resolver should assume
|
||||
pub(crate) fn with_python_version(mut self, python_version: PythonVersion) -> Self {
|
||||
self.python_version = python_version;
|
||||
/// Specify the target Python version the module resolver should assume
|
||||
pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
|
||||
self.target_version = target_version;
|
||||
self
|
||||
}
|
||||
|
||||
@@ -150,8 +146,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
|
||||
pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
|
||||
Self {
|
||||
typeshed_option: UnspecifiedTypeshed,
|
||||
python_version: PythonVersion::default(),
|
||||
python_platform: PythonPlatform::default(),
|
||||
target_version: PythonVersion::default(),
|
||||
first_party_files: vec![],
|
||||
site_packages_files: vec![],
|
||||
}
|
||||
@@ -161,37 +156,32 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
|
||||
pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option: _,
|
||||
python_version,
|
||||
python_platform,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
TestCaseBuilder {
|
||||
typeshed_option: VendoredTypeshed,
|
||||
python_version,
|
||||
python_platform,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
}
|
||||
}
|
||||
|
||||
/// Use a mock typeshed directory for this test case
|
||||
pub(crate) fn with_mocked_typeshed(
|
||||
pub(crate) fn with_custom_typeshed(
|
||||
self,
|
||||
typeshed: MockedTypeshed,
|
||||
) -> TestCaseBuilder<MockedTypeshed> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option: _,
|
||||
python_version,
|
||||
python_platform,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
|
||||
TestCaseBuilder {
|
||||
typeshed_option: typeshed,
|
||||
python_version,
|
||||
python_platform,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
}
|
||||
@@ -203,15 +193,14 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
|
||||
src,
|
||||
stdlib: _,
|
||||
site_packages,
|
||||
python_version,
|
||||
} = self.with_mocked_typeshed(MockedTypeshed::default()).build();
|
||||
|
||||
target_version,
|
||||
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
|
||||
TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: (),
|
||||
site_packages,
|
||||
python_version,
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -220,8 +209,7 @@ impl TestCaseBuilder<MockedTypeshed> {
|
||||
pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option,
|
||||
python_version,
|
||||
python_platform,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
@@ -236,27 +224,23 @@ impl TestCaseBuilder<MockedTypeshed> {
|
||||
Program::from_settings(
|
||||
&db,
|
||||
ProgramSettings {
|
||||
python_version: PythonVersionWithSource {
|
||||
version: python_version,
|
||||
source: PythonVersionSource::default(),
|
||||
},
|
||||
python_platform,
|
||||
target_version,
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: Some(typeshed.clone()),
|
||||
site_packages_paths: vec![site_packages.clone()],
|
||||
..SearchPathSettings::new(vec![src.clone()])
|
||||
}
|
||||
.to_search_paths(db.system(), db.vendored())
|
||||
.expect("valid search path settings"),
|
||||
site_packages: vec![site_packages.clone()],
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid program settings");
|
||||
|
||||
TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: typeshed.join("stdlib"),
|
||||
site_packages,
|
||||
python_version,
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -282,8 +266,7 @@ impl TestCaseBuilder<VendoredTypeshed> {
|
||||
pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option: VendoredTypeshed,
|
||||
python_version,
|
||||
python_platform,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
@@ -297,26 +280,23 @@ impl TestCaseBuilder<VendoredTypeshed> {
|
||||
Program::from_settings(
|
||||
&db,
|
||||
ProgramSettings {
|
||||
python_version: PythonVersionWithSource {
|
||||
version: python_version,
|
||||
source: PythonVersionSource::default(),
|
||||
},
|
||||
python_platform,
|
||||
target_version,
|
||||
search_paths: SearchPathSettings {
|
||||
site_packages_paths: vec![site_packages.clone()],
|
||||
..SearchPathSettings::new(vec![src.clone()])
|
||||
}
|
||||
.to_search_paths(db.system(), db.vendored())
|
||||
.expect("valid search path settings"),
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: None,
|
||||
site_packages: vec![site_packages.clone()],
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: VendoredPathBuf::from("stdlib"),
|
||||
site_packages,
|
||||
python_version,
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
pub use self::vendored::vendored_typeshed_stubs;
|
||||
pub(super) use self::versions::{
|
||||
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
|
||||
TypeshedVersionsQueryResult,
|
||||
};
|
||||
|
||||
mod vendored;
|
||||
mod versions;
|
||||
@@ -0,0 +1,99 @@
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
|
||||
// The file path here is hardcoded in this crate's `build.rs` script.
|
||||
// Luckily this crate will fail to build if this file isn't available at build time.
|
||||
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
|
||||
|
||||
pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
|
||||
static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
|
||||
Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
|
||||
&VENDORED_TYPESHED_STUBS
|
||||
}
|
||||
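As an aside, here is a minimal sketch (not part of this diff) of how the accessor above could be used; it relies only on the `read_to_string` call on `VendoredFileSystem` that the surrounding code already uses.

```rust
// Illustrative only: read one stub out of the zipped, vendored typeshed.
fn demo_read_functools_stub() {
    let stubs = vendored_typeshed_stubs();
    let functools = stubs
        .read_to_string("stdlib/functools.pyi")
        .expect("functools stub should be present in the vendored typeshed");
    assert!(functools.contains("def update_wrapper("));
}
```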
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::io::{self, Read};
|
||||
use std::path::Path;
|
||||
|
||||
use ruff_db::vendored::VendoredPath;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn typeshed_zip_created_at_build_time() {
|
||||
let mut typeshed_zip_archive =
|
||||
zip::ZipArchive::new(io::Cursor::new(TYPESHED_ZIP_BYTES)).unwrap();
|
||||
|
||||
let mut functools_module_stub = typeshed_zip_archive
|
||||
.by_name("stdlib/functools.pyi")
|
||||
.unwrap();
|
||||
assert!(functools_module_stub.is_file());
|
||||
|
||||
let mut functools_module_stub_source = String::new();
|
||||
functools_module_stub
|
||||
.read_to_string(&mut functools_module_stub_source)
|
||||
.unwrap();
|
||||
|
||||
assert!(functools_module_stub_source.contains("def update_wrapper("));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn typeshed_vfs_consistent_with_vendored_stubs() {
|
||||
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
|
||||
let vendored_typeshed_stubs = vendored_typeshed_stubs();
|
||||
|
||||
let mut empty_iterator = true;
|
||||
for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
|
||||
empty_iterator = false;
|
||||
let entry = entry.unwrap();
|
||||
let absolute_path = entry.path();
|
||||
let file_type = entry.file_type();
|
||||
|
||||
let relative_path = absolute_path
|
||||
.strip_prefix(&vendored_typeshed_dir)
|
||||
.unwrap_or_else(|_| {
|
||||
panic!("Expected {absolute_path:?} to be a child of {vendored_typeshed_dir:?}")
|
||||
});
|
||||
|
||||
let vendored_path = <&VendoredPath>::try_from(relative_path)
|
||||
.unwrap_or_else(|_| panic!("Expected {relative_path:?} to be valid UTF-8"));
|
||||
|
||||
assert!(
|
||||
vendored_typeshed_stubs.exists(vendored_path),
|
||||
"Expected {vendored_path:?} to exist in the `VendoredFileSystem`!
|
||||
|
||||
Vendored file system:
|
||||
|
||||
{vendored_typeshed_stubs:#?}
|
||||
"
|
||||
);
|
||||
|
||||
let vendored_path_kind = vendored_typeshed_stubs
|
||||
.metadata(vendored_path)
|
||||
.unwrap_or_else(|_| {
|
||||
panic!(
|
||||
"Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem!
|
||||
|
||||
Vendored file system:
|
||||
|
||||
{vendored_typeshed_stubs:#?}
|
||||
"
|
||||
)
|
||||
})
|
||||
.kind();
|
||||
|
||||
assert_eq!(
|
||||
vendored_path_kind.is_directory(),
|
||||
file_type.is_dir(),
|
||||
"{vendored_path:?} had type {vendored_path_kind:?}, inconsistent with fs path {relative_path:?}: {file_type:?}"
|
||||
);
|
||||
}
|
||||
|
||||
assert!(
|
||||
!empty_iterator,
|
||||
"Expected there to be at least one file or directory in the vendored typeshed stubs!"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,766 @@
|
||||
use std::cell::OnceCell;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt;
|
||||
use std::num::{NonZeroU16, NonZeroUsize};
|
||||
use std::ops::{RangeFrom, RangeInclusive};
|
||||
use std::str::FromStr;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use ruff_db::system::SystemPath;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
|
||||
use super::vendored::vendored_typeshed_stubs;
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::python_version::PythonVersion;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
|
||||
|
||||
impl<'db> LazyTypeshedVersions<'db> {
|
||||
#[must_use]
|
||||
pub(crate) fn new() -> Self {
|
||||
Self(OnceCell::new())
|
||||
}
|
||||
|
||||
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
|
||||
///
|
||||
/// Simply probing whether a file exists in typeshed is insufficient for this question,
|
||||
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
|
||||
/// will still be available (either in a custom typeshed dir or in our vendored copy)
|
||||
/// even if the user specified Python 3.8 as the target version.
|
||||
///
|
||||
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
|
||||
/// as to whether the module exists on the specified target version. However, VERSIONS does not
|
||||
/// provide comprehensive information on all submodules, meaning that this method sometimes
|
||||
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
|
||||
/// See [`TypeshedVersionsQueryResult`] for more details.
|
||||
#[must_use]
|
||||
pub(crate) fn query_module(
|
||||
&self,
|
||||
db: &'db dyn Db,
|
||||
module: &ModuleName,
|
||||
stdlib_root: Option<&SystemPath>,
|
||||
target_version: PythonVersion,
|
||||
) -> TypeshedVersionsQueryResult {
|
||||
let versions = self.0.get_or_init(|| {
|
||||
let versions_path = if let Some(system_path) = stdlib_root {
|
||||
system_path.join("VERSIONS")
|
||||
} else {
|
||||
return &VENDORED_VERSIONS;
|
||||
};
|
||||
let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
|
||||
todo!(
|
||||
"Still need to figure out how to handle VERSIONS files being deleted \
|
||||
from custom typeshed directories! Expected a file to exist at {versions_path}"
|
||||
)
|
||||
};
|
||||
// TODO(Alex/Micha): If VERSIONS is invalid,
|
||||
// this should invalidate not just the specific module resolution we're currently attempting,
|
||||
// but all type inference that depends on any standard-library types.
|
||||
// Unwrapping here is not correct...
|
||||
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
|
||||
});
|
||||
versions.query_module(module, target_version)
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(crate) fn parse_typeshed_versions(
|
||||
db: &dyn Db,
|
||||
versions_file: File,
|
||||
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
|
||||
// TODO: Handle IO errors
|
||||
let file_content = versions_file
|
||||
.read_to_string(db.upcast())
|
||||
.unwrap_or_default();
|
||||
file_content.parse()
|
||||
}
|
||||
|
||||
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
|
||||
TypeshedVersions::from_str(
|
||||
&vendored_typeshed_stubs()
|
||||
.read_to_string("stdlib/VERSIONS")
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub(crate) struct TypeshedVersionsParseError {
|
||||
line_number: Option<NonZeroU16>,
|
||||
reason: TypeshedVersionsParseErrorKind,
|
||||
}
|
||||
|
||||
impl fmt::Display for TypeshedVersionsParseError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let TypeshedVersionsParseError {
|
||||
line_number,
|
||||
reason,
|
||||
} = self;
|
||||
if let Some(line_number) = line_number {
|
||||
write!(
|
||||
f,
|
||||
"Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
|
||||
)
|
||||
} else {
|
||||
write!(f, "Error while parsing typeshed's VERSIONS file: {reason}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for TypeshedVersionsParseError {
|
||||
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
|
||||
if let TypeshedVersionsParseErrorKind::IntegerParsingFailure { err, .. } = &self.reason {
|
||||
Some(err)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub(super) enum TypeshedVersionsParseErrorKind {
|
||||
TooManyLines(NonZeroUsize),
|
||||
UnexpectedNumberOfColons,
|
||||
InvalidModuleName(String),
|
||||
UnexpectedNumberOfHyphens,
|
||||
UnexpectedNumberOfPeriods(String),
|
||||
IntegerParsingFailure {
|
||||
version: String,
|
||||
err: std::num::ParseIntError,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for TypeshedVersionsParseErrorKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::TooManyLines(num_lines) => write!(
|
||||
f,
|
||||
"File has too many lines ({num_lines}); maximum allowed is {}",
|
||||
NonZeroU16::MAX
|
||||
),
|
||||
Self::UnexpectedNumberOfColons => {
|
||||
f.write_str("Expected every non-comment line to have exactly one colon")
|
||||
}
|
||||
Self::InvalidModuleName(name) => write!(
|
||||
f,
|
||||
"Expected all components of '{name}' to be valid Python identifiers"
|
||||
),
|
||||
Self::UnexpectedNumberOfHyphens => {
|
||||
f.write_str("Expected every non-comment line to have exactly one '-' character")
|
||||
}
|
||||
Self::UnexpectedNumberOfPeriods(format) => write!(
|
||||
f,
|
||||
"Expected all versions to be in the form {{MAJOR}}.{{MINOR}}; got '{format}'"
|
||||
),
|
||||
Self::IntegerParsingFailure { version, err } => write!(
|
||||
f,
|
||||
"Failed to convert '{version}' to a pair of integers due to {err}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);
|
||||
|
||||
impl TypeshedVersions {
|
||||
#[must_use]
|
||||
fn exact(&self, module_name: &ModuleName) -> Option<&PyVersionRange> {
|
||||
self.0.get(module_name)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn query_module(
|
||||
&self,
|
||||
module: &ModuleName,
|
||||
target_version: PythonVersion,
|
||||
) -> TypeshedVersionsQueryResult {
|
||||
if let Some(range) = self.exact(module) {
|
||||
if range.contains(target_version) {
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
} else {
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
}
|
||||
} else {
|
||||
let mut module = module.parent();
|
||||
while let Some(module_to_try) = module {
|
||||
if let Some(range) = self.exact(&module_to_try) {
|
||||
return {
|
||||
if range.contains(target_version) {
|
||||
TypeshedVersionsQueryResult::MaybeExists
|
||||
} else {
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
}
|
||||
};
|
||||
}
|
||||
module = module_to_try.parent();
|
||||
}
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
|
||||
/// "Does this module exist in the stdlib at runtime on a certain target version?"
|
||||
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
|
||||
pub(crate) enum TypeshedVersionsQueryResult {
|
||||
/// The module definitely exists in the stdlib at runtime on the user-specified target version.
|
||||
///
|
||||
/// For example:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the `asyncio.tasks` module exists in the stdlib
|
||||
/// - The VERSIONS file contains the line `asyncio.tasks: 3.8-`
|
||||
Exists,
|
||||
|
||||
/// The module definitely does not exist in the stdlib on the user-specified target version.
|
||||
///
|
||||
/// For example:
|
||||
/// - We're querying whether the `foo` module exists in the stdlib
|
||||
/// - There is no top-level `foo` module in VERSIONS
|
||||
///
|
||||
/// OR:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the module `importlib.abc` exists in the stdlib
|
||||
/// - The VERSIONS file contains the line `importlib.abc: 3.10-`,
|
||||
/// indicating that the module was added in 3.10
|
||||
///
|
||||
/// OR:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the module `collections.abc` exists in the stdlib
|
||||
/// - The VERSIONS file does not contain any information about the `collections.abc` submodule,
|
||||
/// but *does* contain the line `collections: 3.10-`,
|
||||
/// indicating that the entire `collections` package was added in Python 3.10.
|
||||
DoesNotExist,
|
||||
|
||||
/// The module potentially exists in the stdlib and, if it does,
|
||||
/// it definitely exists on the user-specified target version.
|
||||
///
|
||||
/// This variant is only relevant for submodules,
|
||||
/// for which the typeshed VERSIONS file does not provide comprehensive information.
|
||||
/// (The VERSIONS file is guaranteed to provide information about all top-level stdlib modules and packages,
|
||||
/// but not necessarily about all submodules within each top-level package.)
|
||||
///
|
||||
/// For example:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the `asyncio.staggered` module exists in the stdlib
|
||||
/// - The typeshed VERSIONS file contains the line `asyncio: 3.8-`,
|
||||
/// indicating that the `asyncio` package was added in Python 3.8,
|
||||
/// but does not contain any explicit information about the `asyncio.staggered` submodule.
|
||||
MaybeExists,
|
||||
}
|
||||
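A hedged sketch (not from this commit) of how a caller might branch on the three-valued result documented above; the function name and the fallback behaviour for `MaybeExists` are assumptions for illustration.

```rust
// Illustrative only; a real resolver would probe the file system in the
// `MaybeExists` case instead of assuming the submodule is present.
fn stdlib_module_may_exist(
    versions: &TypeshedVersions,
    module: &ModuleName,
    target_version: PythonVersion,
) -> bool {
    match versions.query_module(module, target_version) {
        // Listed in VERSIONS and inside the supported range.
        TypeshedVersionsQueryResult::Exists => true,
        // A submodule VERSIONS says nothing about; only its parent is listed.
        TypeshedVersionsQueryResult::MaybeExists => true,
        // Listed (directly or via a parent) but outside the supported range.
        TypeshedVersionsQueryResult::DoesNotExist => false,
    }
}
```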
|
||||
impl FromStr for TypeshedVersions {
|
||||
type Err = TypeshedVersionsParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut map = FxHashMap::default();
|
||||
|
||||
for (line_index, line) in s.lines().enumerate() {
|
||||
// humans expect line numbers to be 1-indexed
|
||||
let line_number = NonZeroUsize::new(line_index.saturating_add(1)).unwrap();
|
||||
|
||||
let Ok(line_number) = NonZeroU16::try_from(line_number) else {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: None,
|
||||
reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
|
||||
});
|
||||
};
|
||||
|
||||
let Some(content) = line.split('#').map(str::trim).next() else {
|
||||
continue;
|
||||
};
|
||||
if content.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut parts = content.split(':').map(str::trim);
|
||||
let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
|
||||
else {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: Some(line_number),
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
|
||||
});
|
||||
};
|
||||
|
||||
let Some(module_name) = ModuleName::new(module_name) else {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: Some(line_number),
|
||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
||||
module_name.to_string(),
|
||||
),
|
||||
});
|
||||
};
|
||||
|
||||
match PyVersionRange::from_str(rest) {
|
||||
Ok(version) => map.insert(module_name, version),
|
||||
Err(reason) => {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: Some(line_number),
|
||||
reason,
|
||||
})
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(Self(map))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for TypeshedVersions {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let sorted_items: BTreeMap<&ModuleName, &PyVersionRange> = self.0.iter().collect();
|
||||
for (module_name, range) in sorted_items {
|
||||
writeln!(f, "{module_name}: {range}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
enum PyVersionRange {
|
||||
AvailableFrom(RangeFrom<PythonVersion>),
|
||||
AvailableWithin(RangeInclusive<PythonVersion>),
|
||||
}
|
||||
|
||||
impl PyVersionRange {
|
||||
#[must_use]
|
||||
fn contains(&self, version: PythonVersion) -> bool {
|
||||
match self {
|
||||
Self::AvailableFrom(inner) => inner.contains(&version),
|
||||
Self::AvailableWithin(inner) => inner.contains(&version),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for PyVersionRange {
|
||||
type Err = TypeshedVersionsParseErrorKind;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = s.split('-').map(str::trim);
|
||||
match (parts.next(), parts.next(), parts.next()) {
|
||||
(Some(lower), Some(""), None) => {
|
||||
let lower = PythonVersion::from_versions_file_string(lower)?;
|
||||
Ok(Self::AvailableFrom(lower..))
|
||||
}
|
||||
(Some(lower), Some(upper), None) => {
|
||||
let lower = PythonVersion::from_versions_file_string(lower)?;
|
||||
let upper = PythonVersion::from_versions_file_string(upper)?;
|
||||
Ok(Self::AvailableWithin(lower..=upper))
|
||||
}
|
||||
_ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
|
||||
}
|
||||
}
|
||||
}
|
||||
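A small, hypothetical check of the range grammar implemented above (`{MAJOR}.{MINOR}-` for open ranges, `{MAJOR}.{MINOR}-{MAJOR}.{MINOR}` for closed ones); `PyVersionRange` is module-private, so this only illustrates the intent.

```rust
use std::str::FromStr;

// Illustrative only.
fn demo_version_ranges() {
    // "3.8-" means available from 3.8 onwards.
    let open = PyVersionRange::from_str("3.8-").unwrap();
    assert!(open.contains(PythonVersion::PY312));
    assert!(!open.contains(PythonVersion::PY37));

    // "2.7-3.10" means available from 2.7 up to and including 3.10.
    let closed = PyVersionRange::from_str("2.7-3.10").unwrap();
    assert!(closed.contains(PythonVersion::PY310));
    assert!(!closed.contains(PythonVersion::PY311));
}
```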
|
||||
impl fmt::Display for PyVersionRange {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::AvailableFrom(range_from) => write!(f, "{}-", range_from.start),
|
||||
Self::AvailableWithin(range_inclusive) => {
|
||||
write!(f, "{}-{}", range_inclusive.start(), range_inclusive.end())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PythonVersion {
|
||||
fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
|
||||
let mut parts = s.split('.').map(str::trim);
|
||||
let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
|
||||
return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
||||
s.to_string(),
|
||||
));
|
||||
};
|
||||
PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
|
||||
TypeshedVersionsParseErrorKind::IntegerParsingFailure {
|
||||
version: s.to_string(),
|
||||
err: int_parse_error,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::num::{IntErrorKind, NonZeroU16};
|
||||
use std::path::Path;
|
||||
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use super::*;
|
||||
|
||||
const TYPESHED_STDLIB_DIR: &str = "stdlib";
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
const ONE: Option<NonZeroU16> = Some(unsafe { NonZeroU16::new_unchecked(1) });
|
||||
|
||||
impl TypeshedVersions {
|
||||
#[must_use]
|
||||
fn contains_exact(&self, module: &ModuleName) -> bool {
|
||||
self.exact(module).is_some()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_vendored_versions_file() {
|
||||
let versions_data = include_str!(concat!(
|
||||
env!("CARGO_MANIFEST_DIR"),
|
||||
"/vendor/typeshed/stdlib/VERSIONS"
|
||||
));
|
||||
|
||||
let versions = TypeshedVersions::from_str(versions_data).unwrap();
|
||||
assert!(versions.len() > 100);
|
||||
assert!(versions.len() < 1000);
|
||||
|
||||
let asyncio = ModuleName::new_static("asyncio").unwrap();
|
||||
let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
|
||||
let audioop = ModuleName::new_static("audioop").unwrap();
|
||||
|
||||
assert!(versions.contains_exact(&asyncio));
|
||||
assert_eq!(
|
||||
versions.query_module(&asyncio, PythonVersion::PY310),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
|
||||
assert!(versions.contains_exact(&asyncio_staggered));
|
||||
assert_eq!(
|
||||
versions.query_module(&asyncio_staggered, PythonVersion::PY38),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
versions.query_module(&asyncio_staggered, PythonVersion::PY37),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
|
||||
assert!(versions.contains_exact(&audioop));
|
||||
assert_eq!(
|
||||
versions.query_module(&audioop, PythonVersion::PY312),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
versions.query_module(&audioop, PythonVersion::PY313),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn typeshed_versions_consistent_with_vendored_stubs() {
|
||||
const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
|
||||
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
|
||||
let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
|
||||
|
||||
let mut empty_iterator = true;
|
||||
|
||||
let stdlib_stubs_path = vendored_typeshed_dir.join(TYPESHED_STDLIB_DIR);
|
||||
|
||||
for entry in std::fs::read_dir(&stdlib_stubs_path).unwrap() {
|
||||
empty_iterator = false;
|
||||
let entry = entry.unwrap();
|
||||
let absolute_path = entry.path();
|
||||
|
||||
let relative_path = absolute_path
|
||||
.strip_prefix(&stdlib_stubs_path)
|
||||
.unwrap_or_else(|_| panic!("Expected path to be a child of {stdlib_stubs_path:?} but found {absolute_path:?}"));
|
||||
|
||||
let relative_path_str = relative_path.as_os_str().to_str().unwrap_or_else(|| {
|
||||
panic!("Expected all typeshed paths to be valid UTF-8; got {relative_path:?}")
|
||||
});
|
||||
if relative_path_str == "VERSIONS" {
|
||||
continue;
|
||||
}
|
||||
|
||||
let top_level_module = if let Some(extension) = relative_path.extension() {
|
||||
// It was a file; strip off the file extension to get the module name:
|
||||
let extension = extension
|
||||
.to_str()
|
||||
.unwrap_or_else(|| panic!("Expected all file extensions to be UTF-8; was not true for {relative_path:?}"));
|
||||
|
||||
relative_path_str
|
||||
.strip_suffix(extension)
|
||||
.and_then(|string| string.strip_suffix('.')).unwrap_or_else(|| {
|
||||
panic!("Expected path {relative_path_str:?} to end with computed extension {extension:?}")
|
||||
})
|
||||
} else {
|
||||
// It was a directory; no need to do anything to get the module name
|
||||
relative_path_str
|
||||
};
|
||||
|
||||
let top_level_module = ModuleName::new(top_level_module)
|
||||
.unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));
|
||||
|
||||
assert!(vendored_typeshed_versions.contains_exact(&top_level_module));
|
||||
}
|
||||
|
||||
assert!(
|
||||
!empty_iterator,
|
||||
"Expected there to be at least one file or directory in the vendored typeshed stubs"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_mock_versions_file() {
|
||||
const VERSIONS: &str = "\
|
||||
# a comment
|
||||
# some more comment
|
||||
# yet more comment
|
||||
|
||||
|
||||
# and some more comment
|
||||
|
||||
bar: 2.7-3.10
|
||||
|
||||
# more comment
|
||||
bar.baz: 3.1-3.9
|
||||
foo: 3.8- # trailing comment
|
||||
";
|
||||
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
|
||||
assert_eq!(parsed_versions.len(), 3);
|
||||
assert_snapshot!(parsed_versions.to_string(), @r###"
|
||||
bar: 2.7-3.10
|
||||
bar.baz: 3.1-3.9
|
||||
foo: 3.8-
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_within_range_parsed_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
|
||||
let bar = ModuleName::new_static("bar").unwrap();
|
||||
|
||||
assert!(parsed_versions.contains_exact(&bar));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar, PythonVersion::PY37),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar, PythonVersion::PY310),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar, PythonVersion::PY311),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_from_range_parsed_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("foo: 3.8-").unwrap();
|
||||
let foo = ModuleName::new_static("foo").unwrap();
|
||||
|
||||
assert!(parsed_versions.contains_exact(&foo));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&foo, PythonVersion::PY37),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&foo, PythonVersion::PY38),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&foo, PythonVersion::PY311),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn explicit_submodule_parsed_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("bar.baz: 3.1-3.9").unwrap();
|
||||
let bar_baz = ModuleName::new_static("bar.baz").unwrap();
|
||||
|
||||
assert!(parsed_versions.contains_exact(&bar_baz));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn implicit_submodule_queried_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
|
||||
let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();
|
||||
|
||||
assert!(!parsed_versions.contains_exact(&bar_eggs));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
|
||||
TypeshedVersionsQueryResult::MaybeExists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
|
||||
TypeshedVersionsQueryResult::MaybeExists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonexistent_module_queried_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("eggs: 3.8-").unwrap();
|
||||
let spam = ModuleName::new_static("spam").unwrap();
|
||||
|
||||
assert!(!parsed_versions.contains_exact(&spam));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&spam, PythonVersion::PY37),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&spam, PythonVersion::PY313),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_huge_versions_file() {
|
||||
let offset = 100;
|
||||
let too_many = u16::MAX as usize + offset;
|
||||
|
||||
let mut massive_versions_file = String::new();
|
||||
for i in 0..too_many {
|
||||
massive_versions_file.push_str(&format!("x{i}: 3.8-\n"));
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str(&massive_versions_file),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: None,
|
||||
reason: TypeshedVersionsParseErrorKind::TooManyLines(
|
||||
NonZeroUsize::new(too_many + 1 - offset).unwrap()
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_bad_colon_number() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo:: 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_non_identifier_modules() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("not!an!identifier!: 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
||||
"not!an!identifier!".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("(also_not).(an_identifier): 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
||||
"(also_not).(an_identifier)".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_bad_hyphen_number() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8--"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8--3.9"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_bad_period_number() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 38-"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods("38".to_string())
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3..8-"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
||||
"3..8".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8-3..11"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
||||
"3..11".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_non_digits() {
|
||||
let err = TypeshedVersions::from_str("foo: 1.two-").unwrap_err();
|
||||
assert_eq!(err.line_number, ONE);
|
||||
let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
|
||||
else {
|
||||
panic!()
|
||||
};
|
||||
assert_eq!(version, "1.two".to_string());
|
||||
assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
|
||||
|
||||
let err = TypeshedVersions::from_str("foo: 3.8-four.9").unwrap_err();
|
||||
assert_eq!(err.line_number, ONE);
|
||||
let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
|
||||
else {
|
||||
panic!()
|
||||
};
|
||||
assert_eq!(version, "four.9".to_string());
|
||||
assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
|
||||
}
|
||||
}
|
||||
crates/red_knot_python_semantic/src/node_key.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
/// Compact key for a node for use in a hash map.
|
||||
///
|
||||
/// Compares two nodes by their kind and text range.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(super) struct NodeKey {
|
||||
kind: NodeKind,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl NodeKey {
|
||||
#[inline]
|
||||
pub(super) fn from_node<'a, N>(node: &N) -> Self
|
||||
where
|
||||
N: AstNode,
|
||||
{
|
||||
NodeKey {
|
||||
kind: node.kind(),
|
||||
range: node.range(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(super) fn from_ref<'a, N>(node: N) -> Self
|
||||
where
|
||||
N: Into<AnyNodeRef<'a>>,
|
||||
{
|
||||
let node = node.into();
|
||||
NodeKey {
|
||||
kind: node.kind(),
|
||||
range: node.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
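A brief, hypothetical illustration of the node-key code above: two keys built from the same expression compare equal, so they can index a hash map without holding a reference to the AST. It assumes `AnyNodeRef` has a `From<&ast::Expr>` impl, which the `from_ref` bound requires of its callers.

```rust
// Illustrative only; `NodeKey` and its constructors are pub(super).
fn demo_node_key(expr: &ruff_python_ast::Expr) {
    let a = NodeKey::from_node(expr);
    let b = NodeKey::from_ref(expr);
    // Same kind and same text range, therefore the same key.
    assert_eq!(a, b);
}
```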
crates/red_knot_python_semantic/src/program.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
use crate::python_version::PythonVersion;
|
||||
use anyhow::Context;
|
||||
use salsa::Durability;
|
||||
use salsa::Setter;
|
||||
|
||||
use ruff_db::system::SystemPathBuf;
|
||||
|
||||
use crate::module_resolver::SearchPaths;
|
||||
use crate::Db;
|
||||
|
||||
#[salsa::input(singleton)]
|
||||
pub struct Program {
|
||||
pub target_version: PythonVersion,
|
||||
|
||||
#[default]
|
||||
#[return_ref]
|
||||
pub(crate) search_paths: SearchPaths,
|
||||
}
|
||||
|
||||
impl Program {
|
||||
pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
|
||||
let ProgramSettings {
|
||||
target_version,
|
||||
search_paths,
|
||||
} = settings;
|
||||
|
||||
tracing::info!("Target version: Python {target_version}");
|
||||
|
||||
let search_paths = SearchPaths::from_settings(db, search_paths)
|
||||
.with_context(|| "Invalid search path settings")?;
|
||||
|
||||
Ok(Program::builder(target_version)
|
||||
.durability(Durability::HIGH)
|
||||
.search_paths(search_paths)
|
||||
.new(db))
|
||||
}
|
||||
|
||||
pub fn update_search_paths(
|
||||
&self,
|
||||
db: &mut dyn Db,
|
||||
search_path_settings: SearchPathSettings,
|
||||
) -> anyhow::Result<()> {
|
||||
let search_paths = SearchPaths::from_settings(db, search_path_settings)?;
|
||||
|
||||
if self.search_paths(db) != &search_paths {
|
||||
tracing::debug!("Update search paths");
|
||||
self.set_search_paths(db).to(search_paths);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub struct ProgramSettings {
|
||||
pub target_version: PythonVersion,
|
||||
pub search_paths: SearchPathSettings,
|
||||
}
|
||||
|
||||
/// Configures the search paths for module resolution.
|
||||
#[derive(Eq, PartialEq, Debug, Clone, Default)]
|
||||
pub struct SearchPathSettings {
|
||||
/// List of user-provided paths that should take first priority in the module resolution.
|
||||
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
|
||||
/// or pyright's stubPath configuration setting.
|
||||
pub extra_paths: Vec<SystemPathBuf>,
|
||||
|
||||
/// The root of the workspace, used for finding first-party modules.
|
||||
pub src_root: SystemPathBuf,
|
||||
|
||||
/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
|
||||
/// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
|
||||
/// bundled as a zip file in the binary
|
||||
pub custom_typeshed: Option<SystemPathBuf>,
|
||||
|
||||
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
|
||||
pub site_packages: Vec<SystemPathBuf>,
|
||||
}
|
||||
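A hypothetical configuration built from the two settings structs above and handed to `Program::from_settings`; the concrete paths are placeholders, and `SystemPathBuf::from(&str)` is assumed to behave like the other path conversions used in this diff.

```rust
// Illustrative only.
fn configure_program(db: &dyn Db) -> anyhow::Result<Program> {
    let settings = ProgramSettings {
        target_version: PythonVersion::PY312,
        search_paths: SearchPathSettings {
            extra_paths: vec![],
            src_root: SystemPathBuf::from("/project/src"),
            // `None` falls back to the vendored typeshed stubs bundled in the binary.
            custom_typeshed: None,
            site_packages: vec![SystemPathBuf::from("/project/.venv/lib/site-packages")],
        },
    };
    Program::from_settings(db, settings)
}
```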
crates/red_knot_python_semantic/src/python_version.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
use std::fmt;
|
||||
|
||||
/// Representation of a Python version.
|
||||
///
|
||||
/// Unlike the `TargetVersion` enums in the CLI crates,
|
||||
/// this does not necessarily represent a Python version that we actually support.
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub struct PythonVersion {
|
||||
pub major: u8,
|
||||
pub minor: u8,
|
||||
}
|
||||
|
||||
impl PythonVersion {
|
||||
pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
|
||||
pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
|
||||
pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
|
||||
pub const PY310: PythonVersion = PythonVersion {
|
||||
major: 3,
|
||||
minor: 10,
|
||||
};
|
||||
pub const PY311: PythonVersion = PythonVersion {
|
||||
major: 3,
|
||||
minor: 11,
|
||||
};
|
||||
pub const PY312: PythonVersion = PythonVersion {
|
||||
major: 3,
|
||||
minor: 12,
|
||||
};
|
||||
pub const PY313: PythonVersion = PythonVersion {
|
||||
major: 3,
|
||||
minor: 13,
|
||||
};
|
||||
|
||||
pub fn free_threaded_build_available(self) -> bool {
|
||||
self >= PythonVersion::PY313
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PythonVersion {
|
||||
fn default() -> Self {
|
||||
Self::PY38
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<(&str, &str)> for PythonVersion {
|
||||
type Error = std::num::ParseIntError;
|
||||
|
||||
fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
|
||||
let (major, minor) = value;
|
||||
Ok(Self {
|
||||
major: major.parse()?,
|
||||
minor: minor.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PythonVersion {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let PythonVersion { major, minor } = self;
|
||||
write!(f, "{major}.{minor}")
|
||||
}
|
||||
}
|
||||
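A short, hypothetical exercise of the `PythonVersion` type defined above, relying only on the derives and impls shown (ordering, `Default`, `TryFrom<(&str, &str)>`, and `Display`).

```rust
// Illustrative only.
fn demo_python_version() {
    assert!(PythonVersion::PY38 < PythonVersion::PY312);
    assert_eq!(PythonVersion::default(), PythonVersion::PY38);

    let parsed = PythonVersion::try_from(("3", "11")).unwrap();
    assert_eq!(parsed, PythonVersion::PY311);
    assert_eq!(parsed.to_string(), "3.11");

    // Free-threaded builds only exist from 3.13 onwards.
    assert!(!PythonVersion::PY312.free_threaded_build_available());
    assert!(PythonVersion::PY313.free_threaded_build_available());
}
```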
crates/red_knot_python_semantic/src/semantic_index.rs (new file, 1048 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/semantic_index/ast_ids.rs (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_index::newtype_index;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::ExpressionRef;
|
||||
|
||||
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::semantic_index::symbol::ScopeId;
|
||||
use crate::Db;
|
||||
|
||||
/// AST ids for a single scope.
|
||||
///
|
||||
/// The motivation for building the AST ids per scope isn't about reducing invalidation because
|
||||
/// the struct changes whenever the parsed AST changes. Instead, it's mainly that we can
|
||||
/// build the AST ids struct when building the symbol table and also keep the property that
|
||||
/// IDs of outer scopes are unaffected by changes in inner scopes.
|
||||
///
|
||||
/// For example, we don't want that adding new statements to `foo` changes the statement id of `x = foo()` in:
|
||||
///
|
||||
/// ```python
|
||||
/// def foo():
|
||||
/// return 5
|
||||
///
|
||||
/// x = foo()
|
||||
/// ```
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct AstIds {
|
||||
/// Maps expressions to their expression id.
|
||||
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
|
||||
/// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
|
||||
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
|
||||
}
|
||||
|
||||
impl AstIds {
|
||||
fn expression_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedExpressionId {
|
||||
self.expressions_map[&key.into()]
|
||||
}
|
||||
|
||||
fn use_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedUseId {
|
||||
self.uses_map[&key.into()]
|
||||
}
|
||||
}
|
||||
|
||||
fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
|
||||
semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
|
||||
}
|
||||
|
||||
pub trait HasScopedUseId {
|
||||
/// The type of the ID uniquely identifying the use.
|
||||
type Id: Copy;
|
||||
|
||||
/// Returns the ID that uniquely identifies the use in `scope`.
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
|
||||
}
|
||||
|
||||
/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedUseId;
|
||||
|
||||
impl HasScopedUseId for ast::ExprName {
|
||||
type Id = ScopedUseId;
|
||||
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.scoped_use_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasScopedUseId for ast::ExpressionRef<'_> {
|
||||
type Id = ScopedUseId;
|
||||
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.use_id(*self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HasScopedAstId {
|
||||
/// The type of the ID uniquely identifying the node.
|
||||
type Id: Copy;
|
||||
|
||||
/// Returns the ID that uniquely identifies the node in `scope`.
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
|
||||
}
|
||||
|
||||
/// Uniquely identifies an [`ast::Expr`] in a [`crate::semantic_index::symbol::FileScopeId`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedExpressionId;
|
||||
|
||||
macro_rules! impl_has_scoped_expression_id {
|
||||
($ty: ty) => {
|
||||
impl HasScopedAstId for $ty {
|
||||
type Id = ScopedExpressionId;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.scoped_ast_id(db, scope)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_has_scoped_expression_id!(ast::ExprBoolOp);
|
||||
impl_has_scoped_expression_id!(ast::ExprName);
|
||||
impl_has_scoped_expression_id!(ast::ExprBinOp);
|
||||
impl_has_scoped_expression_id!(ast::ExprUnaryOp);
|
||||
impl_has_scoped_expression_id!(ast::ExprLambda);
|
||||
impl_has_scoped_expression_id!(ast::ExprIf);
|
||||
impl_has_scoped_expression_id!(ast::ExprDict);
|
||||
impl_has_scoped_expression_id!(ast::ExprSet);
|
||||
impl_has_scoped_expression_id!(ast::ExprListComp);
|
||||
impl_has_scoped_expression_id!(ast::ExprSetComp);
|
||||
impl_has_scoped_expression_id!(ast::ExprDictComp);
|
||||
impl_has_scoped_expression_id!(ast::ExprGenerator);
|
||||
impl_has_scoped_expression_id!(ast::ExprAwait);
|
||||
impl_has_scoped_expression_id!(ast::ExprYield);
|
||||
impl_has_scoped_expression_id!(ast::ExprYieldFrom);
|
||||
impl_has_scoped_expression_id!(ast::ExprCompare);
|
||||
impl_has_scoped_expression_id!(ast::ExprCall);
|
||||
impl_has_scoped_expression_id!(ast::ExprFString);
|
||||
impl_has_scoped_expression_id!(ast::ExprStringLiteral);
|
||||
impl_has_scoped_expression_id!(ast::ExprBytesLiteral);
|
||||
impl_has_scoped_expression_id!(ast::ExprNumberLiteral);
|
||||
impl_has_scoped_expression_id!(ast::ExprBooleanLiteral);
|
||||
impl_has_scoped_expression_id!(ast::ExprNoneLiteral);
|
||||
impl_has_scoped_expression_id!(ast::ExprEllipsisLiteral);
|
||||
impl_has_scoped_expression_id!(ast::ExprAttribute);
|
||||
impl_has_scoped_expression_id!(ast::ExprSubscript);
|
||||
impl_has_scoped_expression_id!(ast::ExprStarred);
|
||||
impl_has_scoped_expression_id!(ast::ExprNamed);
|
||||
impl_has_scoped_expression_id!(ast::ExprList);
|
||||
impl_has_scoped_expression_id!(ast::ExprTuple);
|
||||
impl_has_scoped_expression_id!(ast::ExprSlice);
|
||||
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand);
|
||||
impl_has_scoped_expression_id!(ast::Expr);
|
||||
|
||||
impl HasScopedAstId for ast::ExpressionRef<'_> {
|
||||
type Id = ScopedExpressionId;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.expression_id(*self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct AstIdsBuilder {
|
||||
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
|
||||
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
|
||||
}
|
||||
|
||||
impl AstIdsBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
expressions_map: FxHashMap::default(),
|
||||
uses_map: FxHashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds `expr` to the expression ids map and returns its id.
|
||||
pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
|
||||
let expression_id = self.expressions_map.len().into();
|
||||
|
||||
self.expressions_map.insert(expr.into(), expression_id);
|
||||
|
||||
expression_id
|
||||
}
|
||||
|
||||
/// Adds `expr` to the use ids map and returns its id.
|
||||
pub(super) fn record_use(&mut self, expr: &ast::Expr) -> ScopedUseId {
|
||||
let use_id = self.uses_map.len().into();
|
||||
|
||||
self.uses_map.insert(expr.into(), use_id);
|
||||
|
||||
use_id
|
||||
}
|
||||
|
||||
pub(super) fn finish(mut self) -> AstIds {
|
||||
self.expressions_map.shrink_to_fit();
|
||||
self.uses_map.shrink_to_fit();
|
||||
|
||||
AstIds {
|
||||
expressions_map: self.expressions_map,
|
||||
uses_map: self.uses_map,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Node key that can only be constructed for expressions.
|
||||
pub(crate) mod node_key {
|
||||
use ruff_python_ast as ast;
|
||||
|
||||
use crate::node_key::NodeKey;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
|
||||
pub(crate) struct ExpressionNodeKey(NodeKey);
|
||||
|
||||
impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
|
||||
#[inline]
|
||||
fn from(value: ast::ExpressionRef<'_>) -> Self {
|
||||
Self(NodeKey::from_ref(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Expr> for ExpressionNodeKey {
|
||||
#[inline]
|
||||
fn from(value: &ast::Expr) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
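A hedged sketch (not part of the commit) of the lookup pattern this file enables: given a scope and a name expression within it, the `HasScopedAstId` and `HasScopedUseId` traits above recover the per-scope ids. `db`, `scope`, and the expression are assumed to come from an already-built semantic index.

```rust
// Illustrative only.
fn demo_ids(db: &dyn Db, scope: ScopeId, name: &ast::ExprName) {
    // Every expression in the scope gets a ScopedExpressionId...
    let expression_id: ScopedExpressionId = name.scoped_ast_id(db, scope);
    // ...and name expressions additionally get a ScopedUseId.
    let use_id: ScopedUseId = name.scoped_use_id(db, scope);
    let _ = (expression_id, use_id);
}
```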
crates/red_knot_python_semantic/src/semantic_index/builder.rs (new file, 805 lines)
@@ -0,0 +1,805 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_index::IndexVec;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::name::Name;
|
||||
use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
|
||||
use ruff_python_ast::AnyParameterRef;
|
||||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
||||
use crate::semantic_index::ast_ids::AstIdsBuilder;
|
||||
use crate::semantic_index::definition::{
|
||||
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
|
||||
DefinitionNodeRef, ImportFromDefinitionNodeRef,
|
||||
};
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::{
|
||||
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
|
||||
SymbolTableBuilder,
|
||||
};
|
||||
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
|
||||
use crate::semantic_index::SemanticIndex;
|
||||
use crate::Db;
|
||||
|
||||
pub(super) struct SemanticIndexBuilder<'db> {
|
||||
// Builder state
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
module: &'db ParsedModule,
|
||||
scope_stack: Vec<FileScopeId>,
|
||||
/// The assignment we're currently visiting.
|
||||
current_assignment: Option<CurrentAssignment<'db>>,
|
||||
/// Flow states at each `break` in the current loop.
|
||||
loop_break_states: Vec<FlowSnapshot>,
|
||||
|
||||
// Semantic Index fields
|
||||
scopes: IndexVec<FileScopeId, Scope>,
|
||||
scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,
|
||||
symbol_tables: IndexVec<FileScopeId, SymbolTableBuilder>,
|
||||
ast_ids: IndexVec<FileScopeId, AstIdsBuilder>,
|
||||
use_def_maps: IndexVec<FileScopeId, UseDefMapBuilder<'db>>,
|
||||
scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
|
||||
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
|
||||
definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,
|
||||
expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
|
||||
}
|
||||
|
||||
impl<'db> SemanticIndexBuilder<'db> {
|
||||
pub(super) fn new(db: &'db dyn Db, file: File, parsed: &'db ParsedModule) -> Self {
|
||||
let mut builder = Self {
|
||||
db,
|
||||
file,
|
||||
module: parsed,
|
||||
scope_stack: Vec::new(),
|
||||
current_assignment: None,
|
||||
loop_break_states: vec![],
|
||||
|
||||
scopes: IndexVec::new(),
|
||||
symbol_tables: IndexVec::new(),
|
||||
ast_ids: IndexVec::new(),
|
||||
scope_ids_by_scope: IndexVec::new(),
|
||||
use_def_maps: IndexVec::new(),
|
||||
|
||||
scopes_by_expression: FxHashMap::default(),
|
||||
scopes_by_node: FxHashMap::default(),
|
||||
definitions_by_node: FxHashMap::default(),
|
||||
expressions_by_node: FxHashMap::default(),
|
||||
};
|
||||
|
||||
builder.push_scope_with_parent(NodeWithScopeRef::Module, None);
|
||||
|
||||
builder
|
||||
}
|
||||
|
||||
fn current_scope(&self) -> FileScopeId {
|
||||
*self
|
||||
.scope_stack
|
||||
.last()
|
||||
.expect("Always to have a root scope")
|
||||
}
|
||||
|
||||
fn push_scope(&mut self, node: NodeWithScopeRef) {
|
||||
let parent = self.current_scope();
|
||||
self.push_scope_with_parent(node, Some(parent));
|
||||
}
|
||||
|
||||
fn push_scope_with_parent(&mut self, node: NodeWithScopeRef, parent: Option<FileScopeId>) {
|
||||
let children_start = self.scopes.next_index() + 1;
|
||||
|
||||
let scope = Scope {
|
||||
parent,
|
||||
kind: node.scope_kind(),
|
||||
descendents: children_start..children_start,
|
||||
};
|
||||
|
||||
let file_scope_id = self.scopes.push(scope);
|
||||
self.symbol_tables.push(SymbolTableBuilder::new());
|
||||
self.use_def_maps.push(UseDefMapBuilder::new());
|
||||
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
// SAFETY: `node` is guaranteed to be a child of `self.module`
|
||||
let scope_id = ScopeId::new(
|
||||
self.db,
|
||||
self.file,
|
||||
file_scope_id,
|
||||
unsafe { node.to_kind(self.module.clone()) },
|
||||
countme::Count::default(),
|
||||
);
|
||||
|
||||
self.scope_ids_by_scope.push(scope_id);
|
||||
self.scopes_by_node.insert(node.node_key(), file_scope_id);
|
||||
|
||||
debug_assert_eq!(ast_id_scope, file_scope_id);
|
||||
|
||||
self.scope_stack.push(file_scope_id);
|
||||
}
|
||||
|
||||
fn pop_scope(&mut self) -> FileScopeId {
|
||||
let id = self.scope_stack.pop().expect("Root scope to be present");
|
||||
let children_end = self.scopes.next_index();
|
||||
let scope = &mut self.scopes[id];
|
||||
scope.descendents = scope.descendents.start..children_end;
|
||||
id
|
||||
}
|
||||
|
||||
fn current_symbol_table(&mut self) -> &mut SymbolTableBuilder {
|
||||
let scope_id = self.current_scope();
|
||||
&mut self.symbol_tables[scope_id]
|
||||
}
|
||||
|
||||
fn current_use_def_map_mut(&mut self) -> &mut UseDefMapBuilder<'db> {
|
||||
let scope_id = self.current_scope();
|
||||
&mut self.use_def_maps[scope_id]
|
||||
}
|
||||
|
||||
fn current_use_def_map(&self) -> &UseDefMapBuilder<'db> {
|
||||
let scope_id = self.current_scope();
|
||||
&self.use_def_maps[scope_id]
|
||||
}
|
||||
|
||||
fn current_ast_ids(&mut self) -> &mut AstIdsBuilder {
|
||||
let scope_id = self.current_scope();
|
||||
&mut self.ast_ids[scope_id]
|
||||
}
|
||||
|
||||
fn flow_snapshot(&self) -> FlowSnapshot {
|
||||
self.current_use_def_map().snapshot()
|
||||
}
|
||||
|
||||
fn flow_restore(&mut self, state: FlowSnapshot) {
|
||||
self.current_use_def_map_mut().restore(state);
|
||||
}
|
||||
|
||||
fn flow_merge(&mut self, state: FlowSnapshot) {
|
||||
self.current_use_def_map_mut().merge(state);
|
||||
}
|
||||
|
||||
fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
|
||||
let symbol_table = self.current_symbol_table();
|
||||
let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
|
||||
if added {
|
||||
let use_def_map = self.current_use_def_map_mut();
|
||||
use_def_map.add_symbol(symbol_id);
|
||||
}
|
||||
symbol_id
|
||||
}
|
||||
|
||||
fn add_definition<'a>(
|
||||
&mut self,
|
||||
symbol: ScopedSymbolId,
|
||||
definition_node: impl Into<DefinitionNodeRef<'a>>,
|
||||
) -> Definition<'db> {
|
||||
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
|
||||
let definition = Definition::new(
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
symbol,
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
definition_node.into_owned(self.module.clone())
|
||||
},
|
||||
countme::Count::default(),
|
||||
);
|
||||
|
||||
self.definitions_by_node
|
||||
.insert(definition_node.key(), definition);
|
||||
self.current_use_def_map_mut()
|
||||
.record_definition(symbol, definition);
|
||||
|
||||
definition
|
||||
}
|
||||
|
||||
fn add_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
|
||||
let expression = self.add_standalone_expression(constraint_node);
|
||||
self.current_use_def_map_mut().record_constraint(expression);
|
||||
|
||||
expression
|
||||
}
|
||||
|
||||
/// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
|
||||
/// standalone (type narrowing tests, RHS of an assignment.)
|
||||
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
|
||||
let expression = Expression::new(
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
AstNodeRef::new(self.module.clone(), expression_node)
|
||||
},
|
||||
countme::Count::default(),
|
||||
);
|
||||
self.expressions_by_node
|
||||
.insert(expression_node.into(), expression);
|
||||
expression
|
||||
}
|
||||
|
||||
fn with_type_params(
|
||||
&mut self,
|
||||
with_scope: NodeWithScopeRef,
|
||||
type_params: Option<&'db ast::TypeParams>,
|
||||
nested: impl FnOnce(&mut Self) -> FileScopeId,
|
||||
) -> FileScopeId {
|
||||
if let Some(type_params) = type_params {
|
||||
self.push_scope(with_scope);
|
||||
|
||||
for type_param in &type_params.type_params {
|
||||
let (name, bound, default) = match type_param {
|
||||
ast::TypeParam::TypeVar(ast::TypeParamTypeVar {
|
||||
range: _,
|
||||
name,
|
||||
bound,
|
||||
default,
|
||||
}) => (name, bound, default),
|
||||
ast::TypeParam::ParamSpec(ast::TypeParamParamSpec {
|
||||
name, default, ..
|
||||
}) => (name, &None, default),
|
||||
ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple {
|
||||
name,
|
||||
default,
|
||||
..
|
||||
}) => (name, &None, default),
|
||||
};
|
||||
// TODO create Definition for typevars
|
||||
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
|
||||
if let Some(bound) = bound {
|
||||
self.visit_expr(bound);
|
||||
}
|
||||
if let Some(default) = default {
|
||||
self.visit_expr(default);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let nested_scope = nested(self);
|
||||
|
||||
if type_params.is_some() {
|
||||
self.pop_scope();
|
||||
}
|
||||
|
||||
nested_scope
|
||||
}
|
||||
|
||||
/// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
|
||||
/// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
|
||||
///
|
||||
/// [`Comprehension`]: ast::Comprehension
|
||||
fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
|
||||
let mut generators_iter = generators.iter();
|
||||
|
||||
let Some(generator) = generators_iter.next() else {
|
||||
unreachable!("Expression must contain at least one generator");
|
||||
};
|
||||
|
||||
// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
|
||||
// nodes are evaluated in the inner scope.
|
||||
self.visit_expr(&generator.iter);
|
||||
self.push_scope(scope);
|
||||
|
||||
self.current_assignment = Some(CurrentAssignment::Comprehension {
|
||||
node: generator,
|
||||
first: true,
|
||||
});
|
||||
self.visit_expr(&generator.target);
|
||||
self.current_assignment = None;
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
|
||||
for generator in generators_iter {
|
||||
self.visit_expr(&generator.iter);
|
||||
|
||||
self.current_assignment = Some(CurrentAssignment::Comprehension {
|
||||
node: generator,
|
||||
first: false,
|
||||
});
|
||||
self.visit_expr(&generator.target);
|
||||
self.current_assignment = None;
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn declare_parameter(&mut self, parameter: AnyParameterRef) {
|
||||
let symbol =
|
||||
self.add_or_update_symbol(parameter.name().id().clone(), SymbolFlags::IS_DEFINED);
|
||||
|
||||
let definition = self.add_definition(symbol, parameter);
|
||||
|
||||
if let AnyParameterRef::NonVariadic(with_default) = parameter {
|
||||
// Insert a mapping from the parameter to the same definition.
|
||||
// This ensures that calling `HasTy::ty` on the inner parameter returns
|
||||
// a valid type (and doesn't panic)
|
||||
self.definitions_by_node.insert(
|
||||
DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
|
||||
definition,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn build(mut self) -> SemanticIndex<'db> {
|
||||
let module = self.module;
|
||||
self.visit_body(module.suite());
|
||||
|
||||
// Pop the root scope
|
||||
self.pop_scope();
|
||||
assert!(self.scope_stack.is_empty());
|
||||
|
||||
assert!(self.current_assignment.is_none());
|
||||
|
||||
let mut symbol_tables: IndexVec<_, _> = self
|
||||
.symbol_tables
|
||||
.into_iter()
|
||||
.map(|builder| Arc::new(builder.finish()))
|
||||
.collect();
|
||||
|
||||
let mut use_def_maps: IndexVec<_, _> = self
|
||||
.use_def_maps
|
||||
.into_iter()
|
||||
.map(|builder| Arc::new(builder.finish()))
|
||||
.collect();
|
||||
|
||||
let mut ast_ids: IndexVec<_, _> = self
|
||||
.ast_ids
|
||||
.into_iter()
|
||||
.map(super::ast_ids::AstIdsBuilder::finish)
|
||||
.collect();
|
||||
|
||||
self.scopes.shrink_to_fit();
|
||||
symbol_tables.shrink_to_fit();
|
||||
use_def_maps.shrink_to_fit();
|
||||
ast_ids.shrink_to_fit();
|
||||
self.scopes_by_expression.shrink_to_fit();
|
||||
self.definitions_by_node.shrink_to_fit();
|
||||
|
||||
self.scope_ids_by_scope.shrink_to_fit();
|
||||
self.scopes_by_node.shrink_to_fit();
|
||||
|
||||
SemanticIndex {
|
||||
symbol_tables,
|
||||
scopes: self.scopes,
|
||||
definitions_by_node: self.definitions_by_node,
|
||||
expressions_by_node: self.expressions_by_node,
|
||||
scope_ids_by_scope: self.scope_ids_by_scope,
|
||||
ast_ids,
|
||||
scopes_by_expression: self.scopes_by_expression,
|
||||
scopes_by_node: self.scopes_by_node,
|
||||
use_def_maps,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db, 'ast> Visitor<'ast> for SemanticIndexBuilder<'db>
|
||||
where
|
||||
'ast: 'db,
|
||||
{
|
||||
fn visit_stmt(&mut self, stmt: &'ast ast::Stmt) {
|
||||
match stmt {
|
||||
ast::Stmt::FunctionDef(function_def) => {
|
||||
for decorator in &function_def.decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
|
||||
let symbol = self
|
||||
.add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
|
||||
self.add_definition(symbol, function_def);
|
||||
|
||||
// The default value of the parameters needs to be evaluated in the
|
||||
// enclosing scope.
|
||||
for default in function_def
|
||||
.parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.visit_expr(default);
|
||||
}
|
||||
|
||||
self.with_type_params(
|
||||
NodeWithScopeRef::FunctionTypeParameters(function_def),
|
||||
function_def.type_params.as_deref(),
|
||||
|builder| {
|
||||
builder.visit_parameters(&function_def.parameters);
|
||||
if let Some(expr) = &function_def.returns {
|
||||
builder.visit_annotation(expr);
|
||||
}
|
||||
|
||||
builder.push_scope(NodeWithScopeRef::Function(function_def));
|
||||
|
||||
// Add symbols and definitions for the parameters to the function scope.
|
||||
for parameter in &*function_def.parameters {
|
||||
builder.declare_parameter(parameter);
|
||||
}
|
||||
|
||||
builder.visit_body(&function_def.body);
|
||||
builder.pop_scope()
|
||||
},
|
||||
);
|
||||
}
|
||||
ast::Stmt::ClassDef(class) => {
|
||||
for decorator in &class.decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
|
||||
let symbol =
|
||||
self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
|
||||
self.add_definition(symbol, class);
|
||||
|
||||
self.with_type_params(
|
||||
NodeWithScopeRef::ClassTypeParameters(class),
|
||||
class.type_params.as_deref(),
|
||||
|builder| {
|
||||
if let Some(arguments) = &class.arguments {
|
||||
builder.visit_arguments(arguments);
|
||||
}
|
||||
|
||||
builder.push_scope(NodeWithScopeRef::Class(class));
|
||||
builder.visit_body(&class.body);
|
||||
|
||||
builder.pop_scope()
|
||||
},
|
||||
);
|
||||
}
|
||||
ast::Stmt::Import(node) => {
|
||||
for alias in &node.names {
|
||||
let symbol_name = if let Some(asname) = &alias.asname {
|
||||
asname.id.clone()
|
||||
} else {
|
||||
Name::new(alias.name.id.split('.').next().unwrap())
|
||||
};
|
||||
|
||||
let symbol = self.add_or_update_symbol(symbol_name, SymbolFlags::IS_DEFINED);
|
||||
self.add_definition(symbol, alias);
|
||||
}
|
||||
}
|
||||
ast::Stmt::ImportFrom(node) => {
|
||||
for (alias_index, alias) in node.names.iter().enumerate() {
|
||||
let symbol_name = if let Some(asname) = &alias.asname {
|
||||
&asname.id
|
||||
} else {
|
||||
&alias.name.id
|
||||
};
|
||||
|
||||
let symbol =
|
||||
self.add_or_update_symbol(symbol_name.clone(), SymbolFlags::IS_DEFINED);
|
||||
self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
|
||||
}
|
||||
}
|
||||
ast::Stmt::Assign(node) => {
|
||||
debug_assert!(self.current_assignment.is_none());
|
||||
self.visit_expr(&node.value);
|
||||
self.add_standalone_expression(&node.value);
|
||||
self.current_assignment = Some(node.into());
|
||||
for target in &node.targets {
|
||||
self.visit_expr(target);
|
||||
}
|
||||
self.current_assignment = None;
|
||||
}
|
||||
ast::Stmt::AnnAssign(node) => {
|
||||
debug_assert!(self.current_assignment.is_none());
|
||||
// TODO deferred annotation visiting
|
||||
self.visit_expr(&node.annotation);
|
||||
if let Some(value) = &node.value {
|
||||
self.visit_expr(value);
|
||||
}
|
||||
self.current_assignment = Some(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.current_assignment = None;
|
||||
}
|
||||
ast::Stmt::AugAssign(
|
||||
aug_assign @ ast::StmtAugAssign {
|
||||
range: _,
|
||||
target,
|
||||
op: _,
|
||||
value,
|
||||
},
|
||||
) => {
|
||||
debug_assert!(self.current_assignment.is_none());
|
||||
self.visit_expr(value);
|
||||
self.current_assignment = Some(aug_assign.into());
|
||||
self.visit_expr(target);
|
||||
self.current_assignment = None;
|
||||
}
|
||||
ast::Stmt::If(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
let pre_if = self.flow_snapshot();
|
||||
self.add_constraint(&node.test);
|
||||
self.visit_body(&node.body);
|
||||
let mut post_clauses: Vec<FlowSnapshot> = vec![];
|
||||
for clause in &node.elif_else_clauses {
|
||||
// snapshot after every block except the last; the last one will just become
|
||||
// the state that we merge the other snapshots into
|
||||
post_clauses.push(self.flow_snapshot());
|
||||
// we can only take an elif/else branch if none of the previous ones were
|
||||
// taken, so the block entry state is always `pre_if`
|
||||
self.flow_restore(pre_if.clone());
|
||||
self.visit_elif_else_clause(clause);
|
||||
}
|
||||
for post_clause_state in post_clauses {
|
||||
self.flow_merge(post_clause_state);
|
||||
}
|
||||
let has_else = node
|
||||
.elif_else_clauses
|
||||
.last()
|
||||
.is_some_and(|clause| clause.test.is_none());
|
||||
if !has_else {
|
||||
// if there's no else clause, then it's possible we took none of the branches,
|
||||
// and the pre_if state can reach here
|
||||
self.flow_merge(pre_if);
|
||||
}
|
||||
}
|
||||
ast::Stmt::While(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
|
||||
let pre_loop = self.flow_snapshot();
|
||||
|
||||
// Save aside any break states from an outer loop
|
||||
let saved_break_states = std::mem::take(&mut self.loop_break_states);
|
||||
self.visit_body(&node.body);
|
||||
// Get the break states from the body of this loop, and restore the saved outer
|
||||
// ones.
|
||||
let break_states =
|
||||
std::mem::replace(&mut self.loop_break_states, saved_break_states);
|
||||
|
||||
// We may execute the `else` clause without ever executing the body, so merge in
|
||||
// the pre-loop state before visiting `else`.
|
||||
self.flow_merge(pre_loop);
|
||||
self.visit_body(&node.orelse);
|
||||
|
||||
// Breaking out of a while loop bypasses the `else` clause, so merge in the break
|
||||
// states after visiting `else`.
|
||||
for break_state in break_states {
|
||||
self.flow_merge(break_state);
|
||||
}
|
||||
}
|
||||
ast::Stmt::Break(_) => {
|
||||
self.loop_break_states.push(self.flow_snapshot());
|
||||
}
|
||||
_ => {
|
||||
walk_stmt(self, stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'ast ast::Expr) {
|
||||
self.scopes_by_expression
|
||||
.insert(expr.into(), self.current_scope());
|
||||
self.current_ast_ids().record_expression(expr);
|
||||
|
||||
match expr {
|
||||
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
|
||||
let mut flags = match ctx {
|
||||
ast::ExprContext::Load => SymbolFlags::IS_USED,
|
||||
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
|
||||
ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
|
||||
ast::ExprContext::Invalid => SymbolFlags::empty(),
|
||||
};
|
||||
if matches!(
|
||||
self.current_assignment,
|
||||
Some(CurrentAssignment::AugAssign(_))
|
||||
) && !ctx.is_invalid()
|
||||
{
|
||||
// For augmented assignment, the target expression is also used, so we should
|
||||
// record that as a use.
|
||||
flags |= SymbolFlags::IS_USED;
|
||||
}
|
||||
let symbol = self.add_or_update_symbol(id.clone(), flags);
|
||||
if flags.contains(SymbolFlags::IS_DEFINED) {
|
||||
match self.current_assignment {
|
||||
Some(CurrentAssignment::Assign(assignment)) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
AssignmentDefinitionNodeRef {
|
||||
assignment,
|
||||
target: name_node,
|
||||
},
|
||||
);
|
||||
}
|
||||
Some(CurrentAssignment::AnnAssign(ann_assign)) => {
|
||||
self.add_definition(symbol, ann_assign);
|
||||
}
|
||||
Some(CurrentAssignment::AugAssign(aug_assign)) => {
|
||||
self.add_definition(symbol, aug_assign);
|
||||
}
|
||||
Some(CurrentAssignment::Named(named)) => {
|
||||
// TODO(dhruvmanila): If the current scope is a comprehension, then the
|
||||
// named expression is implicitly nonlocal. This is yet to be
|
||||
// implemented.
|
||||
self.add_definition(symbol, named);
|
||||
}
|
||||
Some(CurrentAssignment::Comprehension { node, first }) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
ComprehensionDefinitionNodeRef { node, first },
|
||||
);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
if flags.contains(SymbolFlags::IS_USED) {
|
||||
let use_id = self.current_ast_ids().record_use(expr);
|
||||
self.current_use_def_map_mut().record_use(symbol, use_id);
|
||||
}
|
||||
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
ast::Expr::Named(node) => {
|
||||
debug_assert!(self.current_assignment.is_none());
|
||||
self.current_assignment = Some(node.into());
|
||||
// TODO walrus in comprehensions is implicitly nonlocal
|
||||
self.visit_expr(&node.target);
|
||||
self.current_assignment = None;
|
||||
self.visit_expr(&node.value);
|
||||
}
|
||||
ast::Expr::Lambda(lambda) => {
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
// The default value of the parameters needs to be evaluated in the
|
||||
// enclosing scope.
|
||||
for default in parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.visit_expr(default);
|
||||
}
|
||||
self.visit_parameters(parameters);
|
||||
}
|
||||
self.push_scope(NodeWithScopeRef::Lambda(lambda));
|
||||
|
||||
// Add symbols and definitions for the parameters to the lambda scope.
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
for parameter in &**parameters {
|
||||
self.declare_parameter(parameter);
|
||||
}
|
||||
}
|
||||
|
||||
self.visit_expr(lambda.body.as_ref());
|
||||
}
|
||||
ast::Expr::If(ast::ExprIf {
|
||||
body, test, orelse, ..
|
||||
}) => {
|
||||
// TODO detect statically known truthy or falsy test (via type inference, not naive
|
||||
// AST inspection, so we can't simplify here, need to record test expression for
|
||||
// later checking)
|
||||
self.visit_expr(test);
|
||||
let pre_if = self.flow_snapshot();
|
||||
self.visit_expr(body);
|
||||
let post_body = self.flow_snapshot();
|
||||
self.flow_restore(pre_if);
|
||||
self.visit_expr(orelse);
|
||||
self.flow_merge(post_body);
|
||||
}
|
||||
ast::Expr::ListComp(
|
||||
list_comprehension @ ast::ExprListComp {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::ListComprehension(list_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::SetComp(
|
||||
set_comprehension @ ast::ExprSetComp {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::SetComprehension(set_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::Generator(
|
||||
generator @ ast::ExprGenerator {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::DictComp(
|
||||
dict_comprehension @ ast::ExprDictComp {
|
||||
key,
|
||||
value,
|
||||
generators,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::DictComprehension(dict_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(key);
|
||||
self.visit_expr(value);
|
||||
}
|
||||
_ => {
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
if matches!(
|
||||
expr,
|
||||
ast::Expr::Lambda(_)
|
||||
| ast::Expr::ListComp(_)
|
||||
| ast::Expr::SetComp(_)
|
||||
| ast::Expr::Generator(_)
|
||||
| ast::Expr::DictComp(_)
|
||||
) {
|
||||
self.pop_scope();
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
|
||||
// Intentionally avoid walking default expressions, as we handle them in the enclosing
|
||||
// scope.
|
||||
for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
|
||||
self.visit_parameter(parameter);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
|
||||
if let ast::Pattern::MatchAs(ast::PatternMatchAs {
|
||||
name: Some(name), ..
|
||||
})
|
||||
| ast::Pattern::MatchStar(ast::PatternMatchStar {
|
||||
name: Some(name),
|
||||
range: _,
|
||||
})
|
||||
| ast::Pattern::MatchMapping(ast::PatternMatchMapping {
|
||||
rest: Some(name), ..
|
||||
}) = pattern
|
||||
{
|
||||
// TODO(dhruvmanila): Add definition
|
||||
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
|
||||
}
|
||||
|
||||
walk_pattern(self, pattern);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum CurrentAssignment<'a> {
|
||||
Assign(&'a ast::StmtAssign),
|
||||
AnnAssign(&'a ast::StmtAnnAssign),
|
||||
AugAssign(&'a ast::StmtAugAssign),
|
||||
Named(&'a ast::ExprNamed),
|
||||
Comprehension {
|
||||
node: &'a ast::Comprehension,
|
||||
first: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
|
||||
fn from(value: &'a ast::StmtAssign) -> Self {
|
||||
Self::Assign(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
|
||||
fn from(value: &'a ast::StmtAnnAssign) -> Self {
|
||||
Self::AnnAssign(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
|
||||
fn from(value: &'a ast::StmtAugAssign) -> Self {
|
||||
Self::AugAssign(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
|
||||
fn from(value: &'a ast::ExprNamed) -> Self {
|
||||
Self::Named(value)
|
||||
}
|
||||
}
|
||||
330
crates/red_knot_python_semantic/src/semantic_index/definition.rs
Normal file
330
crates/red_knot_python_semantic/src/semantic_index/definition.rs
Normal file
@@ -0,0 +1,330 @@
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_python_ast as ast;
|
||||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::node_key::NodeKey;
|
||||
use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
|
||||
use crate::Db;
|
||||
|
||||
#[salsa::tracked]
|
||||
pub struct Definition<'db> {
|
||||
/// The file in which the definition occurs.
|
||||
#[id]
|
||||
pub(crate) file: File,
|
||||
|
||||
/// The scope in which the definition occurs.
|
||||
#[id]
|
||||
pub(crate) file_scope: FileScopeId,
|
||||
|
||||
/// The symbol defined.
|
||||
#[id]
|
||||
pub(crate) symbol: ScopedSymbolId,
|
||||
|
||||
#[no_eq]
|
||||
#[return_ref]
|
||||
pub(crate) node: DefinitionKind,
|
||||
|
||||
#[no_eq]
|
||||
count: countme::Count<Definition<'static>>,
|
||||
}
|
||||
|
||||
impl<'db> Definition<'db> {
|
||||
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
|
||||
self.file_scope(db).to_scope_id(db, self.file(db))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) enum DefinitionNodeRef<'a> {
|
||||
Import(&'a ast::Alias),
|
||||
ImportFrom(ImportFromDefinitionNodeRef<'a>),
|
||||
Function(&'a ast::StmtFunctionDef),
|
||||
Class(&'a ast::StmtClassDef),
|
||||
NamedExpression(&'a ast::ExprNamed),
|
||||
Assignment(AssignmentDefinitionNodeRef<'a>),
|
||||
AnnotatedAssignment(&'a ast::StmtAnnAssign),
|
||||
AugmentedAssignment(&'a ast::StmtAugAssign),
|
||||
Comprehension(ComprehensionDefinitionNodeRef<'a>),
|
||||
Parameter(ast::AnyParameterRef<'a>),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::StmtFunctionDef) -> Self {
|
||||
Self::Function(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::StmtClassDef) -> Self {
|
||||
Self::Class(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::ExprNamed) -> Self {
|
||||
Self::NamedExpression(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::StmtAnnAssign) -> Self {
|
||||
Self::AnnotatedAssignment(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::StmtAugAssign) -> Self {
|
||||
Self::AugmentedAssignment(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: &'a ast::Alias) -> Self {
|
||||
Self::Import(node_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
|
||||
Self::ImportFrom(node_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
|
||||
Self::Assignment(node_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
|
||||
Self::Comprehension(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: ast::AnyParameterRef<'a>) -> Self {
|
||||
Self::Parameter(node)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::StmtImportFrom,
|
||||
pub(crate) alias_index: usize,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct AssignmentDefinitionNodeRef<'a> {
|
||||
pub(crate) assignment: &'a ast::StmtAssign,
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::Comprehension,
|
||||
pub(crate) first: bool,
|
||||
}
|
||||
|
||||
impl DefinitionNodeRef<'_> {
|
||||
#[allow(unsafe_code)]
|
||||
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
|
||||
match self {
|
||||
DefinitionNodeRef::Import(alias) => {
|
||||
DefinitionKind::Import(AstNodeRef::new(parsed, alias))
|
||||
}
|
||||
DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
|
||||
DefinitionKind::ImportFrom(ImportFromDefinitionKind {
|
||||
node: AstNodeRef::new(parsed, node),
|
||||
alias_index,
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::Function(function) => {
|
||||
DefinitionKind::Function(AstNodeRef::new(parsed, function))
|
||||
}
|
||||
DefinitionNodeRef::Class(class) => {
|
||||
DefinitionKind::Class(AstNodeRef::new(parsed, class))
|
||||
}
|
||||
DefinitionNodeRef::NamedExpression(named) => {
|
||||
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
|
||||
}
|
||||
DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
|
||||
DefinitionKind::Assignment(AssignmentDefinitionKind {
|
||||
assignment: AstNodeRef::new(parsed.clone(), assignment),
|
||||
target: AstNodeRef::new(parsed, target),
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::AnnotatedAssignment(assign) => {
|
||||
DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
|
||||
}
|
||||
DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
|
||||
DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
|
||||
}
|
||||
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
|
||||
DefinitionKind::Comprehension(ComprehensionDefinitionKind {
|
||||
node: AstNodeRef::new(parsed, node),
|
||||
first,
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::Parameter(parameter) => match parameter {
|
||||
ast::AnyParameterRef::Variadic(parameter) => {
|
||||
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
ast::AnyParameterRef::NonVariadic(parameter) => {
|
||||
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn key(self) -> DefinitionNodeKey {
|
||||
match self {
|
||||
Self::Import(node) => node.into(),
|
||||
Self::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
|
||||
(&node.names[alias_index]).into()
|
||||
}
|
||||
Self::Function(node) => node.into(),
|
||||
Self::Class(node) => node.into(),
|
||||
Self::NamedExpression(node) => node.into(),
|
||||
Self::Assignment(AssignmentDefinitionNodeRef {
|
||||
assignment: _,
|
||||
target,
|
||||
}) => target.into(),
|
||||
Self::AnnotatedAssignment(node) => node.into(),
|
||||
Self::AugmentedAssignment(node) => node.into(),
|
||||
Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
|
||||
Self::Parameter(node) => match node {
|
||||
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
|
||||
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum DefinitionKind {
|
||||
Import(AstNodeRef<ast::Alias>),
|
||||
ImportFrom(ImportFromDefinitionKind),
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
Class(AstNodeRef<ast::StmtClassDef>),
|
||||
NamedExpression(AstNodeRef<ast::ExprNamed>),
|
||||
Assignment(AssignmentDefinitionKind),
|
||||
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
|
||||
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
|
||||
Comprehension(ComprehensionDefinitionKind),
|
||||
Parameter(AstNodeRef<ast::Parameter>),
|
||||
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ComprehensionDefinitionKind {
|
||||
node: AstNodeRef<ast::Comprehension>,
|
||||
first: bool,
|
||||
}
|
||||
|
||||
impl ComprehensionDefinitionKind {
|
||||
pub(crate) fn node(&self) -> &ast::Comprehension {
|
||||
self.node.node()
|
||||
}
|
||||
|
||||
pub(crate) fn is_first(&self) -> bool {
|
||||
self.first
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ImportFromDefinitionKind {
|
||||
node: AstNodeRef<ast::StmtImportFrom>,
|
||||
alias_index: usize,
|
||||
}
|
||||
|
||||
impl ImportFromDefinitionKind {
|
||||
pub(crate) fn import(&self) -> &ast::StmtImportFrom {
|
||||
self.node.node()
|
||||
}
|
||||
|
||||
pub(crate) fn alias(&self) -> &ast::Alias {
|
||||
&self.node.node().names[self.alias_index]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub struct AssignmentDefinitionKind {
|
||||
assignment: AstNodeRef<ast::StmtAssign>,
|
||||
target: AstNodeRef<ast::ExprName>,
|
||||
}
|
||||
|
||||
impl AssignmentDefinitionKind {
|
||||
pub(crate) fn assignment(&self) -> &ast::StmtAssign {
|
||||
self.assignment.node()
|
||||
}
|
||||
|
||||
pub(crate) fn target(&self) -> &ast::ExprName {
|
||||
self.target.node()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
|
||||
pub(crate) struct DefinitionNodeKey(NodeKey);
|
||||
|
||||
impl From<&ast::Alias> for DefinitionNodeKey {
|
||||
fn from(node: &ast::Alias) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::StmtFunctionDef> for DefinitionNodeKey {
|
||||
fn from(node: &ast::StmtFunctionDef) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::StmtClassDef> for DefinitionNodeKey {
|
||||
fn from(node: &ast::StmtClassDef) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::ExprName> for DefinitionNodeKey {
|
||||
fn from(node: &ast::ExprName) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::ExprNamed> for DefinitionNodeKey {
|
||||
fn from(node: &ast::ExprNamed) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
|
||||
fn from(node: &ast::StmtAnnAssign) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
|
||||
fn from(node: &ast::StmtAugAssign) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Comprehension> for DefinitionNodeKey {
|
||||
fn from(node: &ast::Comprehension) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Parameter> for DefinitionNodeKey {
|
||||
fn from(node: &ast::Parameter) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
|
||||
fn from(node: &ast::ParameterWithDefault) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::db::Db;
|
||||
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
|
||||
use ruff_db::files::File;
|
||||
use ruff_python_ast as ast;
|
||||
use salsa;
|
||||
|
||||
/// An independently type-inferable expression.
|
||||
///
|
||||
/// Includes constraint expressions (e.g. if tests) and the RHS of an unpacking assignment.
|
||||
#[salsa::tracked]
|
||||
pub(crate) struct Expression<'db> {
|
||||
/// The file in which the expression occurs.
|
||||
#[id]
|
||||
pub(crate) file: File,
|
||||
|
||||
/// The scope in which the expression occurs.
|
||||
#[id]
|
||||
pub(crate) file_scope: FileScopeId,
|
||||
|
||||
/// The expression node.
|
||||
#[no_eq]
|
||||
#[return_ref]
|
||||
pub(crate) node_ref: AstNodeRef<ast::Expr>,
|
||||
|
||||
#[no_eq]
|
||||
count: countme::Count<Expression<'static>>,
|
||||
}
|
||||
|
||||
impl<'db> Expression<'db> {
|
||||
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
|
||||
self.file_scope(db).to_scope_id(db, self.file(db))
|
||||
}
|
||||
}
|
||||
437
crates/red_knot_python_semantic/src/semantic_index/symbol.rs
Normal file
437
crates/red_knot_python_semantic/src/semantic_index/symbol.rs
Normal file
@@ -0,0 +1,437 @@
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::Range;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use hashbrown::hash_map::RawEntryMut;
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use ruff_python_ast::name::Name;
|
||||
use ruff_python_ast::{self as ast};
|
||||
use rustc_hash::FxHasher;
|
||||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::node_key::NodeKey;
|
||||
use crate::semantic_index::{semantic_index, SymbolMap};
|
||||
use crate::Db;
|
||||
|
||||
#[derive(Eq, PartialEq, Debug)]
|
||||
pub struct Symbol {
|
||||
name: Name,
|
||||
flags: SymbolFlags,
|
||||
}
|
||||
|
||||
impl Symbol {
|
||||
fn new(name: Name) -> Self {
|
||||
Self {
|
||||
name,
|
||||
flags: SymbolFlags::empty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_flags(&mut self, flags: SymbolFlags) {
|
||||
self.flags.insert(flags);
|
||||
}
|
||||
|
||||
/// The symbol's name.
|
||||
pub fn name(&self) -> &Name {
|
||||
&self.name
|
||||
}
|
||||
|
||||
/// Is the symbol used in its containing scope?
|
||||
pub fn is_used(&self) -> bool {
|
||||
self.flags.contains(SymbolFlags::IS_USED)
|
||||
}
|
||||
|
||||
/// Is the symbol defined in its containing scope?
|
||||
pub fn is_defined(&self) -> bool {
|
||||
self.flags.contains(SymbolFlags::IS_DEFINED)
|
||||
}
|
||||
}
|
||||
|
||||
bitflags! {
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub(super) struct SymbolFlags: u8 {
|
||||
const IS_USED = 1 << 0;
|
||||
const IS_DEFINED = 1 << 1;
|
||||
/// TODO: This flag is not yet set by anything
|
||||
const MARKED_GLOBAL = 1 << 2;
|
||||
/// TODO: This flag is not yet set by anything
|
||||
const MARKED_NONLOCAL = 1 << 3;
|
||||
}
|
||||
}
|
||||
|
||||
/// ID that uniquely identifies a symbol in a file.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct FileSymbolId {
|
||||
scope: FileScopeId,
|
||||
scoped_symbol_id: ScopedSymbolId,
|
||||
}
|
||||
|
||||
impl FileSymbolId {
|
||||
pub fn scope(self) -> FileScopeId {
|
||||
self.scope
|
||||
}
|
||||
|
||||
pub(crate) fn scoped_symbol_id(self) -> ScopedSymbolId {
|
||||
self.scoped_symbol_id
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FileSymbolId> for ScopedSymbolId {
|
||||
fn from(val: FileSymbolId) -> Self {
|
||||
val.scoped_symbol_id()
|
||||
}
|
||||
}
|
||||
|
||||
/// Symbol ID that uniquely identifies a symbol inside a [`Scope`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedSymbolId;
|
||||
|
||||
/// A cross-module identifier of a scope that can be used as a salsa query parameter.
|
||||
#[salsa::tracked]
|
||||
pub struct ScopeId<'db> {
|
||||
#[id]
|
||||
pub file: File,
|
||||
#[id]
|
||||
pub file_scope_id: FileScopeId,
|
||||
|
||||
/// The node that introduces this scope.
|
||||
#[no_eq]
|
||||
#[return_ref]
|
||||
pub node: NodeWithScopeKind,
|
||||
|
||||
#[no_eq]
|
||||
count: countme::Count<ScopeId<'static>>,
|
||||
}
|
||||
|
||||
impl<'db> ScopeId<'db> {
|
||||
pub(crate) fn is_function_like(self, db: &'db dyn Db) -> bool {
|
||||
// Type parameter scopes behave like function scopes in terms of name resolution; CPython
|
||||
// symbol table also uses the term "function-like" for these scopes.
|
||||
matches!(
|
||||
self.node(db),
|
||||
NodeWithScopeKind::ClassTypeParameters(_)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(_)
|
||||
| NodeWithScopeKind::Function(_)
|
||||
| NodeWithScopeKind::ListComprehension(_)
|
||||
| NodeWithScopeKind::SetComprehension(_)
|
||||
| NodeWithScopeKind::DictComprehension(_)
|
||||
| NodeWithScopeKind::GeneratorExpression(_)
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn name(self, db: &'db dyn Db) -> &'db str {
|
||||
match self.node(db) {
|
||||
NodeWithScopeKind::Module => "<module>",
|
||||
NodeWithScopeKind::Class(class) | NodeWithScopeKind::ClassTypeParameters(class) => {
|
||||
class.name.as_str()
|
||||
}
|
||||
NodeWithScopeKind::Function(function)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
|
||||
NodeWithScopeKind::Lambda(_) => "<lambda>",
|
||||
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
|
||||
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
|
||||
NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
|
||||
NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// ID that uniquely identifies a scope inside of a module.
|
||||
#[newtype_index]
|
||||
pub struct FileScopeId;
|
||||
|
||||
impl FileScopeId {
|
||||
/// Returns the scope id of the module-global scope.
|
||||
pub fn global() -> Self {
|
||||
FileScopeId::from_u32(0)
|
||||
}
|
||||
|
||||
pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
|
||||
let index = semantic_index(db, file);
|
||||
index.scope_ids_by_scope[self]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub struct Scope {
|
||||
pub(super) parent: Option<FileScopeId>,
|
||||
pub(super) kind: ScopeKind,
|
||||
pub(super) descendents: Range<FileScopeId>,
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
pub fn parent(self) -> Option<FileScopeId> {
|
||||
self.parent
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> ScopeKind {
|
||||
self.kind
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ScopeKind {
|
||||
Module,
|
||||
Annotation,
|
||||
Class,
|
||||
Function,
|
||||
Comprehension,
|
||||
}
|
||||
|
||||
impl ScopeKind {
|
||||
pub const fn is_comprehension(self) -> bool {
|
||||
matches!(self, ScopeKind::Comprehension)
|
||||
}
|
||||
}
|
||||
|
||||
/// Symbol table for a specific [`Scope`].
|
||||
#[derive(Debug)]
|
||||
pub struct SymbolTable {
|
||||
/// The symbols in this scope.
|
||||
symbols: IndexVec<ScopedSymbolId, Symbol>,
|
||||
|
||||
/// The symbols indexed by name.
|
||||
symbols_by_name: SymbolMap,
|
||||
}
|
||||
|
||||
impl SymbolTable {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
symbols: IndexVec::new(),
|
||||
symbols_by_name: SymbolMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn shrink_to_fit(&mut self) {
|
||||
self.symbols.shrink_to_fit();
|
||||
}
|
||||
|
||||
pub(crate) fn symbol(&self, symbol_id: impl Into<ScopedSymbolId>) -> &Symbol {
|
||||
&self.symbols[symbol_id.into()]
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub(crate) fn symbol_ids(&self) -> impl Iterator<Item = ScopedSymbolId> {
|
||||
self.symbols.indices()
|
||||
}
|
||||
|
||||
pub fn symbols(&self) -> impl Iterator<Item = &Symbol> {
|
||||
self.symbols.iter()
|
||||
}
|
||||
|
||||
/// Returns the symbol named `name`.
|
||||
pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol> {
|
||||
let id = self.symbol_id_by_name(name)?;
|
||||
Some(self.symbol(id))
|
||||
}
|
||||
|
||||
/// Returns the [`ScopedSymbolId`] of the symbol named `name`.
|
||||
pub(crate) fn symbol_id_by_name(&self, name: &str) -> Option<ScopedSymbolId> {
|
||||
let (id, ()) = self
|
||||
.symbols_by_name
|
||||
.raw_entry()
|
||||
.from_hash(Self::hash_name(name), |id| {
|
||||
self.symbol(*id).name().as_str() == name
|
||||
})?;
|
||||
|
||||
Some(*id)
|
||||
}
|
||||
|
||||
fn hash_name(name: &str) -> u64 {
|
||||
let mut hasher = FxHasher::default();
|
||||
name.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for SymbolTable {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
// We don't need to compare the symbols_by_name because the name is already captured in `Symbol`.
|
||||
self.symbols == other.symbols
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for SymbolTable {}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct SymbolTableBuilder {
|
||||
table: SymbolTable,
|
||||
}
|
||||
|
||||
impl SymbolTableBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
table: SymbolTable::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_or_update_symbol(
|
||||
&mut self,
|
||||
name: Name,
|
||||
flags: SymbolFlags,
|
||||
) -> (ScopedSymbolId, bool) {
|
||||
let hash = SymbolTable::hash_name(&name);
|
||||
let entry = self
|
||||
.table
|
||||
.symbols_by_name
|
||||
.raw_entry_mut()
|
||||
.from_hash(hash, |id| self.table.symbols[*id].name() == &name);
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(entry) => {
|
||||
let symbol = &mut self.table.symbols[*entry.key()];
|
||||
symbol.insert_flags(flags);
|
||||
|
||||
(*entry.key(), false)
|
||||
}
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
let mut symbol = Symbol::new(name);
|
||||
symbol.insert_flags(flags);
|
||||
|
||||
let id = self.table.symbols.push(symbol);
|
||||
entry.insert_with_hasher(hash, id, (), |id| {
|
||||
SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
|
||||
});
|
||||
(id, true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn finish(mut self) -> SymbolTable {
|
||||
self.table.shrink_to_fit();
|
||||
self.table
|
||||
}
|
||||
}
|
||||
|
||||
/// Reference to a node that introduces a new scope.
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) enum NodeWithScopeRef<'a> {
|
||||
Module,
|
||||
Class(&'a ast::StmtClassDef),
|
||||
Function(&'a ast::StmtFunctionDef),
|
||||
Lambda(&'a ast::ExprLambda),
|
||||
FunctionTypeParameters(&'a ast::StmtFunctionDef),
|
||||
ClassTypeParameters(&'a ast::StmtClassDef),
|
||||
ListComprehension(&'a ast::ExprListComp),
|
||||
SetComprehension(&'a ast::ExprSetComp),
|
||||
DictComprehension(&'a ast::ExprDictComp),
|
||||
GeneratorExpression(&'a ast::ExprGenerator),
|
||||
}
|
||||
|
||||
impl NodeWithScopeRef<'_> {
|
||||
/// Converts the unowned reference to an owned [`NodeWithScopeKind`].
|
||||
///
|
||||
/// # Safety
|
||||
/// The node wrapped by `self` must be a child of `module`.
|
||||
#[allow(unsafe_code)]
|
||||
pub(super) unsafe fn to_kind(self, module: ParsedModule) -> NodeWithScopeKind {
|
||||
match self {
|
||||
NodeWithScopeRef::Module => NodeWithScopeKind::Module,
|
||||
NodeWithScopeRef::Class(class) => {
|
||||
NodeWithScopeKind::Class(AstNodeRef::new(module, class))
|
||||
}
|
||||
NodeWithScopeRef::Function(function) => {
|
||||
NodeWithScopeKind::Function(AstNodeRef::new(module, function))
|
||||
}
|
||||
NodeWithScopeRef::Lambda(lambda) => {
|
||||
NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
|
||||
}
|
||||
NodeWithScopeRef::FunctionTypeParameters(function) => {
|
||||
NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
|
||||
}
|
||||
NodeWithScopeRef::ClassTypeParameters(class) => {
|
||||
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
|
||||
}
|
||||
NodeWithScopeRef::ListComprehension(comprehension) => {
|
||||
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
|
||||
}
|
||||
NodeWithScopeRef::SetComprehension(comprehension) => {
|
||||
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
|
||||
}
|
||||
NodeWithScopeRef::DictComprehension(comprehension) => {
|
||||
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
|
||||
}
|
||||
NodeWithScopeRef::GeneratorExpression(generator) => {
|
||||
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn scope_kind(self) -> ScopeKind {
|
||||
match self {
|
||||
NodeWithScopeRef::Module => ScopeKind::Module,
|
||||
NodeWithScopeRef::Class(_) => ScopeKind::Class,
|
||||
NodeWithScopeRef::Function(_) => ScopeKind::Function,
|
||||
NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
|
||||
NodeWithScopeRef::FunctionTypeParameters(_)
|
||||
| NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
|
||||
NodeWithScopeRef::ListComprehension(_)
|
||||
| NodeWithScopeRef::SetComprehension(_)
|
||||
| NodeWithScopeRef::DictComprehension(_)
|
||||
| NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn node_key(self) -> NodeWithScopeKey {
|
||||
match self {
|
||||
NodeWithScopeRef::Module => NodeWithScopeKey::Module,
|
||||
NodeWithScopeRef::Class(class) => NodeWithScopeKey::Class(NodeKey::from_node(class)),
|
||||
NodeWithScopeRef::Function(function) => {
|
||||
NodeWithScopeKey::Function(NodeKey::from_node(function))
|
||||
}
|
||||
NodeWithScopeRef::Lambda(lambda) => {
|
||||
NodeWithScopeKey::Lambda(NodeKey::from_node(lambda))
|
||||
}
|
||||
NodeWithScopeRef::FunctionTypeParameters(function) => {
|
||||
NodeWithScopeKey::FunctionTypeParameters(NodeKey::from_node(function))
|
||||
}
|
||||
NodeWithScopeRef::ClassTypeParameters(class) => {
|
||||
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
|
||||
}
|
||||
NodeWithScopeRef::ListComprehension(comprehension) => {
|
||||
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
NodeWithScopeRef::SetComprehension(comprehension) => {
|
||||
NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
NodeWithScopeRef::DictComprehension(comprehension) => {
|
||||
NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
NodeWithScopeRef::GeneratorExpression(generator) => {
|
||||
NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Node that introduces a new scope.
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum NodeWithScopeKind {
|
||||
Module,
|
||||
Class(AstNodeRef<ast::StmtClassDef>),
|
||||
ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
|
||||
Lambda(AstNodeRef<ast::ExprLambda>),
|
||||
ListComprehension(AstNodeRef<ast::ExprListComp>),
|
||||
SetComprehension(AstNodeRef<ast::ExprSetComp>),
|
||||
DictComprehension(AstNodeRef<ast::ExprDictComp>),
|
||||
GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(crate) enum NodeWithScopeKey {
|
||||
Module,
|
||||
Class(NodeKey),
|
||||
ClassTypeParameters(NodeKey),
|
||||
Function(NodeKey),
|
||||
FunctionTypeParameters(NodeKey),
|
||||
Lambda(NodeKey),
|
||||
ListComprehension(NodeKey),
|
||||
SetComprehension(NodeKey),
|
||||
DictComprehension(NodeKey),
|
||||
GeneratorExpression(NodeKey),
|
||||
}
|
||||
375
crates/red_knot_python_semantic/src/semantic_index/use_def.rs
Normal file
375
crates/red_knot_python_semantic/src/semantic_index/use_def.rs
Normal file
@@ -0,0 +1,375 @@
|
||||
//! Build a map from each use of a symbol to the definitions visible from that use, and the
|
||||
//! type-narrowing constraints that apply to each definition.
|
||||
//!
|
||||
//! Let's take this code sample:
|
||||
//!
|
||||
//! ```python
|
||||
//! x = 1
|
||||
//! x = 2
|
||||
//! y = x
|
||||
//! if y is not None:
|
||||
//! x = 3
|
||||
//! else:
|
||||
//! x = 4
|
||||
//! z = x
|
||||
//! ```
|
||||
//!
|
||||
//! In this snippet, we have four definitions of `x` (the statements assigning `1`, `2`, `3`,
|
||||
//! and `4` to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first
|
||||
//! [`Definition`] of `x` is never visible to any use, because it's immediately replaced by the
|
||||
//! second definition, before any use happens. (A linter could thus flag the statement `x = 1`
|
||||
//! as likely superfluous.)
|
||||
//!
|
||||
//! The first use of `x` has one definition visible to it: the assignment `x = 2`.
|
||||
//!
|
||||
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
|
||||
//! the `else` branch. Thus, the second use of `x` has two definitions visible to it: `x = 3` and
|
||||
//! `x = 4`. The `x = 2` definition is no longer visible, because it must be replaced by either `x
|
||||
//! = 3` or `x = 4`, no matter which branch was taken. We don't know which branch was taken, so we
|
||||
//! must consider both definitions as visible, which means eventually we would (in type inference)
|
||||
//! look at these two definitions and infer a type of `Literal[3, 4]` -- the union of `Literal[3]`
|
||||
//! and `Literal[4]` -- for the second use of `x`.
|
||||
//!
|
||||
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
|
||||
//! definition(s) is/are visible from that use. In
|
||||
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
|
||||
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
|
||||
//!
|
||||
//! Another case we need to handle is when a symbol is referenced from a different scope (the most
|
||||
//! obvious example of this is an import). We call this "public" use of a symbol. So the other
|
||||
//! question we need to be able to answer is, what are the publicly-visible definitions of each
|
||||
//! symbol?
|
||||
//!
|
||||
//! Technically, public use of a symbol could also occur from any point in control flow of the
|
||||
//! scope where the symbol is defined (via inline imports and import cycles, in the case of an
|
||||
//! import, or via a function call partway through the local scope that ends up using a symbol from
|
||||
//! the scope via a global or nonlocal reference.) But modeling this fully accurately requires
|
||||
//! whole-program analysis that isn't tractable for an efficient incremental compiler, since it
|
||||
//! means a given symbol could have a different type every place it's referenced throughout the
|
||||
//! program, depending on the shape of arbitrarily-sized call/import graphs. So we follow other
|
||||
//! Python type-checkers in making the simplifying assumption that usually the scope will finish
|
||||
//! execution before its symbols are made visible to other scopes; for instance, most imports will
|
||||
//! import from a complete module, not a partially-executed module. (We may want to get a little
|
||||
//! smarter than this in the future, in particular for closures, but for now this is where we
|
||||
//! start.)
|
||||
//!
|
||||
//! So this means that the publicly-visible definitions of a symbol are the definitions still
|
||||
//! visible at the end of the scope; effectively we have an implicit "use" of every symbol at the
|
||||
//! end of the scope.
|
||||
//!
|
||||
//! We also need to know, for a given definition of a symbol, what type-narrowing constraints apply
|
||||
//! to it. For instance, in this code sample:
|
||||
//!
|
||||
//! ```python
|
||||
//! x = 1 if flag else None
|
||||
//! if x is not None:
|
||||
//! y = x
|
||||
//! ```
|
||||
//!
|
||||
//! At the use of `x` in `y = x`, the visible definition of `x` is `1 if flag else None`, which
|
||||
//! would infer as the type `Literal[1] | None`. But the constraint `x is not None` dominates this
|
||||
//! use, which means we can rule out the possibility that `x` is `None` here, which should give us
|
||||
//! the type `Literal[1]` for this use.
|
||||
//!
|
||||
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
|
||||
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
|
||||
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
|
||||
//! visible definitions at that use, or at the end of the scope for that symbol, with a list of the
|
||||
//! dominating constraints for each of those definitions.
|
||||
//!
|
||||
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
|
||||
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
|
||||
//! Instead, the values in `definitions_by_use` and `public_definitions` are a [`SymbolState`]
|
||||
//! struct which uses bit-sets to track definitions and constraints in terms of
|
||||
//! [`ScopedDefinitionId`] and [`ScopedConstraintId`], which are indices into the `all_definitions`
|
||||
//! and `all_constraints` indexvecs in the [`UseDefMap`].
|
||||
//!
|
||||
//! There is another special kind of possible "definition" for a symbol: there might be a path from
|
||||
//! the scope entry to a given use in which the symbol is never bound.
|
||||
//!
|
||||
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
|
||||
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
|
||||
//! unnecessarily increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
|
||||
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
|
||||
//! state, and constraints are irrelevant to it, we can represent it more efficiently: we use the
|
||||
//! `may_be_unbound` boolean on the [`SymbolState`] struct. If this flag is `true`, it means the
|
||||
//! symbol/use really has one additional visible "definition", which is the unbound state. If this
|
||||
//! flag is `false`, it means we've eliminated the possibility of unbound: every path we've
|
||||
//! followed includes a definition for this symbol.
|
||||
//!
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
|
||||
//! constraint as they are encountered by the
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
|
||||
//! each symbol, the builder tracks the `SymbolState` for that symbol. When we hit a use of a
|
||||
//! symbol, it records the current state for that symbol for that use. When we reach the end of the
|
||||
//! scope, it records the state for each symbol as the public definitions of that symbol.
|
||||
//!
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no visible
|
||||
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
|
||||
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
|
||||
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
|
||||
//! visible definitions for that use of `x` are just the `x = 2` definition.
|
||||
//!
|
||||
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
|
||||
//! all symbols, which we'll need later. Then we record `flag` as a possible constraint on the
|
||||
//! currently visible definition (`x = 2`), and go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` (constrained by `flag`) as the sole visible definition of `x`. At the
|
||||
//! end of the `if` body, we take another snapshot of the currently-visible definitions; we'll call
|
||||
//! this the post-if-body snapshot.
|
||||
//!
|
||||
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
|
||||
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
|
||||
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
|
||||
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
|
||||
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
|
||||
//! sole visible definition of `x`.
|
||||
//!
|
||||
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
|
||||
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
|
||||
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
|
||||
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
|
||||
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
|
||||
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
|
||||
//!
|
||||
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
|
||||
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
|
||||
//! visits a `StmtIf` node.
|
||||
//!
|
||||
//! (In the future we may have some other questions we want to answer as well, such as "is this
|
||||
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
|
||||
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
|
||||
use self::symbol_state::{
|
||||
ConstraintIdIterator, DefinitionIdWithConstraintsIterator, ScopedConstraintId,
|
||||
ScopedDefinitionId, SymbolState,
|
||||
};
|
||||
use crate::semantic_index::ast_ids::ScopedUseId;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::ScopedSymbolId;
|
||||
use ruff_index::IndexVec;
|
||||
|
||||
mod bitset;
|
||||
mod symbol_state;
|
||||
|
||||
/// Applicable definitions and constraints for every use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
    /// Array of [`Definition`] in this scope.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Array of constraints (as [`Expression`]) in this scope.
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,

    /// [`SymbolState`] visible at a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,

    /// [`SymbolState`] visible at end of scope for each symbol.
    public_definitions: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(
        &self,
        use_id: ScopedUseId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.definitions_by_use[use_id].visible_definitions(),
        }
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound()
    }

    pub(crate) fn public_definitions(
        &self,
        symbol: ScopedSymbolId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.public_definitions[symbol].visible_definitions(),
        }
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound()
    }
}

#[derive(Debug)]
pub(crate) struct DefinitionWithConstraintsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    inner: DefinitionIdWithConstraintsIterator<'map>,
}

impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
    type Item = DefinitionWithConstraints<'map, 'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner
            .next()
            .map(|def_id_with_constraints| DefinitionWithConstraints {
                definition: self.all_definitions[def_id_with_constraints.definition],
                constraints: ConstraintsIterator {
                    all_constraints: self.all_constraints,
                    constraint_ids: def_id_with_constraints.constraint_ids,
                },
            })
    }
}

impl std::iter::FusedIterator for DefinitionWithConstraintsIterator<'_, '_> {}

pub(crate) struct DefinitionWithConstraints<'map, 'db> {
    pub(crate) definition: Definition<'db>,
    pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}

pub(crate) struct ConstraintsIterator<'map, 'db> {
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    constraint_ids: ConstraintIdIterator<'map>,
}

impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
    type Item = Expression<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.constraint_ids
            .next()
            .map(|constraint_id| self.all_constraints[constraint_id])
    }
}

impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}

/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
}

#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
    /// Append-only array of [`Definition`]; None is unbound.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Append-only array of constraints (as [`Expression`]).
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn new() -> Self {
        Self::default()
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(SymbolState::unbound());
        debug_assert_eq!(symbol, new_symbol);
    }

    pub(super) fn record_definition(
        &mut self,
        symbol: ScopedSymbolId,
        definition: Definition<'db>,
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        let def_id = self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = SymbolState::with(def_id);
    }

    pub(super) fn record_constraint(&mut self, constraint: Expression<'db>) {
        let constraint_id = self.all_constraints.push(constraint);
        for definitions in &mut self.definitions_by_symbol {
            definitions.add_constraint(constraint_id);
        }
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
        // We have a use of a symbol; clone the currently visible definitions for that symbol, and
        // record them as the visible definitions for this use.
        let new_use = self
            .definitions_by_use
            .push(self.definitions_by_symbol[symbol].clone());
        debug_assert_eq!(use_id, new_use);
    }

    /// Take a snapshot of the current visible-symbols state.
    pub(super) fn snapshot(&self) -> FlowSnapshot {
        FlowSnapshot {
            definitions_by_symbol: self.definitions_by_symbol.clone(),
        }
    }

    /// Restore the current builder visible-definitions state to the given snapshot.
    pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        let num_symbols = self.definitions_by_symbol.len();
        debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());

        // Restore the current visible-definitions state to the given snapshot.
        self.definitions_by_symbol = snapshot.definitions_by_symbol;

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound".
        self.definitions_by_symbol
            .resize(num_symbols, SymbolState::unbound());
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them
        // adjacent. We can't ever move things around in `all_definitions` because previously
        // recorded uses may still have ranges pointing to any part of it; all we can do is append.
        // It's possible we may end up with some old entries in `all_definitions` that nobody is
        // pointing to, but that's OK.

        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());

        let mut snapshot_definitions_iter = snapshot.definitions_by_symbol.into_iter();
        for current in &mut self.definitions_by_symbol {
            if let Some(snapshot) = snapshot_definitions_iter.next() {
                current.merge(snapshot);
            } else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.add_unbound();
            }
        }
    }

    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.all_constraints.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();

        UseDefMap {
            all_definitions: self.all_definitions,
            all_constraints: self.all_constraints,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
        }
    }
}
@@ -0,0 +1,228 @@
/// Ordered set of `u32`.
///
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
/// blocks for larger values.
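///
/// A minimal usage sketch (illustrative; mirrors the tests at the bottom of this file):
///
/// ```ignore
/// let mut set = BitSet::<1>::with(3); // values below 64 * B are stored inline
/// set.insert(140);                    // larger values spill to heap-allocated blocks
/// assert_eq!(set.iter().collect::<Vec<_>>(), vec![3, 140]);
/// ```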
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum BitSet<const B: usize> {
    /// Bit-set (in 64-bit blocks) for the first 64 * B entries.
    Inline([u64; B]),

    /// Overflow beyond 64 * B.
    Heap(Vec<u64>),
}

impl<const B: usize> Default for BitSet<B> {
    fn default() -> Self {
        // B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
        // truncating casts to u32 below safe. This would be better as a const assertion, but
        // that's not possible on stable with const generic params. (B should never really be
        // anywhere close to this large.)
        assert!(B * 64 < (u32::MAX as usize));
        // This implementation requires usize >= 32 bits.
        static_assertions::const_assert!(usize::BITS >= 32);
        Self::Inline([0; B])
    }
}

impl<const B: usize> BitSet<B> {
    /// Create and return a new [`BitSet`] with a single `value` inserted.
    pub(super) fn with(value: u32) -> Self {
        let mut bitset = Self::default();
        bitset.insert(value);
        bitset
    }

    /// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
    fn resize(&mut self, value: u32) {
        let num_blocks_needed = (value / 64) + 1;
        match self {
            Self::Inline(blocks) => {
                let mut vec = blocks.to_vec();
                vec.resize(num_blocks_needed as usize, 0);
                *self = Self::Heap(vec);
            }
            Self::Heap(vec) => {
                vec.resize(num_blocks_needed as usize, 0);
            }
        }
    }

    fn blocks_mut(&mut self) -> &mut [u64] {
        match self {
            Self::Inline(blocks) => blocks.as_mut_slice(),
            Self::Heap(blocks) => blocks.as_mut_slice(),
        }
    }

    fn blocks(&self) -> &[u64] {
        match self {
            Self::Inline(blocks) => blocks.as_slice(),
            Self::Heap(blocks) => blocks.as_slice(),
        }
    }

    /// Insert a value into the [`BitSet`].
    ///
    /// Return true if the value was newly inserted, false if already present.
    pub(super) fn insert(&mut self, value: u32) -> bool {
        let value_usize = value as usize;
        let (block, index) = (value_usize / 64, value_usize % 64);
        if block >= self.blocks().len() {
            self.resize(value);
        }
        let blocks = self.blocks_mut();
        let missing = blocks[block] & (1 << index) == 0;
        blocks[block] |= 1 << index;
        missing
    }

    /// Intersect in-place with another [`BitSet`].
    pub(super) fn intersect(&mut self, other: &BitSet<B>) {
        let my_blocks = self.blocks_mut();
        let other_blocks = other.blocks();
        let min_len = my_blocks.len().min(other_blocks.len());
        for i in 0..min_len {
            my_blocks[i] &= other_blocks[i];
        }
        for block in my_blocks.iter_mut().skip(min_len) {
            *block = 0;
        }
    }

    /// Return an iterator over the values (in ascending order) in this [`BitSet`].
    pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
        let blocks = self.blocks();
        BitSetIterator {
            blocks,
            current_block_index: 0,
            current_block: blocks[0],
        }
    }
}

/// Iterator over values in a [`BitSet`].
#[derive(Debug)]
pub(super) struct BitSetIterator<'a, const B: usize> {
    /// The blocks we are iterating over.
    blocks: &'a [u64],

    /// The index of the block we are currently iterating through.
    current_block_index: usize,

    /// The block we are currently iterating through (and zeroing as we go.)
    current_block: u64,
}

impl<const B: usize> Iterator for BitSetIterator<'_, B> {
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        while self.current_block == 0 {
            if self.current_block_index + 1 >= self.blocks.len() {
                return None;
            }
            self.current_block_index += 1;
            self.current_block = self.blocks[self.current_block_index];
        }
        let lowest_bit_set = self.current_block.trailing_zeros();
        // reset the lowest set bit, without a data dependency on `lowest_bit_set`
        self.current_block &= self.current_block.wrapping_sub(1);
        // SAFETY: `lowest_bit_set` cannot be more than 64, `current_block_index` cannot be more
        // than `B - 1`, and we check above that `B * 64 < u32::MAX`. So both `64 *
        // current_block_index` and the final value here must fit in u32.
        #[allow(clippy::cast_possible_truncation)]
        Some(lowest_bit_set + (64 * self.current_block_index) as u32)
    }
}

impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}

#[cfg(test)]
mod tests {
    use super::BitSet;

    fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
        assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
    }

    #[test]
    fn iter() {
        let mut b = BitSet::<1>::with(3);
        b.insert(27);
        b.insert(6);
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[3, 6, 27]);
    }

    #[test]
    fn iter_overflow() {
        let mut b = BitSet::<1>::with(140);
        b.insert(100);
        b.insert(129);
        assert!(matches!(b, BitSet::Heap(_)));
        assert_bitset(&b, &[100, 129, 140]);
    }

    #[test]
    fn intersect() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(5);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_mixed_1() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(5);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_mixed_2() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(89);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_heap() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(90);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_heap_2() {
        let mut b1 = BitSet::<1>::with(89);
        let mut b2 = BitSet::<1>::with(89);
        b1.insert(91);
        b2.insert(90);

        b1.intersect(&b2);
        assert_bitset(&b1, &[89]);
    }

    #[test]
    fn multiple_blocks() {
        let mut b = BitSet::<2>::with(120);
        b.insert(45);
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[45, 120]);
    }
}
@@ -0,0 +1,374 @@
//! Track visible definitions of a symbol, and applicable constraints per definition.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between definitions and constraints, not just a single
//! set of currently dominating constraints (where "dominating" means "control flow must have
//! passed through it to reach this point"), because we can have dominating constraints that apply
//! to some definitions but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     if flag2:
//!         x = 2 if flag else None
//!     x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! definition of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each definition, an index into a list of dominating constraints,
//! either, because we can have definitions which are still visible, but subject to constraints
//! that are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//!     x = 1 if flag2 else None
//!     assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` definition, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of definitions and constraints when
//! needed.
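//!
//! For example (illustrative, using the constants defined below): up to 3 * 64 definitions per
//! symbol and up to 2 * 64 constraints per definition can be tracked inline, with the constraint
//! sets of up to 4 visible definitions kept inline per symbol, before anything spills to the heap.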
use super::bitset::{BitSet, BitSetIterator};
|
||||
use ruff_index::newtype_index;
|
||||
use smallvec::SmallVec;
|
||||
|
||||
/// A newtype-index for a definition in a particular scope.
|
||||
#[newtype_index]
|
||||
pub(super) struct ScopedDefinitionId;
|
||||
|
||||
/// A newtype-index for a constraint expression in a particular scope.
|
||||
#[newtype_index]
|
||||
pub(super) struct ScopedConstraintId;
|
||||
|
||||
/// Can reference this * 64 total definitions inline; more will fall back to the heap.
|
||||
const INLINE_DEFINITION_BLOCKS: usize = 3;
|
||||
|
||||
/// A [`BitSet`] of [`ScopedDefinitionId`], representing visible definitions of a symbol in a scope.
|
||||
type Definitions = BitSet<INLINE_DEFINITION_BLOCKS>;
|
||||
type DefinitionsIterator<'a> = BitSetIterator<'a, INLINE_DEFINITION_BLOCKS>;
|
||||
|
||||
/// Can reference this * 64 total constraints inline; more will fall back to the heap.
|
||||
const INLINE_CONSTRAINT_BLOCKS: usize = 2;
|
||||
|
||||
/// Can keep inline this many visible definitions per symbol at a given time; more will go to heap.
|
||||
const INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL: usize = 4;
|
||||
|
||||
/// One [`BitSet`] of applicable [`ScopedConstraintId`] per visible definition.
|
||||
type InlineConstraintArray =
|
||||
[BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL];
|
||||
type Constraints = SmallVec<InlineConstraintArray>;
|
||||
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
|
||||
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;
|
||||
|
||||
/// Visible definitions and narrowing constraints for a single symbol at some point in control flow.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct SymbolState {
|
||||
/// [`BitSet`]: which [`ScopedDefinitionId`] are visible for this symbol?
|
||||
visible_definitions: Definitions,
|
||||
|
||||
/// For each definition, which [`ScopedConstraintId`] apply?
|
||||
///
|
||||
/// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
|
||||
/// definition in `visible_definitions`.
|
||||
constraints: Constraints,
|
||||
|
||||
/// Could the symbol be unbound at this point?
|
||||
may_be_unbound: bool,
|
||||
}
|
||||
|
||||
/// A single [`ScopedDefinitionId`] with an iterator of its applicable [`ScopedConstraintId`].
|
||||
#[derive(Debug)]
|
||||
pub(super) struct DefinitionIdWithConstraints<'a> {
|
||||
pub(super) definition: ScopedDefinitionId,
|
||||
pub(super) constraint_ids: ConstraintIdIterator<'a>,
|
||||
}
|
||||
|
||||
impl SymbolState {
|
||||
/// Return a new [`SymbolState`] representing an unbound symbol.
|
||||
pub(super) fn unbound() -> Self {
|
||||
Self {
|
||||
visible_definitions: Definitions::default(),
|
||||
constraints: Constraints::default(),
|
||||
may_be_unbound: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a new [`SymbolState`] representing a symbol with a single visible definition.
|
||||
pub(super) fn with(definition_id: ScopedDefinitionId) -> Self {
|
||||
let mut constraints = Constraints::with_capacity(1);
|
||||
constraints.push(BitSet::default());
|
||||
Self {
|
||||
visible_definitions: Definitions::with(definition_id.into()),
|
||||
constraints,
|
||||
may_be_unbound: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add Unbound as a possibility for this symbol.
|
||||
pub(super) fn add_unbound(&mut self) {
|
||||
self.may_be_unbound = true;
|
||||
}
|
||||
|
||||
/// Add given constraint to all currently-visible definitions.
|
||||
pub(super) fn add_constraint(&mut self, constraint_id: ScopedConstraintId) {
|
||||
for bitset in &mut self.constraints {
|
||||
bitset.insert(constraint_id.into());
|
||||
}
|
||||
}
|
||||
|
||||
/// Merge another [`SymbolState`] into this one.
|
||||
pub(super) fn merge(&mut self, b: SymbolState) {
|
||||
let mut a = Self {
|
||||
visible_definitions: Definitions::default(),
|
||||
constraints: Constraints::default(),
|
||||
may_be_unbound: self.may_be_unbound || b.may_be_unbound,
|
||||
};
|
||||
std::mem::swap(&mut a, self);
|
||||
let mut a_defs_iter = a.visible_definitions.iter();
|
||||
let mut b_defs_iter = b.visible_definitions.iter();
|
||||
let mut a_constraints_iter = a.constraints.into_iter();
|
||||
let mut b_constraints_iter = b.constraints.into_iter();
|
||||
|
||||
let mut opt_a_def: Option<u32> = a_defs_iter.next();
|
||||
let mut opt_b_def: Option<u32> = b_defs_iter.next();
|
||||
|
||||
// Iterate through the definitions from `a` and `b`, always processing the lower definition
|
||||
// ID first, and pushing each definition onto the merged `SymbolState` with its
|
||||
// constraints. If a definition is found in both `a` and `b`, push it with the intersection
|
||||
// of the constraints from the two paths; a constraint that applies from only one possible
|
||||
// path is irrelevant.
|
||||
|
||||
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
|
||||
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
|
||||
merged.visible_definitions.insert(def);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
|
||||
// bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
|
||||
// `::merge` always pushes one definition and one constraint bitset together (just
|
||||
// below), so the number of definitions and the number of constraint bitsets can never
|
||||
// get out of sync.
|
||||
let constraints = constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
merged.constraints.push(constraints);
|
||||
};
|
||||
|
||||
loop {
|
||||
match (opt_a_def, opt_b_def) {
|
||||
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
|
||||
std::cmp::Ordering::Less => {
|
||||
// Next definition ID is only in `a`, push it to `self` and advance `a`.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Greater => {
|
||||
// Next definition ID is only in `b`, push it to `self` and advance `b`.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Equal => {
|
||||
// Next definition is in both; push to `self` and intersect constraints.
|
||||
push(a_def, &mut b_constraints_iter, self);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and
|
||||
// no constraint bitsets (`::unbound`) or one definition and one constraint
|
||||
// bitset (`::with`), and `::merge` always pushes one definition and one
|
||||
// constraint bitset together (just below), so the number of definitions
|
||||
// and the number of constraint bitsets can never get out of sync.
|
||||
let a_constraints = a_constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
// If the same definition is visible through both paths, any constraint
|
||||
// that applies on only one path is irrelevant to the resulting type from
|
||||
// unioning the two paths, so we intersect the constraints.
|
||||
self.constraints
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.intersect(&a_constraints);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
},
|
||||
(Some(a_def), None) => {
|
||||
// We've exhausted `b`, just push the def from `a` and move on to the next.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
(None, Some(b_def)) => {
|
||||
// We've exhausted `a`, just push the def from `b` and move on to the next.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
(None, None) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get iterator over visible definitions with constraints.
|
||||
pub(super) fn visible_definitions(&self) -> DefinitionIdWithConstraintsIterator {
|
||||
DefinitionIdWithConstraintsIterator {
|
||||
definitions: self.visible_definitions.iter(),
|
||||
constraints: self.constraints.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Could the symbol be unbound?
|
||||
pub(super) fn may_be_unbound(&self) -> bool {
|
||||
self.may_be_unbound
|
||||
}
|
||||
}
|
||||
|
||||
/// The default state of a symbol (if we've seen no definitions of it) is unbound.
|
||||
impl Default for SymbolState {
|
||||
fn default() -> Self {
|
||||
SymbolState::unbound()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct DefinitionIdWithConstraintsIterator<'a> {
|
||||
definitions: DefinitionsIterator<'a>,
|
||||
constraints: ConstraintsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
|
||||
type Item = DefinitionIdWithConstraints<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match (self.definitions.next(), self.constraints.next()) {
|
||||
(None, None) => None,
|
||||
(Some(def), Some(constraints)) => Some(DefinitionIdWithConstraints {
|
||||
definition: ScopedDefinitionId::from_u32(def),
|
||||
constraint_ids: ConstraintIdIterator {
|
||||
wrapped: constraints.iter(),
|
||||
},
|
||||
}),
|
||||
// SAFETY: see above.
|
||||
_ => unreachable!("definitions and constraints length mismatch"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for DefinitionIdWithConstraintsIterator<'_> {}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct ConstraintIdIterator<'a> {
|
||||
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
|
||||
}
|
||||
|
||||
impl Iterator for ConstraintIdIterator<'_> {
|
||||
type Item = ScopedConstraintId;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.wrapped.next().map(ScopedConstraintId::from_u32)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
|
||||
|
||||
impl SymbolState {
|
||||
pub(crate) fn assert(&self, may_be_unbound: bool, expected: &[&str]) {
|
||||
assert_eq!(self.may_be_unbound(), may_be_unbound);
|
||||
let actual = self
|
||||
.visible_definitions()
|
||||
.map(|def_id_with_constraints| {
|
||||
format!(
|
||||
"{}<{}>",
|
||||
def_id_with_constraints.definition.as_u32(),
|
||||
def_id_with_constraints
|
||||
.constraint_ids
|
||||
.map(ScopedConstraintId::as_u32)
|
||||
.map(|idx| idx.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unbound() {
|
||||
let cd = SymbolState::unbound();
|
||||
|
||||
cd.assert(true, &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with() {
|
||||
let cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
|
||||
cd.assert(false, &["0<>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_unbound() {
|
||||
let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd.add_unbound();
|
||||
|
||||
cd.assert(true, &["0<>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_constraint() {
|
||||
let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd.add_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
cd.assert(false, &["0<0>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn merge() {
|
||||
// merging the same definition with the same constraint keeps the constraint
|
||||
let mut cd0a = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd0a.add_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
let mut cd0b = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd0b.add_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
cd0a.merge(cd0b);
|
||||
let mut cd0 = cd0a;
|
||||
cd0.assert(false, &["0<0>"]);
|
||||
|
||||
// merging the same definition with differing constraints drops all constraints
|
||||
let mut cd1a = SymbolState::with(ScopedDefinitionId::from_u32(1));
|
||||
cd1a.add_constraint(ScopedConstraintId::from_u32(1));
|
||||
|
||||
let mut cd1b = SymbolState::with(ScopedDefinitionId::from_u32(1));
|
||||
cd1b.add_constraint(ScopedConstraintId::from_u32(2));
|
||||
|
||||
cd1a.merge(cd1b);
|
||||
let cd1 = cd1a;
|
||||
cd1.assert(false, &["1<>"]);
|
||||
|
||||
// merging a constrained definition with unbound keeps both
|
||||
let mut cd2a = SymbolState::with(ScopedDefinitionId::from_u32(2));
|
||||
cd2a.add_constraint(ScopedConstraintId::from_u32(3));
|
||||
|
||||
let cd2b = SymbolState::unbound();
|
||||
|
||||
cd2a.merge(cd2b);
|
||||
let cd2 = cd2a;
|
||||
cd2.assert(true, &["2<3>"]);
|
||||
|
||||
// merging different definitions keeps them each with their existing constraints
|
||||
cd0.merge(cd2);
|
||||
let cd = cd0;
|
||||
cd.assert(true, &["0<0>", "2<3>"]);
|
||||
}
|
||||
}
|
||||
255
crates/red_knot_python_semantic/src/semantic_model.rs
Normal file
255
crates/red_knot_python_semantic/src/semantic_model.rs
Normal file
@@ -0,0 +1,255 @@
|
||||
use ruff_db::files::{File, FilePath};
|
||||
use ruff_db::source::line_index;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::{Expr, ExpressionRef};
|
||||
use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{resolve_module, Module};
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
|
||||
use crate::Db;
|
||||
|
||||
pub struct SemanticModel<'db> {
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
}
|
||||
|
||||
impl<'db> SemanticModel<'db> {
|
||||
pub fn new(db: &'db dyn Db, file: File) -> Self {
|
||||
Self { db, file }
|
||||
}
|
||||
|
||||
// TODO we don't actually want to expose the Db directly to lint rules, but we need to find a
|
||||
// solution for exposing information from types
|
||||
pub fn db(&self) -> &dyn Db {
|
||||
self.db
|
||||
}
|
||||
|
||||
pub fn file_path(&self) -> &FilePath {
|
||||
self.file.path(self.db)
|
||||
}
|
||||
|
||||
pub fn line_index(&self) -> LineIndex {
|
||||
line_index(self.db.upcast(), self.file)
|
||||
}
|
||||
|
||||
pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
|
||||
resolve_module(self.db, module_name)
|
||||
}
|
||||
|
||||
pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
|
||||
global_symbol_ty_by_name(self.db, module.file(), symbol_name)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HasTy {
|
||||
/// Returns the inferred type of `self`.
|
||||
///
|
||||
/// ## Panics
|
||||
/// May panic if `self` is from another file than `model`.
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
|
||||
}
|
||||
|
||||
impl HasTy for ast::ExpressionRef<'_> {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let file_scope = index.expression_scope_id(*self);
|
||||
let scope = file_scope.to_scope_id(model.db, model.file);
|
||||
|
||||
let expression_id = self.scoped_ast_id(model.db, scope);
|
||||
infer_scope_types(model.db, scope).expression_ty(expression_id)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_expression_has_ty {
|
||||
($ty: ty) => {
|
||||
impl HasTy for $ty {
|
||||
#[inline]
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.ty(model)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_expression_has_ty!(ast::ExprBoolOp);
|
||||
impl_expression_has_ty!(ast::ExprNamed);
|
||||
impl_expression_has_ty!(ast::ExprBinOp);
|
||||
impl_expression_has_ty!(ast::ExprUnaryOp);
|
||||
impl_expression_has_ty!(ast::ExprLambda);
|
||||
impl_expression_has_ty!(ast::ExprIf);
|
||||
impl_expression_has_ty!(ast::ExprDict);
|
||||
impl_expression_has_ty!(ast::ExprSet);
|
||||
impl_expression_has_ty!(ast::ExprListComp);
|
||||
impl_expression_has_ty!(ast::ExprSetComp);
|
||||
impl_expression_has_ty!(ast::ExprDictComp);
|
||||
impl_expression_has_ty!(ast::ExprGenerator);
|
||||
impl_expression_has_ty!(ast::ExprAwait);
|
||||
impl_expression_has_ty!(ast::ExprYield);
|
||||
impl_expression_has_ty!(ast::ExprYieldFrom);
|
||||
impl_expression_has_ty!(ast::ExprCompare);
|
||||
impl_expression_has_ty!(ast::ExprCall);
|
||||
impl_expression_has_ty!(ast::ExprFString);
|
||||
impl_expression_has_ty!(ast::ExprStringLiteral);
|
||||
impl_expression_has_ty!(ast::ExprBytesLiteral);
|
||||
impl_expression_has_ty!(ast::ExprNumberLiteral);
|
||||
impl_expression_has_ty!(ast::ExprBooleanLiteral);
|
||||
impl_expression_has_ty!(ast::ExprNoneLiteral);
|
||||
impl_expression_has_ty!(ast::ExprEllipsisLiteral);
|
||||
impl_expression_has_ty!(ast::ExprAttribute);
|
||||
impl_expression_has_ty!(ast::ExprSubscript);
|
||||
impl_expression_has_ty!(ast::ExprStarred);
|
||||
impl_expression_has_ty!(ast::ExprName);
|
||||
impl_expression_has_ty!(ast::ExprList);
|
||||
impl_expression_has_ty!(ast::ExprTuple);
|
||||
impl_expression_has_ty!(ast::ExprSlice);
|
||||
impl_expression_has_ty!(ast::ExprIpyEscapeCommand);
|
||||
|
||||
impl HasTy for ast::Expr {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
match self {
|
||||
Expr::BoolOp(inner) => inner.ty(model),
|
||||
Expr::Named(inner) => inner.ty(model),
|
||||
Expr::BinOp(inner) => inner.ty(model),
|
||||
Expr::UnaryOp(inner) => inner.ty(model),
|
||||
Expr::Lambda(inner) => inner.ty(model),
|
||||
Expr::If(inner) => inner.ty(model),
|
||||
Expr::Dict(inner) => inner.ty(model),
|
||||
Expr::Set(inner) => inner.ty(model),
|
||||
Expr::ListComp(inner) => inner.ty(model),
|
||||
Expr::SetComp(inner) => inner.ty(model),
|
||||
Expr::DictComp(inner) => inner.ty(model),
|
||||
Expr::Generator(inner) => inner.ty(model),
|
||||
Expr::Await(inner) => inner.ty(model),
|
||||
Expr::Yield(inner) => inner.ty(model),
|
||||
Expr::YieldFrom(inner) => inner.ty(model),
|
||||
Expr::Compare(inner) => inner.ty(model),
|
||||
Expr::Call(inner) => inner.ty(model),
|
||||
Expr::FString(inner) => inner.ty(model),
|
||||
Expr::StringLiteral(inner) => inner.ty(model),
|
||||
Expr::BytesLiteral(inner) => inner.ty(model),
|
||||
Expr::NumberLiteral(inner) => inner.ty(model),
|
||||
Expr::BooleanLiteral(inner) => inner.ty(model),
|
||||
Expr::NoneLiteral(inner) => inner.ty(model),
|
||||
Expr::EllipsisLiteral(inner) => inner.ty(model),
|
||||
Expr::Attribute(inner) => inner.ty(model),
|
||||
Expr::Subscript(inner) => inner.ty(model),
|
||||
Expr::Starred(inner) => inner.ty(model),
|
||||
Expr::Name(inner) => inner.ty(model),
|
||||
Expr::List(inner) => inner.ty(model),
|
||||
Expr::Tuple(inner) => inner.ty(model),
|
||||
Expr::Slice(inner) => inner.ty(model),
|
||||
Expr::IpyEscapeCommand(inner) => inner.ty(model),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_definition_has_ty {
|
||||
($ty: ty) => {
|
||||
impl HasTy for $ty {
|
||||
#[inline]
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_definition_has_ty!(ast::StmtFunctionDef);
|
||||
impl_definition_has_ty!(ast::StmtClassDef);
|
||||
impl_definition_has_ty!(ast::Alias);
|
||||
impl_definition_has_ty!(ast::Parameter);
|
||||
impl_definition_has_ty!(ast::ParameterWithDefault);
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::types::Type;
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
db.write_files(files)?;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_ty() -> anyhow::Result<()> {
|
||||
let db = setup_db([("/src/foo.py", "def test(): pass")])?;
|
||||
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, foo);
|
||||
|
||||
let function = ast.suite()[0].as_function_def_stmt().unwrap();
|
||||
let model = SemanticModel::new(&db, foo);
|
||||
let ty = function.ty(&model);
|
||||
|
||||
assert!(matches!(ty, Type::Function(_)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn class_ty() -> anyhow::Result<()> {
|
||||
let db = setup_db([("/src/foo.py", "class Test: pass")])?;
|
||||
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, foo);
|
||||
|
||||
let class = ast.suite()[0].as_class_def_stmt().unwrap();
|
||||
let model = SemanticModel::new(&db, foo);
|
||||
let ty = class.ty(&model);
|
||||
|
||||
assert!(matches!(ty, Type::Class(_)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alias_ty() -> anyhow::Result<()> {
|
||||
let db = setup_db([
|
||||
("/src/foo.py", "class Test: pass"),
|
||||
("/src/bar.py", "from foo import Test"),
|
||||
])?;
|
||||
|
||||
let bar = system_path_to_file(&db, "/src/bar.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, bar);
|
||||
|
||||
let import = ast.suite()[0].as_import_from_stmt().unwrap();
|
||||
let alias = &import.names[0];
|
||||
let model = SemanticModel::new(&db, bar);
|
||||
let ty = alias.ty(&model);
|
||||
|
||||
assert!(matches!(ty, Type::Class(_)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
485
crates/red_knot_python_semantic/src/types.rs
Normal file
485
crates/red_knot_python_semantic/src/types.rs
Normal file
@@ -0,0 +1,485 @@
|
||||
use ruff_db::files::File;
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
use crate::builtins::builtins_scope;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
|
||||
use crate::semantic_index::{
|
||||
global_scope, semantic_index, symbol_table, use_def_map, DefinitionWithConstraints,
|
||||
DefinitionWithConstraintsIterator,
|
||||
};
|
||||
use crate::types::narrow::narrowing_constraint;
|
||||
use crate::{Db, FxOrderSet};
|
||||
|
||||
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
|
||||
pub(crate) use self::diagnostic::TypeCheckDiagnostics;
|
||||
pub(crate) use self::infer::{
|
||||
infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
|
||||
};
|
||||
|
||||
mod builder;
|
||||
mod diagnostic;
|
||||
mod display;
|
||||
mod infer;
|
||||
mod narrow;
|
||||
|
||||
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
|
||||
let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
|
||||
|
||||
let index = semantic_index(db, file);
|
||||
let mut diagnostics = TypeCheckDiagnostics::new();
|
||||
|
||||
for scope_id in index.scope_ids() {
|
||||
let result = infer_scope_types(db, scope_id);
|
||||
diagnostics.extend(result.diagnostics());
|
||||
}
|
||||
|
||||
diagnostics
|
||||
}
|
||||
|
||||
/// Infer the public type of a symbol (its type as seen from outside its scope).
|
||||
pub(crate) fn symbol_ty<'db>(
|
||||
db: &'db dyn Db,
|
||||
scope: ScopeId<'db>,
|
||||
symbol: ScopedSymbolId,
|
||||
) -> Type<'db> {
|
||||
let _span = tracing::trace_span!("symbol_ty", ?symbol).entered();
|
||||
|
||||
let use_def = use_def_map(db, scope);
|
||||
definitions_ty(
|
||||
db,
|
||||
use_def.public_definitions(symbol),
|
||||
use_def
|
||||
.public_may_be_unbound(symbol)
|
||||
.then_some(Type::Unbound),
|
||||
)
|
||||
}
|
||||
|
||||
/// Shorthand for `symbol_ty` that takes a symbol name instead of an ID.
|
||||
pub(crate) fn symbol_ty_by_name<'db>(
|
||||
db: &'db dyn Db,
|
||||
scope: ScopeId<'db>,
|
||||
name: &str,
|
||||
) -> Type<'db> {
|
||||
let table = symbol_table(db, scope);
|
||||
table
|
||||
.symbol_id_by_name(name)
|
||||
.map(|symbol| symbol_ty(db, scope, symbol))
|
||||
.unwrap_or(Type::Unbound)
|
||||
}
|
||||
|
||||
/// Shorthand for `symbol_ty` that looks up a module-global symbol by name in a file.
|
||||
pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &str) -> Type<'db> {
|
||||
symbol_ty_by_name(db, global_scope(db, file), name)
|
||||
}
|
||||
|
||||
/// Shorthand for `symbol_ty` that looks up a symbol in the builtins.
|
||||
///
|
||||
/// Returns `Unbound` if the builtins module isn't available for some reason.
|
||||
pub(crate) fn builtins_symbol_ty_by_name<'db>(db: &'db dyn Db, name: &str) -> Type<'db> {
|
||||
builtins_scope(db)
|
||||
.map(|builtins| symbol_ty_by_name(db, builtins, name))
|
||||
.unwrap_or(Type::Unbound)
|
||||
}
|
||||
|
||||
/// Infer the type of a [`Definition`].
|
||||
pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
|
||||
let inference = infer_definition_types(db, definition);
|
||||
inference.definition_ty(definition)
|
||||
}
|
||||
|
||||
/// Infer the combined type of an array of [`Definition`]s, plus one optional "unbound type".
|
||||
///
|
||||
/// Will return a union if there is more than one definition, or at least one plus an unbound
|
||||
/// type.
|
||||
///
|
||||
/// The "unbound type" represents the type in case control flow may not have passed through any
|
||||
/// definitions in this scope. If this isn't possible, then it will be `None`. If it is possible,
|
||||
/// and the result in that case should be Unbound (e.g. an unbound function local), then it will be
|
||||
/// `Some(Type::Unbound)`. If it is possible and the result should be something else (e.g. an
|
||||
/// implicit global lookup), then `unbound_type` will be `Some(the_global_symbol_type)`.
|
||||
///
|
||||
/// # Panics
|
||||
/// Will panic if called with zero definitions and no `unbound_ty`. This is a logic error,
|
||||
/// as any symbol with zero visible definitions clearly may be unbound, and the caller should
|
||||
/// provide an `unbound_ty`.
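///
/// For example (illustrative): two visible definitions inferred as `int` and `str`, combined with
/// `unbound_ty = Some(Type::Unbound)`, produce the union `int | str | Unbound`.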
|
||||
pub(crate) fn definitions_ty<'db>(
|
||||
db: &'db dyn Db,
|
||||
definitions_with_constraints: DefinitionWithConstraintsIterator<'_, 'db>,
|
||||
unbound_ty: Option<Type<'db>>,
|
||||
) -> Type<'db> {
|
||||
let def_types = definitions_with_constraints.map(
|
||||
|DefinitionWithConstraints {
|
||||
definition,
|
||||
constraints,
|
||||
}| {
|
||||
let mut constraint_tys =
|
||||
constraints.filter_map(|test| narrowing_constraint(db, test, definition));
|
||||
let definition_ty = definition_ty(db, definition);
|
||||
if let Some(first_constraint_ty) = constraint_tys.next() {
|
||||
let mut builder = IntersectionBuilder::new(db);
|
||||
builder = builder
|
||||
.add_positive(definition_ty)
|
||||
.add_positive(first_constraint_ty);
|
||||
for constraint_ty in constraint_tys {
|
||||
builder = builder.add_positive(constraint_ty);
|
||||
}
|
||||
builder.build()
|
||||
} else {
|
||||
definition_ty
|
||||
}
|
||||
},
|
||||
);
|
||||
let mut all_types = unbound_ty.into_iter().chain(def_types);
|
||||
|
||||
let Some(first) = all_types.next() else {
|
||||
panic!("definitions_ty should never be called with zero definitions and no unbound_ty.")
|
||||
};
|
||||
|
||||
if let Some(second) = all_types.next() {
|
||||
let mut builder = UnionBuilder::new(db);
|
||||
builder = builder.add(first).add(second);
|
||||
|
||||
for variant in all_types {
|
||||
builder = builder.add(variant);
|
||||
}
|
||||
|
||||
builder.build()
|
||||
} else {
|
||||
first
|
||||
}
|
||||
}
|
||||
|
||||
/// Unique ID for a type.
|
||||
#[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]
|
||||
pub enum Type<'db> {
|
||||
/// the dynamic type: a statically-unknown set of values
|
||||
Any,
|
||||
/// the empty set of values
|
||||
Never,
|
||||
/// unknown type (either no annotation, or some kind of type error)
|
||||
/// equivalent to Any, or possibly to object in strict mode
|
||||
Unknown,
|
||||
/// name does not exist or is not bound to any value (this represents an error, but with some
|
||||
/// leniency options it could be silently resolved to Unknown in some cases)
|
||||
Unbound,
|
||||
/// the None object -- TODO remove this in favor of Instance(types.NoneType)
|
||||
None,
|
||||
/// a specific function object
|
||||
Function(FunctionType<'db>),
|
||||
/// a specific module object
|
||||
Module(File),
|
||||
/// a specific class object
|
||||
Class(ClassType<'db>),
|
||||
/// the set of Python objects with the given class in their __class__'s method resolution order
|
||||
Instance(ClassType<'db>),
|
||||
/// the set of objects in any of the types in the union
|
||||
Union(UnionType<'db>),
|
||||
/// the set of objects in all of the types in the intersection
|
||||
Intersection(IntersectionType<'db>),
|
||||
/// An integer literal
|
||||
IntLiteral(i64),
|
||||
/// A boolean literal, either `True` or `False`.
|
||||
BooleanLiteral(bool),
|
||||
// TODO protocols, callable types, overloads, generics, type vars
|
||||
}
|
||||
|
||||
impl<'db> Type<'db> {
|
||||
pub const fn is_unbound(&self) -> bool {
|
||||
matches!(self, Type::Unbound)
|
||||
}
|
||||
|
||||
pub const fn is_unknown(&self) -> bool {
|
||||
matches!(self, Type::Unknown)
|
||||
}
|
||||
|
||||
pub const fn is_never(&self) -> bool {
|
||||
matches!(self, Type::Never)
|
||||
}
|
||||
|
||||
pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Unbound => true,
|
||||
Type::Union(union) => union.contains(db, Type::Unbound),
|
||||
// Unbound can't appear in an intersection, because an intersection with Unbound
|
||||
// simplifies to just Unbound.
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
|
||||
match self {
|
||||
Type::Unbound => replacement,
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.into_iter()
|
||||
.fold(UnionBuilder::new(db), |builder, ty| {
|
||||
builder.add(ty.replace_unbound_with(db, replacement))
|
||||
})
|
||||
.build(),
|
||||
ty => *ty,
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a member access of a type.
|
||||
///
|
||||
/// For example, if `foo` is `Type::Instance(<Bar>)`,
|
||||
/// `foo.member(&db, "baz")` returns the type of `baz` attributes
|
||||
/// as accessed from instances of the `Bar` class.
|
||||
///
|
||||
/// TODO: use of this method currently requires manually checking
|
||||
/// whether the returned type is `Unknown`/`Unbound`
|
||||
/// (or a union with `Unknown`/`Unbound`) in many places.
|
||||
/// Ideally we'd use a more type-safe pattern, such as returning
|
||||
/// an `Option` or a `Result` from this method, which would force
|
||||
/// us to explicitly consider whether to handle an error or propagate
|
||||
/// it up the call stack.
|
||||
#[must_use]
|
||||
pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
|
||||
match self {
|
||||
Type::Any => Type::Any,
|
||||
Type::Never => {
|
||||
// TODO: attribute lookup on Never type
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Unknown => Type::Unknown,
|
||||
Type::Unbound => Type::Unbound,
|
||||
Type::None => {
|
||||
// TODO: attribute lookup on None type
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Function(_) => {
|
||||
// TODO: attribute lookup on function type
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
|
||||
Type::Class(class) => class.class_member(db, name),
|
||||
Type::Instance(_) => {
|
||||
// TODO MRO? get_own_instance_member, get_instance_member
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.iter()
|
||||
.fold(UnionBuilder::new(db), |builder, element_ty| {
|
||||
builder.add(element_ty.member(db, name))
|
||||
})
|
||||
.build(),
|
||||
Type::Intersection(_) => {
|
||||
// TODO perform the get_member on each type in the intersection
|
||||
// TODO return the intersection of those results
|
||||
Type::Unknown
|
||||
}
|
||||
Type::IntLiteral(_) => {
|
||||
// TODO raise error
|
||||
Type::Unknown
|
||||
}
|
||||
Type::BooleanLiteral(_) => Type::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn instance(&self) -> Type<'db> {
|
||||
match self {
|
||||
Type::Any => Type::Any,
|
||||
Type::Unknown => Type::Unknown,
|
||||
Type::Class(class) => Type::Instance(*class),
|
||||
_ => Type::Unknown, // TODO type errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct FunctionType<'db> {
|
||||
/// name of the function at definition
|
||||
pub name: Name,
|
||||
|
||||
/// types of all decorators on this function
|
||||
decorators: Vec<Type<'db>>,
|
||||
}
|
||||
|
||||
impl<'db> FunctionType<'db> {
|
||||
pub fn has_decorator(self, db: &dyn Db, decorator: Type<'_>) -> bool {
|
||||
self.decorators(db).contains(&decorator)
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct ClassType<'db> {
|
||||
/// Name of the class at definition
|
||||
pub name: Name,
|
||||
|
||||
/// Types of all class bases
|
||||
bases: Vec<Type<'db>>,
|
||||
|
||||
body_scope: ScopeId<'db>,
|
||||
}
|
||||
|
||||
impl<'db> ClassType<'db> {
|
||||
/// Returns the class member of this class named `name`.
|
||||
///
|
||||
/// The member resolves to a member of the class itself or any of its bases.
|
||||
pub fn class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
|
||||
let member = self.own_class_member(db, name);
|
||||
if !member.is_unbound() {
|
||||
return member;
|
||||
}
|
||||
|
||||
self.inherited_class_member(db, name)
|
||||
}
|
||||
|
||||
/// Returns the inferred type of the class member named `name`.
|
||||
pub fn own_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
|
||||
let scope = self.body_scope(db);
|
||||
symbol_ty_by_name(db, scope, name)
|
||||
}
|
||||
|
||||
pub fn inherited_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
|
||||
for base in self.bases(db) {
|
||||
let member = base.member(db, name);
|
||||
if !member.is_unbound() {
|
||||
return member;
|
||||
}
|
||||
}
|
||||
|
||||
Type::Unbound
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct UnionType<'db> {
|
||||
/// The union type includes values in any of these types.
|
||||
elements: FxOrderSet<Type<'db>>,
|
||||
}
|
||||
|
||||
impl<'db> UnionType<'db> {
|
||||
pub fn contains(&self, db: &'db dyn Db, ty: Type<'db>) -> bool {
|
||||
self.elements(db).contains(&ty)
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct IntersectionType<'db> {
|
||||
/// The intersection type includes only values in all of these types.
|
||||
positive: FxOrderSet<Type<'db>>,
|
||||
|
||||
/// The intersection type does not include any value in any of these types.
|
||||
///
|
||||
/// Negation types aren't expressible in annotations, and are most likely to arise from type
|
||||
/// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
||||
/// directly in intersections rather than as a separate type.
|
||||
negative: FxOrderSet<Type<'db>>,
|
||||
}
|
||||

#[cfg(test)]
mod tests {
    use anyhow::Context;

    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;
    use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};

    use super::TypeCheckDiagnostics;

    fn setup_db() -> TestDb {
        let db = TestDb::new();
        db.memory_file_system()
            .create_directory_all("/src")
            .unwrap();

        Program::from_settings(
            &db,
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: Vec::new(),
                    src_root: SystemPathBuf::from("/src"),
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            },
        )
        .expect("Valid search path settings");

        db
    }

    fn assert_diagnostic_messages(diagnostics: &TypeCheckDiagnostics, expected: &[&str]) {
        let messages: Vec<&str> = diagnostics
            .iter()
            .map(|diagnostic| diagnostic.message())
            .collect();
        assert_eq!(&messages, expected);
    }

    #[test]
    fn unresolved_import_statement() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("src/foo.py", "import bar\n")
            .context("Failed to write foo.py")?;

        let foo = system_path_to_file(&db, "src/foo.py").context("Failed to resolve foo.py")?;

        let diagnostics = super::check_types(&db, foo);
        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);

        Ok(())
    }

    #[test]
    fn unresolved_import_from_statement() {
        let mut db = setup_db();

        db.write_file("src/foo.py", "from bar import baz\n")
            .unwrap();
        let foo = system_path_to_file(&db, "src/foo.py").unwrap();
        let diagnostics = super::check_types(&db, foo);
        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
    }

    #[test]
    fn unresolved_import_from_resolved_module() {
        let mut db = setup_db();

        db.write_files([("/src/a.py", ""), ("/src/b.py", "from a import thing")])
            .unwrap();

        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
        let b_file_diagnostics = super::check_types(&db, b_file);
        assert_diagnostic_messages(
            &b_file_diagnostics,
            &["Could not resolve import of 'thing' from 'a'"],
        );
    }

    #[ignore = "\
    A spurious second 'Unresolved import' diagnostic message is emitted on `b.py`, \
    despite the symbol existing in the symbol table for `a.py`"]
    #[test]
    fn resolved_import_of_symbol_from_unresolved_import() {
        let mut db = setup_db();

        db.write_files([
            ("/src/a.py", "import foo as foo"),
            ("/src/b.py", "from a import foo"),
        ])
        .unwrap();

        let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
        let a_file_diagnostics = super::check_types(&db, a_file);
        assert_diagnostic_messages(
            &a_file_diagnostics,
            &["Import 'foo' could not be resolved."],
        );

        // Importing the unresolved import into a second first-party file should not trigger
        // an additional "unresolved import" violation
        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
        let b_file_diagnostics = super::check_types(&db, b_file);
        assert_eq!(&*b_file_diagnostics, &[]);
    }
}
crates/red_knot_python_semantic/src/types/builder.rs (new file, 471 lines)
@@ -0,0 +1,471 @@
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead.)
//! * No single-positive-element intersection types. Single-negative-element are OK, we don't
//!   have a standalone negation type so there's no other representation for this.
//! * The same type should never appear more than once in a union or intersection. (This should
//!   be expanded to cover subtyping -- see below -- but for now we only implement it for type
//!   identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//!   than a union-of-intersections. Unions cannot contain other unions (the inner union just
//!   flattens into the outer one), intersections cannot contain other intersections (also
//!   flattens), and intersections cannot contain unions (the intersection distributes over the
//!   union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//!   subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//!   eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
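//!
//! A hedged illustration of the "no single-element unions" invariant (it mirrors the
//! `build_union_single` and `build_union_empty` tests at the bottom of this file and is
//! not a runnable doctest; `db` is assumed to be a `&dyn Db`):
//!
//! ```ignore
//! // A single-element union collapses to the element itself...
//! let ty = UnionBuilder::new(db).add(Type::IntLiteral(1)).build();
//! assert_eq!(ty, Type::IntLiteral(1));
//!
//! // ...and an empty union builds to `Type::Never`.
//! assert_eq!(UnionBuilder::new(db).build(), Type::Never);
//! ```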
use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};

pub(crate) struct UnionBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,
}

impl<'db> UnionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: FxOrderSet::default(),
        }
    }

    /// Adds a type to this union.
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                self.elements.extend(&union.elements(self.db));
            }
            Type::Never => {}
            _ => {
                self.elements.insert(ty);
            }
        }

        self
    }
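
    // A hedged illustration of `add` (it mirrors the `build_union_flatten` and
    // `build_union_never` tests below and is not part of the diff itself):
    //
    //     let inner = UnionBuilder::new(db).add(t0).add(t1).build();
    //     let ty = UnionBuilder::new(db).add(inner).add(Type::Never).add(t2).build();
    //     // Nested unions flatten and `Never` drops out, so `ty` is the flat union
    //     // `t0 | t1 | t2`, never a union containing another union.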

    pub(crate) fn build(self) -> Type<'db> {
        match self.elements.len() {
            0 => Type::Never,
            1 => self.elements[0],
            _ => Type::Union(UnionType::new(self.db, self.elements)),
        }
    }
}

#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
    // Really this builds a union-of-intersections, because we always keep our set-theoretic types
    // in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
    // just a single intersection in this vector, and we are building a single intersection type,
    // but if a union is added to the intersection, we'll distribute ourselves over that union and
    // create a union of intersections.
    intersections: Vec<InnerIntersectionBuilder<'db>>,
    db: &'db dyn Db,
}

impl<'db> IntersectionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![InnerIntersectionBuilder::new()],
        }
    }

    fn empty(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![],
        }
    }

    pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
        if let Type::Union(union) = ty {
            // Distribute ourself over this union: for each union element, clone ourself and
            // intersect with that union element, then create a new union-of-intersections with all
            // of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
            // and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
            // not in DNF); we distribute the union and get `(T1 & T2 & T3) | (T1 & T2 & T4)`.
            // If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
            // and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
            // T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_positive(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            // If we are already a union-of-intersections, distribute the new intersected element
            // across all of those intersections.
            for inner in &mut self.intersections {
                inner.add_positive(self.db, ty);
            }
            self
        }
    }
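
    // A hedged, concrete restatement of the distribution above (it mirrors the
    // `intersection_distributes_over_union` test below and is not part of the diff itself):
    //
    //     let u = UnionBuilder::new(db).add(Type::IntLiteral(0)).add(Type::IntLiteral(1)).build();
    //     let ty = IntersectionBuilder::new(db)
    //         .add_positive(Type::Any)
    //         .add_positive(u)
    //         .build();
    //     // `ty` is `(Any & Literal[0]) | (Any & Literal[1])`: a `Type::Union` of two
    //     // intersections, so the DNF invariant is preserved.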

    pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
        // See comments above in `add_positive`; this is just the negated version.
        if let Type::Union(union) = ty {
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_negative(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            for inner in &mut self.intersections {
                inner.add_negative(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn build(mut self) -> Type<'db> {
        // Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
        if self.intersections.len() == 1 {
            self.intersections.pop().unwrap().build(self.db)
        } else {
            let mut builder = UnionBuilder::new(self.db);
            for inner in self.intersections {
                builder = builder.add(inner.build(self.db));
            }
            builder.build()
        }
    }
}

#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
    positive: FxOrderSet<Type<'db>>,
    negative: FxOrderSet<Type<'db>>,
}

impl<'db> InnerIntersectionBuilder<'db> {
    fn new() -> Self {
        Self::default()
    }

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        match ty {
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
                let neg = inter.negative(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            _ => {
                if !self.negative.remove(&ty) {
                    self.positive.insert(ty);
                };
            }
        }
    }

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO Any/Unknown actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                let pos = intersection.negative(db);
                let neg = intersection.positive(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            Type::Never => {}
            Type::Unbound => {}
            _ => {
                if !self.positive.remove(&ty) {
                    self.negative.insert(ty);
                };
            }
        }
    }
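
    // A hedged illustration of the cancellation in `add_positive`/`add_negative` above
    // (it mirrors the `build_intersection_self_negation` test below and is not part of
    // the diff itself):
    //
    //     let ty = IntersectionBuilder::new(db)
    //         .add_positive(Type::None)
    //         .add_negative(Type::None)
    //         .build();
    //     // `None & ~None` cancels to an empty intersection, which builds to `Type::Never`.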

    fn simplify(&mut self) {
        // TODO this should be generalized based on subtyping, for now we just handle a few cases

        // Never is a subtype of all types
        if self.positive.contains(&Type::Never) {
            self.positive.retain(Type::is_never);
            self.negative.clear();
        }

        if self.positive.contains(&Type::Unbound) {
            self.positive.retain(Type::is_unbound);
            self.negative.clear();
        }

        // None intersects only with object
        for pos in &self.positive {
            if let Type::Instance(_) = pos {
                // could be `object` type
            } else {
                self.negative.remove(&Type::None);
                break;
            }
        }
    }
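
    // Hedged examples of what these rules produce in practice (they mirror the
    // `build_intersection_simplify_*` tests below and are not part of the diff itself):
    //
    //     // `Never` absorbs everything: `None & Never` builds to `Never`.
    //     // `Unbound` likewise wins: `Unbound & Literal[1]` builds to `Unbound`.
    //     // A negative `None` is redundant against a non-`None` type:
    //     // `Literal[1] & ~None` builds to plain `Literal[1]`.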

    fn build(mut self, db: &'db dyn Db) -> Type<'db> {
        self.simplify();
        match (self.positive.len(), self.negative.len()) {
            (0, 0) => Type::Never,
            (1, 0) => self.positive[0],
            _ => {
                self.positive.shrink_to_fit();
                self.negative.shrink_to_fit();
                Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
    use crate::db::tests::TestDb;

    fn setup_db() -> TestDb {
        TestDb::new()
    }

    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().collect()
        }
    }

    #[test]
    fn build_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
    }

    #[test]
    fn build_union_single() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).build();

        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_empty() {
        let db = setup_db();
        let ty = UnionBuilder::new(&db).build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_union_never() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();

        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_flatten() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
    }
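
    // A hedged sketch (not part of the original diff): the "no duplicate elements"
    // invariant falls out of `FxOrderSet`, so adding the same type twice still
    // collapses to the type itself rather than a union.
    #[test]
    fn build_union_dedup_sketch() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).add(t0).build();

        assert_eq!(ty, t0);
    }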

    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().collect()
        }
    }

    #[test]
    fn build_intersection() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[ta]);
        assert_eq!(inter.neg_vec(&db), &[t0]);
    }

    #[test]
    fn build_intersection_flatten_positive() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, ta]);
        assert_eq!(inter.neg_vec(&db), &[t1]);
    }

    #[test]
    fn build_intersection_flatten_negative() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, t1]);
        assert_eq!(inter.neg_vec(&db), &[ta]);
    }

    #[test]
    fn intersection_distributes_over_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let ta = Type::Any;
        let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();

        let Type::Union(union) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
        else {
            panic!("expected a union");
        };
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            panic!("expected a union of two intersections");
        };
        assert_eq!(i0.pos_vec(&db), &[ta, t0]);
        assert_eq!(i1.pos_vec(&db), &[ta, t1]);
    }

    #[test]
    fn build_intersection_self_negation() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::None)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_negative_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::Never)
            .build();

        assert_eq!(ty, Type::None);
    }

    #[test]
    fn build_intersection_simplify_positive_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_positive(Type::Never)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_positive_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::Unbound);
    }

    #[test]
    fn build_intersection_simplify_negative_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }

    #[test]
    fn build_intersection_simplify_negative_none() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::None)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }
}
Some files were not shown because too many files have changed in this diff.