Compare commits

2 Commits: `type-check` ... `preview-bi`

| Author | SHA1 | Date |
|---|---|---|
|  | d3e160dcb7 |  |
|  | 003851b54c |  |
`.gitattributes` (vendored, 4 changes)

```diff
@@ -2,13 +2,9 @@

 crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
 crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
-crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
-crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf

 crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
 crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

-crates/ruff_python_parser/resources/inline linguist-generated=true
-
 ruff.schema.json linguist-generated=true text=auto eol=lf
 *.md.snap linguist-language=Markdown
```
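To sanity-check these attributes in a working copy, `git check-attr` reports what applies to a given path (a quick illustration; the path is one of the fixtures from the hunk above):

```bash
# Show the text/eol attributes that .gitattributes assigns to a CRLF fixture
git check-attr text eol -- crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py
# expected output ends with "text: set" and "eol: crlf"
```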
`.github/CODEOWNERS` (vendored, 12 changes)

```diff
@@ -5,13 +5,5 @@
 # - The '*' pattern is global owners.
 # - Order is important. The last matching pattern has the most precedence.

-/crates/ruff_notebook/ @dhruvmanila
-/crates/ruff_formatter/ @MichaReiser
-/crates/ruff_python_formatter/ @MichaReiser
-/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
-
-# flake8-pyi
-/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
-
-# Script for fuzzing the parser
-/scripts/fuzz-parser/ @AlexWaygood
+# Jupyter
+/crates/ruff_linter/src/jupyter/ @dhruvmanila
```
`.github/ISSUE_TEMPLATE.md` (vendored, 2 changes)

```diff
@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve

 If you're filing a bug report, please consider including the following information:

-* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
-  e.g. "RUF001", "unused variable", "Jupyter notebook"
 * A minimal code snippet that reproduces the bug.
 * The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
 * The current Ruff settings (any relevant sections from your `pyproject.toml`).
```
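The reproduction step the template asks for looks like this in practice (a sketch; the file path is a placeholder, and `--isolated` makes Ruff ignore any local configuration so the report is reproducible):

```bash
# Minimal reproduction command in the shape the template requests
ruff check /path/to/file.py --fix --isolated
```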
`.github/dependabot.yml` (vendored, new file, 21 changes)

```diff
@@ -0,0 +1,21 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    labels: ["internal"]
+    groups:
+      actions:
+        patterns:
+          - "*"
+    ignore:
+      # The latest versions of these are not compatible with our release workflow
+      - dependency-name: "actions/upload-artifact"
+      - dependency-name: "actions/download-artifact"
+
+  - package-ecosystem: "cargo"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    labels: ["internal"]
```
`.github/renovate.json5` (vendored, deleted, 68 changes)

```diff
@@ -1,68 +0,0 @@
-{
-  $schema: "https://docs.renovatebot.com/renovate-schema.json",
-  dependencyDashboard: true,
-  suppressNotifications: ["prEditedNotification"],
-  extends: ["config:recommended"],
-  labels: ["internal"],
-  schedule: ["before 4am on Monday"],
-  semanticCommits: "disabled",
-  separateMajorMinor: false,
-  prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
-  cargo: {
-    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
-    rangeStrategy: "update-lockfile",
-  },
-  pep621: {
-    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
-  },
-  npm: {
-    fileMatch: ["^playground/.*package\\.json$"],
-  },
-  "pre-commit": {
-    enabled: true,
-  },
-  packageRules: [
-    {
-      // Group upload/download artifact updates, the versions are dependent
-      groupName: "Artifact GitHub Actions dependencies",
-      matchManagers: ["github-actions"],
-      matchPackagePatterns: ["actions/.*-artifact"],
-      description: "Weekly update of artifact-related GitHub Actions dependencies",
-    },
-    {
-      groupName: "pre-commit dependencies",
-      matchManagers: ["pre-commit"],
-      description: "Weekly update of pre-commit dependencies",
-    },
-    {
-      groupName: "NPM Development dependencies",
-      matchManagers: ["npm"],
-      matchDepTypes: ["devDependencies"],
-      description: "Weekly update of NPM development dependencies",
-    },
-    {
-      groupName: "Monaco",
-      matchManagers: ["npm"],
-      matchPackagePatterns: ["monaco"],
-      description: "Weekly update of the Monaco editor",
-    },
-    {
-      groupName: "strum",
-      matchManagers: ["cargo"],
-      matchPackagePatterns: ["strum"],
-      description: "Weekly update of strum dependencies",
-    },
-    {
-      groupName: "ESLint",
-      matchManagers: ["npm"],
-      matchPackageNames: ["eslint"],
-      allowedVersions: "<9",
-      description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
-    },
-  ],
-  vulnerabilityAlerts: {
-    commitMessageSuffix: "",
-    labels: ["internal", "security"],
-  },
-}
```
`.github/workflows/ci.yaml` (vendored, 83 changes)

```diff
@@ -23,8 +23,6 @@ jobs:
     name: "Determine changes"
     runs-on: ubuntu-latest
     outputs:
-      # Flag that is raised when any code that affects parser is changed
-      parser: ${{ steps.changed.outputs.parser_any_changed }}
       # Flag that is raised when any code that affects linter is changed
       linter: ${{ steps.changed.outputs.linter_any_changed }}
       # Flag that is raised when any code that affects formatter is changed
@@ -37,21 +35,10 @@ jobs:
         with:
           fetch-depth: 0

-      - uses: tj-actions/changed-files@v44
+      - uses: tj-actions/changed-files@v42
         id: changed
         with:
           files_yaml: |
-            parser:
-              - Cargo.toml
-              - Cargo.lock
-              - crates/ruff_python_trivia/**
-              - crates/ruff_source_file/**
-              - crates/ruff_text_size/**
-              - crates/ruff_python_ast/**
-              - crates/ruff_python_parser/**
-              - scripts/fuzz-parser/**
-              - .github/workflows/ci.yaml
-
             linter:
               - Cargo.toml
               - Cargo.lock
@@ -59,6 +46,7 @@ jobs:
             - "!crates/ruff_python_formatter/**"
             - "!crates/ruff_formatter/**"
             - "!crates/ruff_dev/**"
+            - "!crates/ruff_shrinking/**"
             - scripts/*
             - python/**
             - .github/workflows/ci.yaml
@@ -145,7 +133,7 @@ jobs:
         env:
           # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
           RUSTDOCFLAGS: "-D warnings"
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v3
         with:
           name: ruff
           path: target/debug/ruff
@@ -193,22 +181,6 @@ jobs:
           cd crates/ruff_wasm
           wasm-pack test --node
-
-  cargo-build-release:
-    name: "cargo build (release)"
-    runs-on: macos-latest
-    needs: determine_changes
-    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
-    timeout-minutes: 20
-    steps:
-      - uses: actions/checkout@v4
-      - name: "Install Rust toolchain"
-        run: rustup show
-      - name: "Install mold"
-        uses: rui314/setup-mold@v1
-      - uses: Swatinem/rust-cache@v2
-      - name: "Build"
-        run: cargo build --release --locked

   cargo-fuzz:
     name: "cargo fuzz"
     runs-on: ubuntu-latest
@@ -228,38 +200,6 @@ jobs:
           tool: cargo-fuzz@0.11.2
       - run: cargo fuzz build -s none
-
-  fuzz-parser:
-    name: "Fuzz the parser"
-    runs-on: ubuntu-latest
-    needs:
-      - cargo-test-linux
-      - determine_changes
-    if: ${{ needs.determine_changes.outputs.parser == 'true' }}
-    timeout-minutes: 20
-    env:
-      FORCE_COLOR: 1
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-      - name: Install uv
-        run: curl -LsSf https://astral.sh/uv/install.sh | sh
-      - name: Install Python requirements
-        run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
-      - uses: actions/download-artifact@v4
-        name: Download Ruff binary to test
-        id: download-cached-binary
-        with:
-          name: ruff
-          path: ruff-to-test
-      - name: Fuzz
-        run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
-
-          python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff

   scripts:
     name: "test scripts"
     runs-on: ubuntu-latest
@@ -288,7 +228,9 @@ jobs:
       - determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     # Ecosystem check needs linter and/or formatter changes.
-    if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
+    if: github.event_name == 'pull_request' && ${{
+      needs.determine_changes.outputs.code == 'true'
+    }}
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
@@ -296,7 +238,7 @@ jobs:
         with:
           python-version: ${{ env.PYTHON_VERSION }}

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v3
         name: Download comparison Ruff binary
         id: ruff-target
         with:
@@ -308,7 +250,6 @@ jobs:
         with:
           name: ruff
           branch: ${{ github.event.pull_request.base.ref }}
-          workflow: "ci.yaml"
           check_artifacts: true

       - name: Install ruff-ecosystem
@@ -383,13 +324,13 @@ jobs:
         run: |
           echo ${{ github.event.number }} > pr-number

-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v3
         name: Upload PR Number
         with:
           name: pr-number
           path: pr-number

-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v3
         name: Upload Results
         with:
           name: ecosystem-result
@@ -530,7 +471,7 @@ jobs:
       - determine_changes
     if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     steps:
-      - uses: extractions/setup-just@v2
+      - uses: extractions/setup-just@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -543,7 +484,7 @@ jobs:
         with:
           python-version: ${{ env.PYTHON_VERSION }}

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v3
         name: Download development ruff binary
         id: ruff-target
         with:
@@ -567,7 +508,7 @@ jobs:
   benchmarks:
     runs-on: ubuntu-latest
     needs: determine_changes
-    if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 20
     steps:
       - name: "Checkout Branch"
```
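The Linux test job uploads the debug binary as an artifact named `ruff` (see the `upload-artifact` hunk above), and downstream jobs such as the ecosystem check download it again. The same artifact can be fetched locally with the GitHub CLI (a sketch; the run id is a placeholder):

```bash
# Download the `ruff` artifact produced by a given CI run into ./ruff-to-test
gh run download <run-id> --name ruff --dir ruff-to-test
# Artifact downloads do not preserve the executable bit (as the workflow's own comment notes)
chmod +x ruff-to-test/ruff
ruff-to-test/ruff --version
```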
`.github/workflows/daily_fuzz.yaml` (vendored, deleted, 72 changes)

```diff
@@ -1,72 +0,0 @@
-name: Daily parser fuzz
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: "0 0 * * *"
-  pull_request:
-    paths:
-      - ".github/workflows/daily_fuzz.yaml"
-
-permissions:
-  contents: read
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-env:
-  CARGO_INCREMENTAL: 0
-  CARGO_NET_RETRY: 10
-  CARGO_TERM_COLOR: always
-  RUSTUP_MAX_RETRIES: 10
-  PACKAGE_NAME: ruff
-  FORCE_COLOR: 1
-
-jobs:
-  fuzz:
-    name: Fuzz
-    runs-on: ubuntu-latest
-    timeout-minutes: 20
-    # Don't run the cron job on forks:
-    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.12"
-      - name: Install uv
-        run: curl -LsSf https://astral.sh/uv/install.sh | sh
-      - name: Install Python requirements
-        run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
-      - name: "Install Rust toolchain"
-        run: rustup show
-      - name: "Install mold"
-        uses: rui314/setup-mold@v1
-      - uses: Swatinem/rust-cache@v2
-      - name: Build ruff
-        # A debug build means the script runs slower once it gets started,
-        # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
-        run: cargo build --locked
-      - name: Fuzz
-        run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
-
-  create-issue-on-failure:
-    name: Create an issue if the daily fuzz surfaced any bugs
-    runs-on: ubuntu-latest
-    needs: fuzz
-    if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
-    permissions:
-      issues: write
-    steps:
-      - uses: actions/github-script@v7
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            await github.rest.issues.create({
-              owner: "astral-sh",
-              repo: "ruff",
-              title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
-              body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
-              labels: ["bug", "parser", "fuzzer"],
-            })
```
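The deleted workflow is easy to reproduce locally from the commands it ran (a sketch, using the fixed 0-500 seed range from the CI job above instead of the 1000 random seeds the cron job drew):

```bash
# Install the fuzzer's Python dependencies, build a debug binary, then fuzz seeds 0-500
uv pip install -r scripts/fuzz-parser/requirements.txt --system
cargo build --locked
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable target/debug/ruff
```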
`.github/workflows/release.yaml` (vendored, 95 changes)

```diff
@@ -52,13 +52,13 @@ jobs:
           ruff --help
           python -m ruff --help
       - name: "Upload sdist"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-sdist
+          name: wheels
           path: dist

   macos-x86_64:
-    runs-on: macos-12
+    runs-on: macos-latest
     steps:
       - uses: actions/checkout@v4
         with:
@@ -80,9 +80,9 @@ jobs:
           ruff --help
           python -m ruff --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-macos-x86_64
+          name: wheels
           path: dist
       - name: "Archive binary"
         run: |
@@ -90,15 +90,15 @@ jobs:
           tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
           shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-macos-x86_64
+          name: binaries
           path: |
             *.tar.gz
             *.sha256

-  macos-aarch64:
-    runs-on: macos-14
+  macos-universal:
+    runs-on: macos-latest
     steps:
       - uses: actions/checkout@v4
         with:
@@ -106,23 +106,22 @@ jobs:
       - uses: actions/setup-python@v5
         with:
           python-version: ${{ env.PYTHON_VERSION }}
-          architecture: arm64
+          architecture: x64
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
-      - name: "Build wheels - aarch64"
+      - name: "Build wheels - universal2"
         uses: PyO3/maturin-action@v1
         with:
-          target: aarch64
-          args: --release --locked --out dist
-      - name: "Test wheel - aarch64"
+          args: --release --locked --target universal2-apple-darwin --out dist
+      - name: "Test wheel - universal2"
         run: |
-          pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
+          pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
           ruff --help
           python -m ruff --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-aarch64-apple-darwin
+          name: wheels
           path: dist
       - name: "Archive binary"
         run: |
@@ -130,9 +129,9 @@ jobs:
           tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
           shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-aarch64-apple-darwin
+          name: binaries
           path: |
             *.tar.gz
             *.sha256
@@ -171,9 +170,9 @@ jobs:
           ruff --help
           python -m ruff --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-${{ matrix.platform.target }}
+          name: wheels
           path: dist
       - name: "Archive binary"
         shell: bash
@@ -182,9 +181,9 @@ jobs:
           7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
           sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-${{ matrix.platform.target }}
+          name: binaries
           path: |
             *.zip
             *.sha256
@@ -219,9 +218,9 @@ jobs:
           ruff --help
           python -m ruff --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-${{ matrix.target }}
+          name: wheels
           path: dist
       - name: "Archive binary"
         run: |
@@ -229,9 +228,9 @@ jobs:
           tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
           shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-${{ matrix.target }}
+          name: binaries
           path: |
             *.tar.gz
             *.sha256
@@ -252,12 +251,8 @@ jobs:
             arch: s390x
           - target: powerpc64le-unknown-linux-gnu
             arch: ppc64le
-            # see https://github.com/astral-sh/ruff/issues/10073
-            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
           - target: powerpc64-unknown-linux-gnu
             arch: ppc64
-            # see https://github.com/astral-sh/ruff/issues/10073
-            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16

     steps:
       - uses: actions/checkout@v4
@@ -290,9 +285,9 @@ jobs:
           pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
           ruff --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-${{ matrix.platform.target }}
+          name: wheels
           path: dist
       - name: "Archive binary"
         run: |
@@ -300,9 +295,9 @@ jobs:
           tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
           shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-${{ matrix.platform.target }}
+          name: binaries
           path: |
             *.tar.gz
             *.sha256
@@ -342,9 +337,9 @@ jobs:
           .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
           .venv/bin/ruff check --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-${{ matrix.target }}
+          name: wheels
           path: dist
       - name: "Archive binary"
         run: |
@@ -352,9 +347,9 @@ jobs:
           tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
           shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-${{ matrix.target }}
+          name: binaries
           path: |
             *.tar.gz
             *.sha256
@@ -399,9 +394,9 @@ jobs:
           .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
           .venv/bin/ruff check --help
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: wheels-${{ matrix.platform.target }}
+          name: wheels
           path: dist
       - name: "Archive binary"
         run: |
@@ -409,9 +404,9 @@ jobs:
           tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
           shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
       - name: "Upload binary"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v3
         with:
-          name: binaries-${{ matrix.platform.target }}
+          name: binaries
           path: |
             *.tar.gz
             *.sha256
@@ -452,7 +447,7 @@ jobs:
     name: Upload to PyPI
     runs-on: ubuntu-latest
     needs:
-      - macos-aarch64
+      - macos-universal
       - macos-x86_64
       - windows
       - linux
@@ -468,11 +463,10 @@ jobs:
       # For pypi trusted publishing
       id-token: write
     steps:
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v3
         with:
-          pattern: wheels-*
+          name: wheels
           path: wheels
-          merge-multiple: true
       - name: Publish to PyPi
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
@@ -512,13 +506,12 @@ jobs:
       # For GitHub release publishing
      contents: write
     steps:
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v3
         with:
-          pattern: binaries-*
+          name: binaries
           path: binaries
-          merge-multiple: true
       - name: "Publish to GitHub"
-        uses: softprops/action-gh-release@v2
+        uses: softprops/action-gh-release@v1
         with:
           draft: true
           files: binaries/*
```
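Every "Archive binary" step above writes a checksum file next to the archive (`shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256`), so a downloaded release asset can be verified the same way (illustrative; the archive name is a placeholder):

```bash
# Verify a downloaded archive against its published checksum file
shasum -a 256 -c ruff-x86_64-apple-darwin.tar.gz.sha256
```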
`.gitignore` (vendored, 1 change)

```diff
@@ -92,7 +92,6 @@ coverage.xml
 .hypothesis/
 .pytest_cache/
 cover/
-repos/

 # Translations
 *.mo
```
`.markdownlint.yaml`

```diff
@@ -17,4 +17,4 @@ MD013: false
 # MD024/no-duplicate-heading
 MD024:
   # Allow when nested under different parents e.g. CHANGELOG.md
-  siblings_only: true
+  allow_different_nesting: true
```
`.pre-commit-config.yaml`

```diff
@@ -13,7 +13,7 @@ exclude: |

 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.16
+    rev: v0.15
     hooks:
       - id: validate-pyproject

@@ -31,7 +31,7 @@ repos:
          )$

   - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.39.0
+    rev: v0.37.0
     hooks:
       - id: markdownlint-fix
         exclude: |
@@ -41,7 +41,7 @@ repos:
          )$

   - repo: https://github.com/crate-ci/typos
-    rev: v1.20.10
+    rev: v1.16.22
     hooks:
       - id: typos

@@ -55,7 +55,7 @@ repos:
         pass_filenames: false # This makes it a lot faster

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.4.2
+    rev: v0.1.4
     hooks:
       - id: ruff-format
       - id: ruff

@@ -70,7 +70,7 @@ repos:

   # Prettier
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v3.1.0
+    rev: v3.0.3
     hooks:
       - id: prettier
         types: [yaml]
```
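All of the `rev` changes above are pins that `pre-commit` manages itself; to apply this config or refresh the pins in a checkout (standard `pre-commit` commands, shown for reference):

```bash
pre-commit run --all-files   # run every configured hook against the repo
pre-commit autoupdate        # bump each repo's `rev` to its latest tag
```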
`BREAKING_CHANGES.md`

```diff
@@ -1,56 +1,5 @@
 # Breaking Changes

-## 0.3.0
-
-### Ruff 2024.2 style
-
-The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
-
-### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
-
-Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
-The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
-As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
-
-### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
-
-Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
-
-- `.bzr`
-- `.direnv`
-- `.eggs`
-- `.git`
-- `.git-rewrite`
-- `.hg`
-- `.ipynb_checkpoints`
-- `.mypy_cache`
-- `.nox`
-- `.pants.d`
-- `.pyenv`
-- `.pytest_cache`
-- `.pytype`
-- `.ruff_cache`
-- `.svn`
-- `.tox`
-- `.venv`
-- `.vscode`
-- `__pypackages__`
-- `_build`
-- `buck-out`
-- `dist`
-- `node_modules`
-- `site-packages`
-- `venv`
-
-Previously, the `build` directory was included in this list. However, the `build` directory tends to be a not-unpopular directory
-name, and excluding it by default caused confusion. Ruff now no longer excludes `build` except if it is excluded by a `.gitignore` file
-or because it is listed in `extend-exclude`.
-
-### `--format` is no longer a valid `rule` or `linter` command option
-
-Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
-supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
-
 ## 0.1.9

 ### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
```
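Two of the removed notes above describe migrations that are easy to show concretely (a sketch; the JSON format and the RUF100 code are arbitrary examples):

```bash
# `--format` alias removed: use `--output-format` with `ruff rule` / `ruff linter`
ruff rule RUF100 --output-format json
ruff linter --output-format json

# `build` no longer excluded by default: opt back in explicitly if needed
ruff check . --extend-exclude build
```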
547
CHANGELOG.md
547
CHANGELOG.md
@@ -1,544 +1,5 @@
|
||||
# Changelog
|
||||
|
||||
## 0.4.3
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add support for PEP 696 syntax ([#11120](https://github.com/astral-sh/ruff/pull/11120))
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`refurb`\] Use function range for `reimplemented-operator` diagnostics ([#11271](https://github.com/astral-sh/ruff/pull/11271))
|
||||
- \[`refurb`\] Ignore methods in `reimplemented-operator` (`FURB118`) ([#11270](https://github.com/astral-sh/ruff/pull/11270))
|
||||
- \[`refurb`\] Implement `fstring-number-format` (`FURB116`) ([#10921](https://github.com/astral-sh/ruff/pull/10921))
|
||||
- \[`ruff`\] Implement `redirected-noqa` (`RUF101`) ([#11052](https://github.com/astral-sh/ruff/pull/11052))
|
||||
- \[`pyflakes`\] Distinguish between first-party and third-party imports for fix suggestions ([#11168](https://github.com/astral-sh/ruff/pull/11168))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Ignore non-abstract class attributes when enforcing `B024` ([#11210](https://github.com/astral-sh/ruff/pull/11210))
|
||||
- \[`flake8-logging`\] Include inline instantiations when detecting loggers ([#11154](https://github.com/astral-sh/ruff/pull/11154))
|
||||
- \[`pylint`\] Also emit `PLR0206` for properties with variadic parameters ([#11200](https://github.com/astral-sh/ruff/pull/11200))
|
||||
- \[`ruff`\] Detect duplicate codes as part of `unused-noqa` (`RUF100`) ([#10850](https://github.com/astral-sh/ruff/pull/10850))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Avoid multiline expression if format specifier is present ([#11123](https://github.com/astral-sh/ruff/pull/11123))
|
||||
|
||||
### LSP
|
||||
|
||||
- Write `ruff server` setup guide for Helix ([#11183](https://github.com/astral-sh/ruff/pull/11183))
|
||||
- `ruff server` no longer hangs after shutdown ([#11222](https://github.com/astral-sh/ruff/pull/11222))
|
||||
- `ruff server` reads from a configuration TOML file in the user configuration directory if no local configuration exists ([#11225](https://github.com/astral-sh/ruff/pull/11225))
|
||||
- `ruff server` respects `per-file-ignores` configuration ([#11224](https://github.com/astral-sh/ruff/pull/11224))
|
||||
- `ruff server`: Support a custom TOML configuration file ([#11140](https://github.com/astral-sh/ruff/pull/11140))
|
||||
- `ruff server`: Support setting to prioritize project configuration over editor configuration ([#11086](https://github.com/astral-sh/ruff/pull/11086))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid debug assertion around NFKC renames ([#11249](https://github.com/astral-sh/ruff/pull/11249))
|
||||
- \[`pyflakes`\] Prioritize `redefined-while-unused` over `unused-import` ([#11173](https://github.com/astral-sh/ruff/pull/11173))
|
||||
- \[`ruff`\] Respect `async` expressions in comprehension bodies ([#11219](https://github.com/astral-sh/ruff/pull/11219))
|
||||
- \[`pygrep_hooks`\] Fix `blanket-noqa` panic when last line has noqa with no newline (`PGH004`) ([#11108](https://github.com/astral-sh/ruff/pull/11108))
|
||||
- \[`perflint`\] Ignore list-copy recommendations for async `for` loops ([#11250](https://github.com/astral-sh/ruff/pull/11250))
|
||||
- \[`pyflakes`\] Improve `invalid-print-syntax` documentation ([#11171](https://github.com/astral-sh/ruff/pull/11171))
|
||||
|
||||
### Performance
|
||||
|
||||
- Avoid allocations for isort module names ([#11251](https://github.com/astral-sh/ruff/pull/11251))
|
||||
- Build a separate ARM wheel for macOS ([#11149](https://github.com/astral-sh/ruff/pull/11149))
|
||||
|
||||
## 0.4.2
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-pyi`\] Allow for overloaded `__exit__` and `__aexit__` definitions (`PYI036`) ([#11057](https://github.com/astral-sh/ruff/pull/11057))
|
||||
- \[`pyupgrade`\] Catch usages of `"%s" % var` and provide an unsafe fix (`UP031`) ([#11019](https://github.com/astral-sh/ruff/pull/11019))
|
||||
- \[`refurb`\] Implement new rule that suggests min/max over `sorted()` (`FURB192`) ([#10868](https://github.com/astral-sh/ruff/pull/10868))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix an issue with missing diagnostics for Neovim and Helix ([#11092](https://github.com/astral-sh/ruff/pull/11092))
|
||||
- Implement hover documentation for `noqa` codes ([#11096](https://github.com/astral-sh/ruff/pull/11096))
|
||||
- Introduce common Ruff configuration options with new server settings ([#11062](https://github.com/astral-sh/ruff/pull/11062))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Use `macos-12` for building release wheels to enable macOS 11 compatibility ([#11146](https://github.com/astral-sh/ruff/pull/11146))
|
||||
- \[`flake8-blind-expect`\] Allow raise from in `BLE001` ([#11131](https://github.com/astral-sh/ruff/pull/11131))
|
||||
- \[`flake8-pyi`\] Allow simple assignments to `None` in enum class scopes (`PYI026`) ([#11128](https://github.com/astral-sh/ruff/pull/11128))
|
||||
- \[`flake8-simplify`\] Avoid raising `SIM911` for non-`zip` attribute calls ([#11126](https://github.com/astral-sh/ruff/pull/11126))
|
||||
- \[`refurb`\] Avoid `operator.itemgetter` suggestion for single-item tuple ([#11095](https://github.com/astral-sh/ruff/pull/11095))
|
||||
- \[`ruff`\] Respect per-file-ignores for `RUF100` with no other diagnostics ([#11058](https://github.com/astral-sh/ruff/pull/11058))
|
||||
- \[`ruff`\] Fix async comprehension false positive (`RUF029`) ([#11070](https://github.com/astral-sh/ruff/pull/11070))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`flake8-bugbear`\] Document explicitly disabling strict zip (`B905`) ([#11040](https://github.com/astral-sh/ruff/pull/11040))
|
||||
- \[`flake8-type-checking`\] Mention `lint.typing-modules` in `TCH001`, `TCH002`, and `TCH003` ([#11144](https://github.com/astral-sh/ruff/pull/11144))
|
||||
- \[`isort`\] Improve documentation around custom `isort` sections ([#11050](https://github.com/astral-sh/ruff/pull/11050))
|
||||
- \[`pylint`\] Fix documentation oversight for `invalid-X-returns` ([#11094](https://github.com/astral-sh/ruff/pull/11094))
|
||||
|
||||
### Performance
|
||||
|
||||
- Use `matchit` to resolve per-file settings ([#11111](https://github.com/astral-sh/ruff/pull/11111))
|
||||
|
||||
## 0.4.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `invalid-hash-returned` (`PLE0309`) ([#10961](https://github.com/astral-sh/ruff/pull/10961))
|
||||
- \[`pylint`\] Implement `invalid-index-returned` (`PLE0305`) ([#10962](https://github.com/astral-sh/ruff/pull/10962))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pylint`\] Allow `NoReturn`-like functions for `__str__`, `__len__`, etc. (`PLE0307`) ([#11017](https://github.com/astral-sh/ruff/pull/11017))
|
||||
- Parser: Use empty range when there's "gap" in token source ([#11032](https://github.com/astral-sh/ruff/pull/11032))
|
||||
- \[`ruff`\] Ignore stub functions in `unused-async` (`RUF029`) ([#11026](https://github.com/astral-sh/ruff/pull/11026))
|
||||
- Parser: Expect indented case block instead of match stmt ([#11033](https://github.com/astral-sh/ruff/pull/11033))
|
||||
|
||||
## 0.4.0
|
||||
|
||||
### A new, hand-written parser
|
||||
|
||||
Ruff's new parser is **>2x faster**, which translates to a **20-40% speedup** for all linting and formatting invocations.
|
||||
There's a lot to say about this exciting change, so check out the [blog post](https://astral.sh/blog/ruff-v0.4.0) for more details!
|
||||
|
||||
See [#10036](https://github.com/astral-sh/ruff/pull/10036) for implementation details.
|
||||
|
||||
### A new language server in Rust
|
||||
|
||||
With this release, we also want to highlight our new language server. `ruff server` is a Rust-powered language
|
||||
server that comes built-in with Ruff. It can be used with any editor that supports the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) (LSP).
|
||||
It uses a multi-threaded, lock-free architecture inspired by `rust-analyzer` and it will open the door for a lot
|
||||
of exciting features. It’s also faster than our previous [Python-based language server](https://github.com/astral-sh/ruff-lsp)
|
||||
-- but you probably guessed that already.
|
||||
|
||||
`ruff server` is only in alpha, but it has a lot of features that you can try out today:
|
||||
|
||||
- Lints Python files automatically and shows quick-fixes when available
|
||||
- Formats Python files, with support for range formatting
|
||||
- Comes with commands for quickly performing actions: `ruff.applyAutofix`, `ruff.applyFormat`, and `ruff.applyOrganizeImports`
|
||||
- Supports `source.fixAll` and `source.organizeImports` source actions
|
||||
- Automatically reloads your project configuration when you change it
|
||||
|
||||
To setup `ruff server` with your editor, refer to the [README.md](https://github.com/astral-sh/ruff/blob/main/crates/ruff_server/README.md).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pycodestyle`\] Do not trigger `E3` rules on `def`s following a function/method with a dummy body ([#10704](https://github.com/astral-sh/ruff/pull/10704))
|
||||
- \[`pylint`\] Implement `invalid-bytes-returned` (`E0308`) ([#10959](https://github.com/astral-sh/ruff/pull/10959))
|
||||
- \[`pylint`\] Implement `invalid-length-returned` (`E0303`) ([#10963](https://github.com/astral-sh/ruff/pull/10963))
|
||||
- \[`pylint`\] Implement `self-cls-assignment` (`W0642`) ([#9267](https://github.com/astral-sh/ruff/pull/9267))
|
||||
- \[`pylint`\] Omit stubs from `invalid-bool` and `invalid-str-return-type` ([#11008](https://github.com/astral-sh/ruff/pull/11008))
|
||||
- \[`ruff`\] New rule `unused-async` (`RUF029`) to detect unneeded `async` keywords on functions ([#9966](https://github.com/astral-sh/ruff/pull/9966))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bandit`\] Allow `urllib.request.urlopen` calls with static `Request` argument (`S310`) ([#10964](https://github.com/astral-sh/ruff/pull/10964))
|
||||
- \[`flake8-bugbear`\] Treat `raise NotImplemented`-only bodies as stub functions (`B006`) ([#10990](https://github.com/astral-sh/ruff/pull/10990))
|
||||
- \[`flake8-slots`\] Respect same-file `Enum` subclasses (`SLOT000`) ([#11006](https://github.com/astral-sh/ruff/pull/11006))
|
||||
- \[`pylint`\] Support inverted comparisons (`PLR1730`) ([#10920](https://github.com/astral-sh/ruff/pull/10920))
|
||||
|
||||
### Linter
|
||||
|
||||
- Improve handling of builtin symbols in linter rules ([#10919](https://github.com/astral-sh/ruff/pull/10919))
|
||||
- Improve display of rules in `--show-settings` ([#11003](https://github.com/astral-sh/ruff/pull/11003))
|
||||
- Improve inference capabilities of the `BuiltinTypeChecker` ([#10976](https://github.com/astral-sh/ruff/pull/10976))
|
||||
- Resolve classes and functions relative to script name ([#10965](https://github.com/astral-sh/ruff/pull/10965))
|
||||
- Improve performance of `RuleTable::any_enabled` ([#10971](https://github.com/astral-sh/ruff/pull/10971))
|
||||
|
||||
### Server
|
||||
|
||||
*This section is devoted to updates for our new language server, written in Rust.*
|
||||
|
||||
- Enable ruff-specific source actions ([#10916](https://github.com/astral-sh/ruff/pull/10916))
|
||||
- Refreshes diagnostics for open files when file configuration is changed ([#10988](https://github.com/astral-sh/ruff/pull/10988))
|
||||
- Important errors are now shown as popups ([#10951](https://github.com/astral-sh/ruff/pull/10951))
|
||||
- Introduce settings for directly configuring the linter and formatter ([#10984](https://github.com/astral-sh/ruff/pull/10984))
|
||||
- Resolve configuration for each document individually ([#10950](https://github.com/astral-sh/ruff/pull/10950))
|
||||
- Write a setup guide for Neovim ([#10987](https://github.com/astral-sh/ruff/pull/10987))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `RUFF_OUTPUT_FILE` environment variable support ([#10992](https://github.com/astral-sh/ruff/pull/10992))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid `non-augmented-assignment` for reversed, non-commutative operators (`PLR6104`) ([#10909](https://github.com/astral-sh/ruff/pull/10909))
|
||||
- Limit commutative non-augmented-assignments to primitive data types (`PLR6104`) ([#10912](https://github.com/astral-sh/ruff/pull/10912))
|
||||
- Respect `per-file-ignores` for `RUF100` on blanket `# noqa` ([#10908](https://github.com/astral-sh/ruff/pull/10908))
|
||||
- Consider `if` expression for parenthesized with items parsing ([#11010](https://github.com/astral-sh/ruff/pull/11010))
|
||||
- Consider binary expr for parenthesized with items parsing ([#11012](https://github.com/astral-sh/ruff/pull/11012))
|
||||
- Reset `FOR_TARGET` context for all kinds of parentheses ([#11009](https://github.com/astral-sh/ruff/pull/11009))
|
||||
|
||||
## 0.3.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement `loop-iterator-mutation` (`B909`) ([#9578](https://github.com/astral-sh/ruff/pull/9578))
|
||||
- \[`pylint`\] Implement rule to prefer augmented assignment (`PLR6104`) ([#9932](https://github.com/astral-sh/ruff/pull/9932))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid TOCTOU errors in cache initialization ([#10884](https://github.com/astral-sh/ruff/pull/10884))
|
||||
- \[`pylint`\] Recode `nan-comparison` rule to `W0177` ([#10894](https://github.com/astral-sh/ruff/pull/10894))
|
||||
- \[`pylint`\] Reverse min-max logic in `if-stmt-min-max` ([#10890](https://github.com/astral-sh/ruff/pull/10890))
|
||||
|
||||
## 0.3.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `bad-staticmethod-argument` (`PLW0211`) ([#10781](https://github.com/astral-sh/ruff/pull/10781))
|
||||
- \[`pylint`\] Implement `if-stmt-min-max` (`PLR1730`, `PLR1731`) ([#10002](https://github.com/astral-sh/ruff/pull/10002))
|
||||
- \[`pyupgrade`\] Replace `str,Enum` multiple inheritance with `StrEnum` `UP042` ([#10713](https://github.com/astral-sh/ruff/pull/10713))
|
||||
- \[`refurb`\] Implement `if-expr-instead-of-or-operator` (`FURB110`) ([#10687](https://github.com/astral-sh/ruff/pull/10687))
|
||||
- \[`refurb`\] Implement `int-on-sliced-str` (`FURB166`) ([#10650](https://github.com/astral-sh/ruff/pull/10650))
|
||||
- \[`refurb`\] Implement `write-whole-file` (`FURB103`) ([#10802](https://github.com/astral-sh/ruff/pull/10802))
|
||||
- \[`refurb`\] Support `itemgetter` in `reimplemented-operator` (`FURB118`) ([#10526](https://github.com/astral-sh/ruff/pull/10526))
|
||||
- \[`flake8_comprehensions`\] Add `sum`/`min`/`max` to unnecessary comprehension check (`C419`) ([#10759](https://github.com/astral-sh/ruff/pull/10759))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pydocstyle`\] Require capitalizing docstrings where the first sentence is a single word (`D403`) ([#10776](https://github.com/astral-sh/ruff/pull/10776))
|
||||
- \[`pycodestyle`\] Ignore annotated lambdas in class scopes (`E731`) ([#10720](https://github.com/astral-sh/ruff/pull/10720))
|
||||
- \[`flake8-pyi`\] Various improvements to PYI034 ([#10807](https://github.com/astral-sh/ruff/pull/10807))
|
||||
- \[`flake8-slots`\] Flag subclasses of call-based `typing.NamedTuple`s as well as subclasses of `collections.namedtuple()` (`SLOT002`) ([#10808](https://github.com/astral-sh/ruff/pull/10808))
|
||||
- \[`pyflakes`\] Allow forward references in class bases in stub files (`F821`) ([#10779](https://github.com/astral-sh/ruff/pull/10779))
|
||||
- \[`pygrep-hooks`\] Improve `blanket-noqa` error message (`PGH004`) ([#10851](https://github.com/astral-sh/ruff/pull/10851))
|
||||
|
||||
### CLI
|
||||
|
||||
- Support `FORCE_COLOR` env var ([#10839](https://github.com/astral-sh/ruff/pull/10839))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Support negated patterns in `[extend-]per-file-ignores` ([#10852](https://github.com/astral-sh/ruff/pull/10852))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-import-conventions`\] Accept non-aliased (but correct) import in `unconventional-import-alias` (`ICN001`) ([#10729](https://github.com/astral-sh/ruff/pull/10729))
|
||||
- \[`flake8-quotes`\] Add semantic model flag when inside f-string replacement field ([#10766](https://github.com/astral-sh/ruff/pull/10766))
|
||||
- \[`pep8-naming`\] Recursively resolve `TypeDicts` for N815 violations ([#10719](https://github.com/astral-sh/ruff/pull/10719))
|
||||
- \[`flake8-quotes`\] Respect `Q00*` ignores in `flake8-quotes` rules ([#10728](https://github.com/astral-sh/ruff/pull/10728))
|
||||
- \[`flake8-simplify`\] Show negated condition in `needless-bool` diagnostics (`SIM103`) ([#10854](https://github.com/astral-sh/ruff/pull/10854))
|
||||
- \[`ruff`\] Use within-scope shadowed bindings in `asyncio-dangling-task` (`RUF006`) ([#10793](https://github.com/astral-sh/ruff/pull/10793))
|
||||
- \[`flake8-pytest-style`\] Fix single-tuple conversion in `pytest-parametrize-values-wrong-type` (`PT007`) ([#10862](https://github.com/astral-sh/ruff/pull/10862))
|
||||
- \[`flake8-return`\] Ignore assignments to annotated variables in `unnecessary-assign` (`RET504`) ([#10741](https://github.com/astral-sh/ruff/pull/10741))
|
||||
- \[`refurb`\] Do not allow any keyword arguments for `read-whole-file` in `rb` mode (`FURB101`) ([#10803](https://github.com/astral-sh/ruff/pull/10803))
|
||||
- \[`pylint`\] Don't recommend decorating staticmethods with `@singledispatch` (`PLE1519`, `PLE1520`) ([#10637](https://github.com/astral-sh/ruff/pull/10637))
|
||||
- \[`pydocstyle`\] Use section name range for all section-related docstring diagnostics ([#10740](https://github.com/astral-sh/ruff/pull/10740))
|
||||
- Respect `# noqa` directives on `__all__` openers ([#10798](https://github.com/astral-sh/ruff/pull/10798))
|
||||
|
||||
## 0.3.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `modified-iterating-set` (`E4703`) ([#10473](https://github.com/astral-sh/ruff/pull/10473))
|
||||
- \[`refurb`\] Implement `for-loop-set-mutations` (`FURB142`) ([#10583](https://github.com/astral-sh/ruff/pull/10583))
|
||||
- \[`refurb`\] Implement `unnecessary-from-float` (`FURB164`) ([#10647](https://github.com/astral-sh/ruff/pull/10647))
|
||||
- \[`refurb`\] Implement `verbose-decimal-constructor` (`FURB157`) ([#10533](https://github.com/astral-sh/ruff/pull/10533))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-comprehensions`\] Handled special case for `C401` which also matches `C416` ([#10596](https://github.com/astral-sh/ruff/pull/10596))
|
||||
- \[`flake8-pyi`\] Mark `unaliased-collections-abc-set-import` fix as "safe" for more cases in stub files (`PYI025`) ([#10547](https://github.com/astral-sh/ruff/pull/10547))
|
||||
- \[`numpy`\] Add `row_stack` to NumPy 2.0 migration rule ([#10646](https://github.com/astral-sh/ruff/pull/10646))
|
||||
- \[`pycodestyle`\] Allow cell magics before an import (`E402`) ([#10545](https://github.com/astral-sh/ruff/pull/10545))
|
||||
- \[`pycodestyle`\] Avoid blank line rules for the first logical line in cell ([#10291](https://github.com/astral-sh/ruff/pull/10291))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Respected nested namespace packages ([#10541](https://github.com/astral-sh/ruff/pull/10541))
|
||||
- \[`flake8-boolean-trap`\] Add setting for user defined allowed boolean trap ([#10531](https://github.com/astral-sh/ruff/pull/10531))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Correctly handle references in `__all__` definitions when renaming symbols in autofixes ([#10527](https://github.com/astral-sh/ruff/pull/10527))
|
||||
- Track ranges of names inside `__all__` definitions ([#10525](https://github.com/astral-sh/ruff/pull/10525))
|
||||
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`) ([#10539](https://github.com/astral-sh/ruff/pull/10539))
|
||||
- \[`flake8-copyright`\] Accept commas in default copyright pattern ([#9498](https://github.com/astral-sh/ruff/pull/9498))
|
||||
- \[`flake8-datetimez`\] Allow f-strings with `%z` for `DTZ007` ([#10651](https://github.com/astral-sh/ruff/pull/10651))
|
||||
- \[`flake8-pytest-style`\] Fix `PT014` autofix for last item in list ([#10532](https://github.com/astral-sh/ruff/pull/10532))
|
||||
- \[`flake8-quotes`\] Ignore `Q000`, `Q001` when string is inside forward ref ([#10585](https://github.com/astral-sh/ruff/pull/10585))
|
||||
- \[`isort`\] Always place non-relative imports after relative imports ([#10669](https://github.com/astral-sh/ruff/pull/10669))
|
||||
- \[`isort`\] Respect Unicode characters in import sorting ([#10529](https://github.com/astral-sh/ruff/pull/10529))
|
||||
- \[`pyflakes`\] Fix F821 false negatives when `from __future__ import annotations` is active (attempt 2) ([#10524](https://github.com/astral-sh/ruff/pull/10524))
|
||||
- \[`pyflakes`\] Make `unnecessary-lambda` an always-unsafe fix ([#10668](https://github.com/astral-sh/ruff/pull/10668))
|
||||
- \[`pylint`\] Fixed false-positive on the rule `PLW1641` (`eq-without-hash`) ([#10566](https://github.com/astral-sh/ruff/pull/10566))
|
||||
- \[`ruff`\] Fix panic in unused `# noqa` removal with multi-byte space (`RUF100`) ([#10682](https://github.com/astral-sh/ruff/pull/10682))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add PR title format to `CONTRIBUTING.md` ([#10665](https://github.com/astral-sh/ruff/pull/10665))
|
||||
- Fix list markup to include blank lines required ([#10591](https://github.com/astral-sh/ruff/pull/10591))
|
||||
- Put `flake8-logging` next to the other flake8 plugins in registry ([#10587](https://github.com/astral-sh/ruff/pull/10587))
|
||||
- \[`flake8-bandit`\] Update warning message for rule `S305` to address insecure block cipher mode use ([#10602](https://github.com/astral-sh/ruff/pull/10602))
|
||||
- \[`flake8-bugbear`\] Document use of anonymous assignment in `useless-expression` ([#10551](https://github.com/astral-sh/ruff/pull/10551))
|
||||
- \[`flake8-datetimez`\] Clarify error messages and docs for `DTZ` rules ([#10621](https://github.com/astral-sh/ruff/pull/10621))
|
||||
- \[`pycodestyle`\] Use same before vs. after numbers for `space-around-operator` ([#10640](https://github.com/astral-sh/ruff/pull/10640))
|
||||
- \[`ruff`\] Change `quadratic-list-summation` docs to use `iadd` consistently ([#10666](https://github.com/astral-sh/ruff/pull/10666))
|
||||
|
||||
## 0.3.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`) ([#10414](https://github.com/astral-sh/ruff/pull/10414))
|
||||
- \[`pylint`\] Implement `nan-comparison` (`PLW0117`) ([#10401](https://github.com/astral-sh/ruff/pull/10401))
|
||||
- \[`pylint`\] Implement `nonlocal-and-global` (`E115`) ([#10407](https://github.com/astral-sh/ruff/pull/10407))
|
||||
- \[`pylint`\] Implement `singledispatchmethod-function` (`PLE5120`) ([#10428](https://github.com/astral-sh/ruff/pull/10428))
|
||||
- \[`refurb`\] Implement `list-reverse-copy` (`FURB187`) ([#10212](https://github.com/astral-sh/ruff/pull/10212))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-pytest-style`\] Add automatic fix for `pytest-parametrize-values-wrong-type` (`PT007`) ([#10461](https://github.com/astral-sh/ruff/pull/10461))
|
||||
- \[`pycodestyle`\] Allow SPDX license headers to exceed the line length (`E501`) ([#10481](https://github.com/astral-sh/ruff/pull/10481))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix unstable formatting for trailing subscript end-of-line comment ([#10492](https://github.com/astral-sh/ruff/pull/10492))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid code comment detection in PEP 723 script tags ([#10464](https://github.com/astral-sh/ruff/pull/10464))
|
||||
- Avoid incorrect tuple transformation in single-element case (`C409`) ([#10491](https://github.com/astral-sh/ruff/pull/10491))
|
||||
- Bug fix: Prevent fully defined links [`name`](link) from being reformatted ([#10442](https://github.com/astral-sh/ruff/pull/10442))
|
||||
- Consider raw source code for `W605` ([#10480](https://github.com/astral-sh/ruff/pull/10480))
|
||||
- Docs: Link inline settings when not part of options section ([#10499](https://github.com/astral-sh/ruff/pull/10499))
|
||||
- Don't treat annotations as redefinitions in `.pyi` files ([#10512](https://github.com/astral-sh/ruff/pull/10512))
|
||||
- Fix `E231` bug: Inconsistent catch compared to pycodestyle, such as when dict nested in list ([#10469](https://github.com/astral-sh/ruff/pull/10469))
|
||||
- Fix pylint upstream categories not showing in docs ([#10441](https://github.com/astral-sh/ruff/pull/10441))
|
||||
- Add missing `Options` references to blank line docs ([#10498](https://github.com/astral-sh/ruff/pull/10498))
|
||||
- 'Revert "F821: Fix false negatives in .py files when `from __future__ import annotations` is active (#10362)"' ([#10513](https://github.com/astral-sh/ruff/pull/10513))
|
||||
- Apply NFKC normalization to unicode identifiers in the lexer ([#10412](https://github.com/astral-sh/ruff/pull/10412))
|
||||
- Avoid failures due to non-deterministic binding ordering ([#10478](https://github.com/astral-sh/ruff/pull/10478))
|
||||
- \[`flake8-bugbear`\] Allow tuples of exceptions (`B030`) ([#10437](https://github.com/astral-sh/ruff/pull/10437))
|
||||
- \[`flake8-quotes`\] Avoid syntax errors due to invalid quotes (`Q000, Q002`) ([#10199](https://github.com/astral-sh/ruff/pull/10199))
|
||||
|
||||
## 0.3.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bandit`\]: Implement `S610` rule ([#10316](https://github.com/astral-sh/ruff/pull/10316))
|
||||
- \[`pycodestyle`\] Implement `blank-line-at-end-of-file` (`W391`) ([#10243](https://github.com/astral-sh/ruff/pull/10243))
|
||||
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`) ([#10292](https://github.com/astral-sh/ruff/pull/10292))
|
||||
- \[`pylint`\] - implement `redeclared-assigned-name` (`W0128`) ([#9268](https://github.com/astral-sh/ruff/pull/9268))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8_comprehensions`\] Handled special case for `C400` which also matches `C416` ([#10419](https://github.com/astral-sh/ruff/pull/10419))
|
||||
- \[`flake8-bandit`\] Implement upstream updates for `S311`, `S324` and `S605` ([#10313](https://github.com/astral-sh/ruff/pull/10313))
|
||||
- \[`pyflakes`\] Remove `F401` fix for `__init__` imports by default and allow opt-in to unsafe fix ([#10365](https://github.com/astral-sh/ruff/pull/10365))
|
||||
- \[`pylint`\] Implement `invalid-bool-return-type` (`E304`) ([#10377](https://github.com/astral-sh/ruff/pull/10377))
|
||||
- \[`pylint`\] Include builtin warnings in useless-exception-statement (`PLW0133`) ([#10394](https://github.com/astral-sh/ruff/pull/10394))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add message on success to `ruff check` ([#8631](https://github.com/astral-sh/ruff/pull/8631))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`PIE970`\] Allow trailing ellipsis in `typing.TYPE_CHECKING` ([#10413](https://github.com/astral-sh/ruff/pull/10413))
|
||||
- Avoid `TRIO115` if the argument is a variable ([#10376](https://github.com/astral-sh/ruff/pull/10376))
|
||||
- \[`F811`\] Avoid removing shadowed imports that point to different symbols ([#10387](https://github.com/astral-sh/ruff/pull/10387))
|
||||
- Fix `F821` and `F822` false positives in `.pyi` files ([#10341](https://github.com/astral-sh/ruff/pull/10341))
|
||||
- Fix `F821` false negatives in `.py` files when `from __future__ import annotations` is active ([#10362](https://github.com/astral-sh/ruff/pull/10362))
|
||||
- Fix case where `Indexer` fails to identify continuation preceded by newline #10351 ([#10354](https://github.com/astral-sh/ruff/pull/10354))
|
||||
- Sort hash maps in `Settings` display ([#10370](https://github.com/astral-sh/ruff/pull/10370))
|
||||
- Track conditional deletions in the semantic model ([#10415](https://github.com/astral-sh/ruff/pull/10415))
|
||||
- \[`C413`\] Wrap expressions in parentheses when negating ([#10346](https://github.com/astral-sh/ruff/pull/10346))
|
||||
- \[`pycodestyle`\] Do not ignore lines before the first logical line in blank lines rules. ([#10382](https://github.com/astral-sh/ruff/pull/10382))
|
||||
- \[`pycodestyle`\] Do not trigger `E225` and `E275` when the next token is a ')' ([#10315](https://github.com/astral-sh/ruff/pull/10315))
|
||||
- \[`pylint`\] Avoid false-positive slot non-assignment for `__dict__` (`PLE0237`) ([#10348](https://github.com/astral-sh/ruff/pull/10348))
|
||||
- Gate f-string struct size test for Rustc \< 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Use `ruff.toml` format in README ([#10393](https://github.com/astral-sh/ruff/pull/10393))
|
||||
- \[`RUF008`\] Make it clearer that a mutable default in a dataclass is only valid if it is typed as a ClassVar ([#10395](https://github.com/astral-sh/ruff/pull/10395))
|
||||
- \[`pylint`\] Extend docs and test in `invalid-str-return-type` (`E307`) ([#10400](https://github.com/astral-sh/ruff/pull/10400))
|
||||
- Remove `.` from `check` and `format` commands ([#10217](https://github.com/astral-sh/ruff/pull/10217))
|
||||
|
||||
## 0.3.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- Improve single-`with` item formatting for Python 3.8 or older ([#10276](https://github.com/astral-sh/ruff/pull/10276))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pyupgrade`\] Allow fixes for f-string rule regardless of line length (`UP032`) ([#10263](https://github.com/astral-sh/ruff/pull/10263))
|
||||
- \[`pycodestyle`\] Include actual conditions in E712 diagnostics ([#10254](https://github.com/astral-sh/ruff/pull/10254))
|
||||
|
||||
### Bug fixes

- Fix trailing kwargs end of line comment after slash ([#10297](https://github.com/astral-sh/ruff/pull/10297))
- Fix unstable `with` items formatting ([#10274](https://github.com/astral-sh/ruff/pull/10274))
- Avoid repeating function calls in f-string conversions ([#10265](https://github.com/astral-sh/ruff/pull/10265))
- Fix E203 false positive for slices in format strings ([#10280](https://github.com/astral-sh/ruff/pull/10280))
- Fix incorrect `Parameter` range for `*args` and `**kwargs` ([#10283](https://github.com/astral-sh/ruff/pull/10283))
- Treat `typing.Annotated` subscripts as type definitions ([#10285](https://github.com/astral-sh/ruff/pull/10285))

## 0.3.1

### Preview features

- \[`pycodestyle`\] Fix E301 not triggering on decorated methods ([#10117](https://github.com/astral-sh/ruff/pull/10117))
- \[`pycodestyle`\] Respect `isort` settings in blank line rules (`E3*`) ([#10096](https://github.com/astral-sh/ruff/pull/10096))
- \[`pycodestyle`\] Make blank lines in typing stub files optional (`E3*`) ([#10098](https://github.com/astral-sh/ruff/pull/10098))
- \[`pylint`\] Implement `singledispatch-method` (`E1519`) ([#10140](https://github.com/astral-sh/ruff/pull/10140))
- \[`pylint`\] Implement `useless-exception-statement` (`W0133`) ([#10176](https://github.com/astral-sh/ruff/pull/10176))

### Rule changes

- \[`flake8-debugger`\] Check for use of `debugpy` and `ptvsd` debug modules (#10177) ([#10194](https://github.com/astral-sh/ruff/pull/10194))
- \[`pyupgrade`\] Generate diagnostic for all valid f-string conversions regardless of line length (`UP032`) ([#10238](https://github.com/astral-sh/ruff/pull/10238))
- \[`pep8_naming`\] Add fixes for `N804` and `N805` ([#10215](https://github.com/astral-sh/ruff/pull/10215))

### CLI

- Colorize the output of `ruff format --diff` ([#10110](https://github.com/astral-sh/ruff/pull/10110))
- Make `--config` and `--isolated` global flags ([#10150](https://github.com/astral-sh/ruff/pull/10150))
- Correctly expand tildes and environment variables in paths passed to `--config` ([#10219](https://github.com/astral-sh/ruff/pull/10219))

### Configuration

- Accept a PEP 440 version specifier for `required-version` ([#10216](https://github.com/astral-sh/ruff/pull/10216))
- Implement isort's `default-section` setting ([#10149](https://github.com/astral-sh/ruff/pull/10149))

### Bug fixes

- Remove trailing space from `CapWords` message ([#10220](https://github.com/astral-sh/ruff/pull/10220))
- Respect external codes in file-level exemptions ([#10203](https://github.com/astral-sh/ruff/pull/10203))
- \[`flake8-raise`\] Avoid false-positives for parens-on-raise with `future.exception()` (`RSE102`) ([#10206](https://github.com/astral-sh/ruff/pull/10206))
- \[`pylint`\] Add fix for unary expressions in `PLC2801` ([#9587](https://github.com/astral-sh/ruff/pull/9587))
- \[`ruff`\] Fix RUF028 not allowing `# fmt: skip` on match cases ([#10178](https://github.com/astral-sh/ruff/pull/10178))

## 0.3.0

This release introduces the new Ruff formatter 2024.2 style and adds a new lint rule to detect invalid formatter suppression comments.

### Preview features

- \[`flake8-bandit`\] Remove suspicious-lxml-import (`S410`) ([#10154](https://github.com/astral-sh/ruff/pull/10154))
- \[`pycodestyle`\] Allow `os.environ` modifications between imports (`E402`) ([#10066](https://github.com/astral-sh/ruff/pull/10066))
- \[`pycodestyle`\] Don't warn about a single whitespace character before a comma in a tuple (`E203`) ([#10094](https://github.com/astral-sh/ruff/pull/10094))

### Rule changes

- \[`eradicate`\] Detect commented out `case` statements (`ERA001`) ([#10055](https://github.com/astral-sh/ruff/pull/10055))
- \[`eradicate`\] Detect single-line code for `try:`, `except:`, etc. (`ERA001`) ([#10057](https://github.com/astral-sh/ruff/pull/10057))
- \[`flake8-boolean-trap`\] Allow boolean positionals in `__post_init__` ([#10027](https://github.com/astral-sh/ruff/pull/10027))
- \[`flake8-copyright`\] Allow © in copyright notices ([#10065](https://github.com/astral-sh/ruff/pull/10065))
- \[`isort`\]: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
- \[`pylint`\] New Rule `dict-iter-missing-items` (`PLE1141`) ([#9845](https://github.com/astral-sh/ruff/pull/9845))
- \[`pylint`\] Ignore `sys.version` and `sys.platform` (`PLR1714`) ([#10054](https://github.com/astral-sh/ruff/pull/10054))
- \[`pyupgrade`\] Detect literals with unary operators (`UP018`) ([#10060](https://github.com/astral-sh/ruff/pull/10060))
- \[`ruff`\] Expand rule for `list(iterable).pop(0)` idiom (`RUF015`; illustrated below) ([#10148](https://github.com/astral-sh/ruff/pull/10148))
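
A short sketch of the idiom `RUF015` now covers (variable names are invented):

```python
numbers = range(10)

# Flagged: materializes the whole iterable just to take the first element.
head = list(numbers).pop(0)  # RUF015

# Preferred: stop after the first element.
head = next(iter(numbers))
```
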
### Formatter

This release introduces the Ruff 2024.2 style, stabilizing the following changes:

- Prefer splitting the assignment's value over the target or type annotation ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- Remove blank lines before class docstrings ([#9154](https://github.com/astral-sh/ruff/pull/9154))
- Wrap multiple context managers in `with` parentheses when targeting Python 3.9 or newer ([#9222](https://github.com/astral-sh/ruff/pull/9222))
- Add a blank line after nested classes with a dummy body (`...`) in typing stub files ([#9155](https://github.com/astral-sh/ruff/pull/9155))
- Reduce vertical spacing for classes and functions with a dummy (`...`) body ([#7440](https://github.com/astral-sh/ruff/issues/7440), [#9240](https://github.com/astral-sh/ruff/pull/9240))
- Add a blank line after the module docstring ([#8283](https://github.com/astral-sh/ruff/pull/8283))
- Parenthesize long type hints in assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Preserve indent for single multiline-string call-expressions ([#9637](https://github.com/astral-sh/ruff/pull/9637))
- Normalize hex escape and unicode escape sequences ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Format module docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))

### CLI

- Explicitly disallow `extend` as part of a `--config` flag ([#10135](https://github.com/astral-sh/ruff/pull/10135))
- Remove `build` from the default exclusion list ([#10093](https://github.com/astral-sh/ruff/pull/10093))
- Deprecate `ruff <path>`, `ruff --explain`, `ruff --clean`, and `ruff --generate-shell-completion` in favor of `ruff check <path>`, `ruff rule`, `ruff clean`, and `ruff generate-shell-completion` ([#10169](https://github.com/astral-sh/ruff/pull/10169))
- Remove the deprecated CLI option `--format` from `ruff rule` and `ruff linter` ([#10170](https://github.com/astral-sh/ruff/pull/10170))

### Bug fixes

- \[`flake8-bugbear`\] Avoid adding default initializers to stubs (`B006`) ([#10152](https://github.com/astral-sh/ruff/pull/10152))
- \[`flake8-type-checking`\] Respect runtime-required decorators for function signatures ([#10091](https://github.com/astral-sh/ruff/pull/10091))
- \[`pycodestyle`\] Mark fixes overlapping with a multiline string as unsafe (`W293`) ([#10049](https://github.com/astral-sh/ruff/pull/10049))
- \[`pydocstyle`\] Trim whitespace when removing blank lines after section (`D413`) ([#10162](https://github.com/astral-sh/ruff/pull/10162))
- \[`pylint`\] Delete entire statement, including semicolons (`PLR0203`) ([#10074](https://github.com/astral-sh/ruff/pull/10074))
- \[`ruff`\] Avoid f-string false positives in `gettext` calls (`RUF027`) ([#10118](https://github.com/astral-sh/ruff/pull/10118))
- Fix `ruff` crashing on PowerPC systems because of too small page size ([#10080](https://github.com/astral-sh/ruff/pull/10080))

### Performance

- Add cold attribute to less likely printer queue branches in the formatter ([#10121](https://github.com/astral-sh/ruff/pull/10121))
- Skip unnecessary string normalization in the formatter ([#10116](https://github.com/astral-sh/ruff/pull/10116))

### Documentation

- Remove "Beta" Label from formatter documentation ([#10144](https://github.com/astral-sh/ruff/pull/10144))
- `line-length` option: fix link to `pycodestyle.max-line-length` ([#10136](https://github.com/astral-sh/ruff/pull/10136))

## 0.2.2

Highlights include:

- Initial support for formatting f-strings (in `--preview`).
- Support for overriding arbitrary configuration options via the CLI through an expanded `--config` argument (e.g., `--config "lint.isort.combine-as-imports=false"`).
- Significant performance improvements in Ruff's lexer, parser, and lint rules.

### Preview features

- Implement minimal f-string formatting ([#9642](https://github.com/astral-sh/ruff/pull/9642))
- \[`pycodestyle`\] Add blank line(s) rules (`E301`, `E302`, `E303`, `E304`, `E305`, `E306`) ([#9266](https://github.com/astral-sh/ruff/pull/9266))
- \[`refurb`\] Implement `readlines_in_for` (`FURB129`) ([#9880](https://github.com/astral-sh/ruff/pull/9880))

### Rule changes

- \[`ruff`\] Ensure closing parentheses for multiline sequences are always on their own line (`RUF022`, `RUF023`) ([#9793](https://github.com/astral-sh/ruff/pull/9793))
- \[`numpy`\] Add missing deprecation violations (`NPY002`) ([#9862](https://github.com/astral-sh/ruff/pull/9862))
- \[`flake8-bandit`\] Detect `mark_safe` usages in decorators ([#9887](https://github.com/astral-sh/ruff/pull/9887))
- \[`ruff`\] Expand `asyncio-dangling-task` (`RUF006`) to include `new_event_loop` (see the sketch after this list) ([#9976](https://github.com/astral-sh/ruff/pull/9976))
- \[`flake8-pyi`\] Ignore 'unused' private type dicts in class scopes ([#9952](https://github.com/astral-sh/ruff/pull/9952))
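
A sketch of the `RUF006` expansion, assuming the rule fires on discarded task handles from loops obtained via `asyncio.new_event_loop()` (the helper names here are invented):

```python
import asyncio


async def work() -> None:
    await asyncio.sleep(0.1)


loop = asyncio.new_event_loop()
try:
    # RUF006 now also recognizes this: the task handle is discarded, so the
    # task may be garbage-collected before it runs to completion.
    loop.create_task(work())

    # Preferred: keep a reference that outlives the task.
    task = loop.create_task(work())
    loop.run_until_complete(task)
finally:
    loop.close()
```
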
### Formatter

- Docstring formatting: Preserve tab indentation when using `indent-style=tabs` ([#9915](https://github.com/astral-sh/ruff/pull/9915))
- Disable top-level docstring formatting for notebooks ([#9957](https://github.com/astral-sh/ruff/pull/9957))
- Stabilize quote-style's `preserve` mode ([#9922](https://github.com/astral-sh/ruff/pull/9922))

### CLI

- Allow arbitrary configuration options to be overridden via the CLI ([#9599](https://github.com/astral-sh/ruff/pull/9599))

### Bug fixes

- Make `show-settings` filters directory-agnostic ([#9866](https://github.com/astral-sh/ruff/pull/9866))
- Respect duplicates when rewriting type aliases ([#9905](https://github.com/astral-sh/ruff/pull/9905))
- Respect tuple assignments in typing analyzer ([#9969](https://github.com/astral-sh/ruff/pull/9969))
- Use atomic write when persisting cache ([#9981](https://github.com/astral-sh/ruff/pull/9981))
- Use non-parenthesized range for `DebugText` ([#9953](https://github.com/astral-sh/ruff/pull/9953))
- \[`flake8-simplify`\] Avoid false positive with `async` for loops (`SIM113`) ([#9996](https://github.com/astral-sh/ruff/pull/9996))
- \[`flake8-trio`\] Respect `async with` in `timeout-without-await` ([#9859](https://github.com/astral-sh/ruff/pull/9859))
- \[`perflint`\] Catch a wider range of mutations in `PERF101` ([#9955](https://github.com/astral-sh/ruff/pull/9955))
- \[`pycodestyle`\] Fix `E30X` panics on blank lines with trailing white spaces ([#9907](https://github.com/astral-sh/ruff/pull/9907))
- \[`pydocstyle`\] Allow using `parameters` as a subsection header (`D405`) ([#9894](https://github.com/astral-sh/ruff/pull/9894))
- \[`pydocstyle`\] Fix blank-line docstring rules for module-level docstrings ([#9878](https://github.com/astral-sh/ruff/pull/9878))
- \[`pylint`\] Accept 0.0 and 1.0 as common magic values (`PLR2004`) ([#9964](https://github.com/astral-sh/ruff/pull/9964))
- \[`pylint`\] Avoid suggesting set rewrites for non-hashable types ([#9956](https://github.com/astral-sh/ruff/pull/9956))
- \[`ruff`\] Avoid false negatives with string literals inside of method calls (`RUF027`) ([#9865](https://github.com/astral-sh/ruff/pull/9865))
- \[`ruff`\] Fix panic in f-string detection (`RUF027`) ([#9990](https://github.com/astral-sh/ruff/pull/9990))
- \[`ruff`\] Ignore builtins when detecting missing f-strings ([#9849](https://github.com/astral-sh/ruff/pull/9849))

### Performance

- Use `memchr` for string lexing ([#9888](https://github.com/astral-sh/ruff/pull/9888))
- Use `memchr` for tab-indentation detection ([#9853](https://github.com/astral-sh/ruff/pull/9853))
- Reduce `Result<Tok, LexicalError>` size by using `Box<str>` instead of `String` ([#9885](https://github.com/astral-sh/ruff/pull/9885))
- Reduce size of `Expr` from 80 to 64 bytes ([#9900](https://github.com/astral-sh/ruff/pull/9900))
- Improve trailing comma rule performance ([#9867](https://github.com/astral-sh/ruff/pull/9867))
- Remove unnecessary string cloning from the parser ([#9884](https://github.com/astral-sh/ruff/pull/9884))

## 0.2.1

This release includes support for range formatting (i.e., the ability to format specific lines

@@ -1132,7 +593,7 @@ docstrings via the `docstring-code-format` setting.

- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))

### Bug fixes

@@ -1261,7 +722,7 @@ docstrings via the `docstring-code-format` setting.

- \[`flake8-trio`\] Implement `TRIO115` ([#8486](https://github.com/astral-sh/ruff/pull/8486))
- \[`refurb`\] Implement `type-none-comparison` (`FURB169`) ([#8487](https://github.com/astral-sh/ruff/pull/8487))
- Flag all comparisons against builtin types in `E721` (illustrated below) ([#8491](https://github.com/astral-sh/ruff/pull/8491))
- Make `SIM118` fix as safe when the expression is a known dictionary (illustrated below) ([#8525](https://github.com/astral-sh/ruff/pull/8525))
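
A minimal illustration of the `E721` pattern mentioned above (names are invented):

```python
value = 3

if type(value) == int:  # E721 now fires for builtin types too
    print("int")

# Preferred:
if isinstance(value, int):
    print("int")
```
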
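And a sketch of the `SIM118` case (a dict literal, so Ruff can know the rewrite is safe):

```python
ages = {"alice": 30, "bob": 25}

# `.keys()` is redundant in a membership test; because `ages` is known to
# be a dictionary here, the fix to `in ages` is safe.
if "alice" in ages.keys():  # SIM118
    print("found")
```
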
### Formatter

@@ -1429,7 +890,7 @@ Try it today with `ruff format`! [Check out the blog post](https://astral.sh/blo

- Add `backports.strenum` to `deprecated-imports` ([#8113](https://github.com/astral-sh/ruff/pull/8113))
- Update `SIM112` to ignore `https_proxy`, `http_proxy`, and `no_proxy` ([#8140](https://github.com/astral-sh/ruff/pull/8140))
- Update fix for `literal-membership` (`PLR6201`) to be unsafe ([#8097](https://github.com/astral-sh/ruff/pull/8097))
- Update fix for `mutable-argument-defaults` (`B006`) to be unsafe (illustrated below) ([#8108](https://github.com/astral-sh/ruff/pull/8108))
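
A quick sketch of the bug `B006` guards against, and why its rewrite can change behavior (function name is invented):

```python
def append_one(items: list[int] = []) -> list[int]:  # B006: mutable default
    items.append(1)
    return items


# The single default list is shared across calls, which is why rewriting to
# a `None` default plus an in-body assignment is marked unsafe:
print(append_one())  # [1]
print(append_one())  # [1, 1]
```
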
### Formatter

@@ -1557,7 +1018,7 @@ Read Ruff's new [versioning policy](https://docs.astral.sh/ruff/versioning/).

- \[`refurb`\] Add `single-item-membership-test` (`FURB171`; illustrated below) ([#7815](https://github.com/astral-sh/ruff/pull/7815))
- \[`pylint`\] Add `and-or-ternary` (`R1706`) ([#7811](https://github.com/astral-sh/ruff/pull/7811))
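
A minimal illustration of the `FURB171` pattern (names are invented):

```python
status = "open"

if status in ("open",):  # FURB171: membership test against a single item
    print("open")

# Preferred:
if status == "open":
    print("open")
```
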
*New rules are added in [preview](https://docs.astral.sh/ruff/preview/).*
_New rules are added in [preview](https://docs.astral.sh/ruff/preview/)._

### Configuration

@@ -33,18 +33,27 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio

## The Basics

Ruff welcomes contributions in the form of pull requests.
Ruff welcomes contributions in the form of Pull Requests.

For small changes (e.g., bug fixes), feel free to submit a PR.

For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
that are ready for contributions.

If you're looking for a place to start, we recommend implementing a new lint rule (see:
[_Adding a new lint rule_](#example-adding-a-new-lint-rule)), which will allow you to learn from and
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
existing Python plugins, which can be used as a reference implementation.

As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
guidance.

If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!

### Prerequisites

@@ -98,7 +107,7 @@ RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json

pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
will save you time and expedite the merge process.

Note that many code changes also require updating the snapshot tests, which is done interactively

@@ -108,14 +117,7 @@ after running `cargo test` like so:

cargo insta review
```

If your pull request relates to a specific lint rule, include the category and rule code in the
title, as in the following examples:

- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)

Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
prior to merging.

### Project Structure

@@ -123,8 +125,8 @@ prior to merging.

Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
such that all crates are contained in a flat `crates` directory.

The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.

At the time of writing, the repository includes the following crates:

@@ -197,14 +199,11 @@ and calling out to lint rule analyzer functions as it goes.

If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.

Once you're satisfied with your code, add tests for your rule
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
associated assets (like our JSON Schema) with `cargo dev generate-all`.
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
for more details.

Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
in:

> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)

Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
`cargo dev generate-all`.

#### Rule naming convention

@@ -317,7 +316,7 @@ To preview any changes to the documentation locally:

```

The documentation should then be available locally at
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).

## Release Process

@@ -330,13 +329,13 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release/bump.sh`; this command will:
   - Generate a temporary virtual environment with `rooster`
We use an experimental in-house tool for managing releases.

1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
1. Run `rooster release`; this command will:
   - Generate a changelog entry in `CHANGELOG.md`
   - Update versions in `pyproject.toml` and `Cargo.toml`
   - Update references to versions in the `README.md` and documentation
   - Display contributors for the release
1. The changelog should then be editorialized for consistency
   - Often, labels will be missing from pull requests; they will need to be manually organized into the proper section
   - Changes should be edited to be user-facing descriptions, avoiding internal details

@@ -360,7 +359,7 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

1. Open the draft release in the GitHub release section
1. Copy the changelog for the release into the GitHub release
   - See previous releases for formatting of section headers
1. Append the contributors from the `bump.sh` script
1. Generate the contributor list with `rooster contributors` and add to the release notes
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. One can determine if an update is needed when
   `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.

@@ -814,8 +813,8 @@ To understand Ruff's import categorization system, we first need to define two c

"project root".)

- "Package root": The top-most directory defining the Python package that includes a given Python
  file. To find the package root for a given Python file, traverse up its parent directories until
  you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
  marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
  you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
  a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
  just before that, i.e., the first directory in the package (see the sketch below).
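
A minimal sketch of that traversal (illustrative only; the function name is invented, and Ruff's real implementation also honors the `namespace-packages` setting):

```python
from pathlib import Path


def package_root(file: Path) -> Path | None:
    """Return the top-most package directory containing `file`, if any."""
    current = file.parent
    if not (current / "__init__.py").exists():
        return None  # `file` isn't inside a regular package
    # Walk upward while the parent directory is still a regular package.
    while (current.parent / "__init__.py").exists():
        current = current.parent
    return current
```
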
For example, given:

1236 Cargo.lock (generated)

File diff suppressed because it is too large

98 Cargo.toml
@@ -12,111 +12,103 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]

license = "MIT"

[workspace.dependencies]
aho-corasick = { version = "1.1.3" }
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
argfile = { version = "0.2.0" }
anyhow = { version = "1.0.79" }
argfile = { version = "0.1.6" }
assert_cmd = { version = "2.0.13" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
bitflags = { version = "2.4.1" }
bstr = { version = "1.9.0" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
chrono = { version = "0.4.34", default-features = false, features = ["clock"] }
clap = { version = "4.4.18", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
clearscreen = { version = "2.0.0" }
codspeed-criterion-compat = { version = "2.3.3", default-features = false }
colored = { version = "2.1.0" }
configparser = { version = "3.0.3" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
countme = { version ="3.0.1"}
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "5.5.3" }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
env_logger = { version ="0.10.1"}
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
fs-err = { version = "2.11.0" }
fs-err = { version ="2.11.0"}
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hashbrown = "0.14.3"
hexf-parse = { version = "0.2.1" }
hexf-parse = { version ="0.2.1"}
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imara-diff ={ version = "0.1.5"}
imperative = { version = "1.0.4" }
indexmap = { version = "2.2.6" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1", feature = ["filters", "glob"] }
insta-cmd = { version = "0.6.0" }
indicatif ={ version = "0.17.8"}
indoc ={ version = "2.0.4"}
insta = { version = "1.34.0", feature = ["filters", "glob"] }
insta-cmd = { version = "0.4.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.1" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
js-sys = { version = "0.3.67" }
lalrpop-util = { version = "0.20.0", default-features = false }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { version = "0.95.0", features = ["proposed"] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
mimalloc = { version ="0.1.39"}
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
parking_lot = "0.12.1"
pep440_rs = { version = "0.6.0", features = ["serde"] }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
proc-macro2 = { version = "1.0.78" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quick-junit = { version = "0.3.5" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
rayon = { version = "1.8.1" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
seahash = { version ="4.1.0"}
semver = { version = "1.0.21" }
serde = { version = "1.0.196", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.3" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
shellexpand = { version = "3.0.0" }
shlex = { version ="1.3.0"}
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
smallvec = { version = "1.13.1" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.40" }
tempfile = { version ="3.9.0"}
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.11" }
thiserror = { version = "1.0.57" }
tikv-jemallocator = { version ="0.5.0"}
toml = { version = "0.8.9" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unic-ucd-category = { version ="0.9"}
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
unicode_names2 = { version = "1.2.1" }
ureq = { version = "2.9.1" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wasm-bindgen = { version = "0.2.84" }
wasm-bindgen-test = { version = "0.3.40" }
wild = { version = "2" }

[workspace.lints.rust]
49 README.md
@@ -4,12 +4,11 @@

[](https://github.com/astral-sh/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://github.com/astral-sh/ruff/blob/main/LICENSE)
[](https://pypi.python.org/pypi/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://github.com/astral-sh/ruff/actions)
[](https://discord.com/invite/astral-sh)

[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)

An extremely fast Python linter and code formatter, written in Rust.

@@ -32,7 +31,7 @@ An extremely fast Python linter and code formatter, written in Rust.

- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)

@@ -50,7 +49,6 @@ times faster than any individual tool.

Ruff is extremely actively developed and used in major open-source projects like:

- [Apache Airflow](https://github.com/apache/airflow)
- [Apache Superset](https://github.com/apache/superset)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Hugging Face](https://github.com/huggingface/transformers)
- [Pandas](https://github.com/pandas-dev/pandas)

@@ -130,7 +128,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta

To run Ruff as a linter, try any of the following:

```shell
ruff check                          # Lint all files in the current directory (and any subdirectories).
ruff check .                        # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/            # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py        # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py  # Lint `file.py`.

@@ -140,7 +138,7 @@ ruff check @arguments.txt # Lint using an input file, treating its con

Or, to run Ruff as a formatter:

```shell
ruff format                          # Format all files in the current directory (and any subdirectories).
ruff format .                        # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/            # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py        # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py  # Format `file.py`.

@@ -152,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.4.3
  rev: v0.2.1
  hooks:
    # Run the linter.
    - id: ruff

@@ -174,7 +172,7 @@ jobs:

  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - uses: chartboost/ruff-action@v1
```

@@ -184,9 +182,10 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`

[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
for a complete list of all configuration options).

If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
If left unspecified, Ruff's default configuration is equivalent to:

```toml
[tool.ruff]
# Exclude a variety of commonly ignored directories.
exclude = [
    ".bzr",

@@ -224,7 +223,7 @@ indent-width = 4

# Assume Python 3.8
target-version = "py38"

[lint]
[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []

@@ -236,7 +235,7 @@ unfixable = []

# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[format]
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"

@@ -250,20 +249,11 @@ skip-magic-trailing-comma = false

line-ending = "auto"
```

Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
example, `[lint]` should be replaced with `[tool.ruff.lint]`.

Some configuration options can be provided via dedicated command-line arguments, such as those
related to rule enablement and disablement, file discovery, and logging level:
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, and logging level:

```shell
ruff check --select F401 --select F403 --quiet
```

The remaining configuration options can be provided through a catch-all `--config` argument:

```shell
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
ruff check path/to/code/ --select F401 --select F403 --quiet
```

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`

@@ -273,7 +263,7 @@ for more on the linting and formatting commands, respectively.

<!-- Begin section: Rules -->

**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
**Ruff supports over 700 lint rules**, many of which are inspired by popular tools like Flake8,
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.

@@ -351,14 +341,14 @@ For a complete enumeration of the supported rules, see [_Rules_](https://docs.as

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).

## Support

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).

## Acknowledgements

@@ -388,7 +378,6 @@ Ruff is released under the MIT license.

Ruff is used by a number of major open-source projects and companies, including:

- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)

@@ -415,7 +404,6 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)

@@ -430,7 +418,6 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Mypy](https://github.com/python/mypy)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)

@@ -499,7 +486,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m

## License

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
MIT

<div align="center">
  <a target="_blank" href="https://astral.sh" style="background:none">
@@ -3,17 +3,9 @@
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.

[default]
extend-ignore-re = [
    # Line ignore with trailing "spellchecker:disable-line"
    "(?Rm)^.*#\\s*spellchecker:disable-line$"
]

@@ -3,10 +3,5 @@ doc-valid-idents = [

    "CodeQL",
    "IPython",
    "NumPy",
    "LibCST",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "McCabe",
    "FastAPI",
    "..",
]
@@ -1,45 +0,0 @@
[package]
name = "red_knot"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_index = { path = "../ruff_index" }
ruff_notebook = { path = "../ruff_notebook" }

anyhow = { workspace = true }
bitflags = { workspace = true }
ctrlc = "3.4.4"
crossbeam = { workspace = true }
dashmap = { workspace = true }
hashbrown = { workspace = true }
indexmap = { workspace = true }
log = { workspace = true }
notify = { workspace = true }
parking_lot = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
smallvec = { workspace = true }
smol_str = "0.2.1"
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
textwrap = "0.16.1"
tempfile = { workspace = true }

[lints]
workspace = true
@@ -1,415 +0,0 @@
use std::any::type_name;
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;

use rustc_hash::FxHashMap;

use ruff_index::{Idx, IndexVec};
use ruff_python_ast::visitor::preorder;
use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal};
use ruff_python_ast::{
    AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
    NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
    StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
    TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
};
use ruff_text_size::{Ranged, TextRange};

/// A type agnostic ID that uniquely identifies an AST node in a file.
#[ruff_index::newtype_index]
pub struct AstId;

/// A typed ID that uniquely identifies an AST node in a file.
///
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
/// Typing the ID prevents mixing IDs of different node types and allows restricting the API to only accept
/// nodes for which an ID has been created (not all AST nodes get an ID).
pub struct TypedAstId<N: HasAstId> {
    erased: AstId,
    _marker: PhantomData<fn() -> N>,
}

impl<N: HasAstId> TypedAstId<N> {
    /// Upcasts this ID from a more specific node type to a more general node type.
    pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
    where
        N: Into<M>,
    {
        TypedAstId {
            erased: self.erased,
            _marker: PhantomData,
        }
    }
}

impl<N: HasAstId> Copy for TypedAstId<N> {}
impl<N: HasAstId> Clone for TypedAstId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: HasAstId> PartialEq for TypedAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.erased == other.erased
    }
}

impl<N: HasAstId> Eq for TypedAstId<N> {}
impl<N: HasAstId> Hash for TypedAstId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.erased.hash(state);
    }
}

impl<N: HasAstId> Debug for TypedAstId<N> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("TypedAstId")
            .field(&self.erased)
            .field(&type_name::<N>())
            .finish()
    }
}

pub struct AstIds {
    ids: IndexVec<AstId, NodeKey>,
    reverse: FxHashMap<NodeKey, AstId>,
}

impl AstIds {
    // TODO rust analyzer doesn't allocate an ID for every node. It only allocates ids for
    // nodes with a corresponding HIR element, that is nodes that are definitions.
    pub fn from_module(module: &ModModule) -> Self {
        let mut visitor = AstIdsVisitor::default();

        // TODO: visit_module?
        // Make sure we visit the root
        visitor.create_id(module);
        visitor.visit_body(&module.body);

        while let Some(deferred) = visitor.deferred.pop() {
            match deferred {
                DeferredNode::FunctionDefinition(def) => {
                    def.visit_preorder(&mut visitor);
                }
                DeferredNode::ClassDefinition(def) => def.visit_preorder(&mut visitor),
            }
        }

        AstIds {
            ids: visitor.ids,
            reverse: visitor.reverse,
        }
    }

    /// Returns the ID to the root node.
    pub fn root(&self) -> NodeKey {
        self.ids[AstId::new(0)]
    }

    /// Returns the [`TypedAstId`] for a node.
    pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
        let key = node.syntax_node_key();
        TypedAstId {
            erased: self.reverse.get(&key).copied().unwrap(),
            _marker: PhantomData,
        }
    }

    /// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
    pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
        let ast_id = self.ast_id_for_node_key(node.inner);

        TypedAstId {
            erased: ast_id,
            _marker: PhantomData,
        }
    }

    /// Returns the untyped [`AstId`] for the node identified by the given `node` key.
    pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
        self.reverse
            .get(&node)
            .copied()
            .expect("Can't find node in AstIds map.")
    }

    /// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
    pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
        let syntax_key = self.ids[id.erased];

        TypedNodeKey::new(syntax_key).unwrap()
    }

    pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
        self.ids[id.erased]
    }
}

impl std::fmt::Debug for AstIds {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut map = f.debug_map();
        for (key, value) in self.ids.iter_enumerated() {
            map.entry(&key, &value);
        }

        map.finish()
    }
}

impl PartialEq for AstIds {
    fn eq(&self, other: &Self) -> bool {
        self.ids == other.ids
    }
}

impl Eq for AstIds {}

#[derive(Default)]
struct AstIdsVisitor<'a> {
    ids: IndexVec<AstId, NodeKey>,
    reverse: FxHashMap<NodeKey, AstId>,
    deferred: Vec<DeferredNode<'a>>,
}

impl<'a> AstIdsVisitor<'a> {
    fn create_id<A: HasAstId>(&mut self, node: &A) {
        let node_key = node.syntax_node_key();

        let id = self.ids.push(node_key);
        self.reverse.insert(node_key, id);
    }
}

impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
        match stmt {
            Stmt::FunctionDef(def) => {
                self.create_id(def);
                self.deferred.push(DeferredNode::FunctionDefinition(def));
                return;
            }
            // TODO defer visiting the assignment body, type alias parameters etc?
            Stmt::ClassDef(def) => {
                self.create_id(def);
                self.deferred.push(DeferredNode::ClassDefinition(def));
                return;
            }
            Stmt::Expr(_) => {
                // Skip
                return;
            }
            Stmt::Return(_) => {}
            Stmt::Delete(_) => {}
            Stmt::Assign(assignment) => self.create_id(assignment),
            Stmt::AugAssign(assignment) => {
                self.create_id(assignment);
            }
            Stmt::AnnAssign(assignment) => self.create_id(assignment),
            Stmt::TypeAlias(assignment) => self.create_id(assignment),
            Stmt::For(_) => {}
            Stmt::While(_) => {}
            Stmt::If(_) => {}
            Stmt::With(_) => {}
            Stmt::Match(_) => {}
            Stmt::Raise(_) => {}
            Stmt::Try(_) => {}
            Stmt::Assert(_) => {}
            Stmt::Import(import) => self.create_id(import),
            Stmt::ImportFrom(import_from) => self.create_id(import_from),
            Stmt::Global(global) => self.create_id(global),
            Stmt::Nonlocal(non_local) => self.create_id(non_local),
            Stmt::Pass(_) => {}
            Stmt::Break(_) => {}
            Stmt::Continue(_) => {}
            Stmt::IpyEscapeCommand(_) => {}
        }

        preorder::walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, _expr: &'a Expr) {}

    fn visit_parameter(&mut self, parameter: &'a Parameter) {
        self.create_id(parameter);
        preorder::walk_parameter(self, parameter);
    }

    fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
        match except_handler {
            ExceptHandler::ExceptHandler(except_handler) => {
                self.create_id(except_handler);
            }
        }

        preorder::walk_except_handler(self, except_handler);
    }

    fn visit_with_item(&mut self, with_item: &'a WithItem) {
        self.create_id(with_item);
        preorder::walk_with_item(self, with_item);
    }

    fn visit_match_case(&mut self, match_case: &'a MatchCase) {
        self.create_id(match_case);
        preorder::walk_match_case(self, match_case);
    }

    fn visit_type_param(&mut self, type_param: &'a TypeParam) {
        self.create_id(type_param);
    }
}

enum DeferredNode<'a> {
    FunctionDefinition(&'a StmtFunctionDef),
    ClassDefinition(&'a StmtClassDef),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct TypedNodeKey<N: AstNode> {
    /// The type erased node key.
    inner: NodeKey,
    _marker: PhantomData<fn() -> N>,
}

impl<N: AstNode> TypedNodeKey<N> {
    pub fn from_node(node: &N) -> Self {
        let inner = NodeKey {
            kind: node.as_any_node_ref().kind(),
            range: node.range(),
        };
        Self {
            inner,
            _marker: PhantomData,
        }
    }

    pub fn new(node_key: NodeKey) -> Option<Self> {
        N::can_cast(node_key.kind).then_some(TypedNodeKey {
            inner: node_key,
            _marker: PhantomData,
        })
    }

    pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
        let node_ref = self.inner.resolve(root)?;

        Some(N::cast_ref(node_ref).unwrap())
    }

    pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
        self.resolve(root).expect("node should resolve")
    }

    pub fn erased(&self) -> &NodeKey {
        &self.inner
    }
}

struct FindNodeKeyVisitor<'a> {
    key: NodeKey,
    result: Option<AnyNodeRef<'a>>,
}

impl<'a> PreorderVisitor<'a> for FindNodeKeyVisitor<'a> {
    fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
        if self.result.is_some() {
            return TraversalSignal::Skip;
        }

        if node.range() == self.key.range && node.kind() == self.key.kind {
            self.result = Some(node);
            TraversalSignal::Skip
        } else if node.range().contains_range(self.key.range) {
            TraversalSignal::Traverse
        } else {
            TraversalSignal::Skip
        }
    }

    fn visit_body(&mut self, body: &'a [Stmt]) {
        // TODO it would be more efficient to use binary search instead of linear
        for stmt in body {
            if stmt.range().start() > self.key.range.end() {
                break;
            }

            self.visit_stmt(stmt);
        }
    }
}

// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
// This would allow reducing the size of this to a u32.
// It would be nice if we could use an `Arc::weak_ref` here, but that only works if we use
// `Arc` internally.
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct NodeKey {
    kind: NodeKind,
    range: TextRange,
}

impl NodeKey {
    pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
        // We need to do a binary search here. Only traverse into a node if the range is within the node
        let mut visitor = FindNodeKeyVisitor {
            key: *self,
            result: None,
        };

        if visitor.enter_node(root) == TraversalSignal::Traverse {
            root.visit_preorder(&mut visitor);
        }

        visitor.result
    }
}

/// Marker trait implemented by AST nodes for which we extract the `AstId`.
pub trait HasAstId: AstNode {
    fn node_key(&self) -> TypedNodeKey<Self>
    where
        Self: Sized,
    {
        TypedNodeKey {
            inner: self.syntax_node_key(),
            _marker: PhantomData,
        }
    }

    fn syntax_node_key(&self) -> NodeKey {
        NodeKey {
            kind: self.as_any_node_ref().kind(),
            range: self.range(),
        }
    }
}

impl HasAstId for StmtFunctionDef {}
impl HasAstId for StmtClassDef {}
impl HasAstId for StmtAnnAssign {}
impl HasAstId for StmtAugAssign {}
impl HasAstId for StmtAssign {}
impl HasAstId for StmtTypeAlias {}

impl HasAstId for ModModule {}

impl HasAstId for StmtImport {}

impl HasAstId for StmtImportFrom {}

impl HasAstId for Parameter {}

impl HasAstId for TypeParam {}
impl HasAstId for Stmt {}
impl HasAstId for TypeParamTypeVar {}
impl HasAstId for TypeParamTypeVarTuple {}
impl HasAstId for TypeParamParamSpec {}
impl HasAstId for StmtGlobal {}
impl HasAstId for StmtNonlocal {}

impl HasAstId for ExceptHandlerExceptHandler {}
impl HasAstId for WithItem {}
impl HasAstId for MatchCase {}
@@ -1,165 +0,0 @@
use std::fmt::Formatter;
use std::hash::Hash;
use std::sync::atomic::{AtomicUsize, Ordering};

use crate::db::QueryResult;
use dashmap::mapref::entry::Entry;

use crate::FxDashMap;

/// Simple key-value cache that locks on a per-key level.
pub struct KeyValueCache<K, V> {
    map: FxDashMap<K, V>,
    statistics: CacheStatistics,
}

impl<K, V> KeyValueCache<K, V>
where
    K: Eq + Hash + Clone,
    V: Clone,
{
    pub fn try_get(&self, key: &K) -> Option<V> {
        if let Some(existing) = self.map.get(key) {
            self.statistics.hit();
            Some(existing.clone())
        } else {
            self.statistics.miss();
            None
        }
    }

    pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
    where
        F: FnOnce(&K) -> QueryResult<V>,
    {
        Ok(match self.map.entry(key.clone()) {
            Entry::Occupied(cached) => {
                self.statistics.hit();

                cached.get().clone()
            }
            Entry::Vacant(vacant) => {
                self.statistics.miss();

                let value = compute(key)?;
                vacant.insert(value.clone());
                value
            }
        })
    }

    pub fn set(&mut self, key: K, value: V) {
        self.map.insert(key, value);
    }

    pub fn remove(&mut self, key: &K) -> Option<V> {
        self.map.remove(key).map(|(_, value)| value)
    }

    pub fn clear(&mut self) {
        self.map.clear();
        self.map.shrink_to_fit();
    }

    pub fn statistics(&self) -> Option<Statistics> {
        self.statistics.to_statistics()
    }
}

impl<K, V> Default for KeyValueCache<K, V>
where
    K: Eq + Hash,
    V: Clone,
{
    fn default() -> Self {
        Self {
            map: FxDashMap::default(),
            statistics: CacheStatistics::default(),
        }
    }
}

impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
where
    K: std::fmt::Debug + Eq + Hash,
    V: std::fmt::Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_map();

        for entry in &self.map {
            debug.entry(&entry.key(), &entry.value());
        }

        debug.finish()
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Statistics {
    pub hits: usize,
    pub misses: usize,
}

impl Statistics {
    #[allow(clippy::cast_precision_loss)]
    pub fn hit_rate(&self) -> Option<f64> {
        if self.hits + self.misses == 0 {
            return None;
        }

        Some((self.hits as f64) / (self.hits + self.misses) as f64)
    }
}

#[cfg(debug_assertions)]
pub type CacheStatistics = DebugStatistics;

#[cfg(not(debug_assertions))]
pub type CacheStatistics = ReleaseStatistics;

pub trait StatisticsRecorder {
    fn hit(&self);
    fn miss(&self);
    fn to_statistics(&self) -> Option<Statistics>;
}

#[derive(Debug, Default)]
pub struct DebugStatistics {
    hits: AtomicUsize,
    misses: AtomicUsize,
}

impl StatisticsRecorder for DebugStatistics {
    // TODO figure out the appropriate `Ordering`
    fn hit(&self) {
        self.hits.fetch_add(1, Ordering::SeqCst);
    }

    fn miss(&self) {
        self.misses.fetch_add(1, Ordering::SeqCst);
    }

    fn to_statistics(&self) -> Option<Statistics> {
        let hits = self.hits.load(Ordering::SeqCst);
        let misses = self.misses.load(Ordering::SeqCst);

        Some(Statistics { hits, misses })
    }
}

#[derive(Debug, Default)]
pub struct ReleaseStatistics;

impl StatisticsRecorder for ReleaseStatistics {
    #[inline]
    fn hit(&self) {}

    #[inline]
    fn miss(&self) {}

    #[inline]
    fn to_statistics(&self) -> Option<Statistics> {
        None
    }
}
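Aside: the compute-on-miss pattern in `KeyValueCache::get` can be illustrated with plain std types. A minimal sketch (the real cache uses a DashMap so different keys don't contend on one global lock; the `Cache` type here is hypothetical):

use std::collections::HashMap;
use std::sync::Mutex;

// Compute-on-miss cache behind a single mutex, for illustration only.
struct Cache {
    map: Mutex<HashMap<u32, String>>,
}

impl Cache {
    fn get(&self, key: u32, compute: impl FnOnce(u32) -> String) -> String {
        let mut map = self.map.lock().unwrap();
        // Compute the value only if the key is vacant, then cache it.
        map.entry(key).or_insert_with(|| compute(key)).clone()
    }
}

fn main() {
    let cache = Cache { map: Mutex::new(HashMap::new()) };
    // First call computes; second call hits the cache.
    assert_eq!(cache.get(1, |k| format!("value-{k}")), "value-1");
    assert_eq!(cache.get(1, |_| unreachable!("should be cached")), "value-1");
}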
@@ -1,42 +0,0 @@
use std::sync::atomic::AtomicBool;
use std::sync::Arc;

#[derive(Debug, Clone, Default)]
pub struct CancellationTokenSource {
    signal: Arc<AtomicBool>,
}

impl CancellationTokenSource {
    pub fn new() -> Self {
        Self {
            signal: Arc::new(AtomicBool::new(false)),
        }
    }

    #[tracing::instrument(level = "trace", skip_all)]
    pub fn cancel(&self) {
        self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
    }

    pub fn is_cancelled(&self) -> bool {
        self.signal.load(std::sync::atomic::Ordering::SeqCst)
    }

    pub fn token(&self) -> CancellationToken {
        CancellationToken {
            signal: self.signal.clone(),
        }
    }
}

#[derive(Clone, Debug)]
pub struct CancellationToken {
    signal: Arc<AtomicBool>,
}

impl CancellationToken {
    /// Returns `true` if cancellation has been requested.
    pub fn is_cancelled(&self) -> bool {
        self.signal.load(std::sync::atomic::Ordering::SeqCst)
    }
}
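Aside: the source/token split above is a shared `AtomicBool` behind an `Arc`. A self-contained, runnable sketch of the same mechanism (std only; the worker loop is a stand-in, not red_knot code):

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

fn main() {
    // Same shape as `CancellationTokenSource`/`CancellationToken`: one writer
    // flips a shared flag; workers poll it at convenient points.
    let signal = Arc::new(AtomicBool::new(false));

    let token = signal.clone();
    let worker = thread::spawn(move || {
        let mut iterations = 0u64;
        while !token.load(Ordering::SeqCst) {
            iterations += 1; // stand-in for a unit of query work
            thread::sleep(Duration::from_millis(1));
        }
        iterations
    });

    thread::sleep(Duration::from_millis(20));
    signal.store(true, Ordering::SeqCst); // cancel()
    println!("worker ran {} iterations before cancellation", worker.join().unwrap());
}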
@@ -1,297 +0,0 @@
use std::path::Path;
use std::sync::Arc;

pub use jars::{HasJar, HasJars};
pub use query::{QueryError, QueryResult};
pub use runtime::DbRuntime;
pub use storage::JarsStorage;

use crate::files::FileId;
use crate::lint::{Diagnostics, LintSemanticStorage, LintSyntaxStorage};
use crate::module::{Module, ModuleData, ModuleName, ModuleResolver, ModuleSearchPath};
use crate::parse::{Parsed, ParsedStorage};
use crate::source::{Source, SourceStorage};
use crate::symbols::{SymbolId, SymbolTable, SymbolTablesStorage};
use crate::types::{Type, TypeStore};

mod jars;
mod query;
mod runtime;
mod storage;

pub trait Database {
    /// Returns a reference to the runtime of the current worker.
    fn runtime(&self) -> &DbRuntime;

    /// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
    fn runtime_mut(&mut self) -> &mut DbRuntime;

    /// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
    fn cancelled(&self) -> QueryResult<()> {
        self.runtime().cancelled()
    }

    /// Returns `true` if the queries have been cancelled.
    fn is_cancelled(&self) -> bool {
        self.runtime().is_cancelled()
    }
}

/// Database that supports running queries from multiple threads.
pub trait ParallelDatabase: Database + Send {
    /// Creates a snapshot of the database state that can be used to query the database in another thread.
    ///
    /// The snapshot is a read-only view of the database, but query results are shared between threads.
    /// All queries are automatically cancelled when applying any mutation (calling [`HasJars::jars_mut`])
    /// to the database (not to a snapshot, because snapshots are read-only).
    ///
    /// ## Creating a snapshot
    ///
    /// Creating a snapshot of the database's jars is cheap, but creating a snapshot of
    /// other state stored on the database might require deep-cloning data. That's why you should
    /// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file; instead,
    /// create a snapshot when scheduling the check of an entire program).
    ///
    /// ## Salsa compatibility
    /// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
    /// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
    /// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run; that's
    /// why we have to enforce this constraint manually.
    #[must_use]
    fn snapshot(&self) -> Snapshot<Self>;
}

/// Read-only snapshot of a database.
///
/// ## Deadlocks
/// A snapshot should always be dropped as soon as it is no longer needed to run queries.
/// Holding on to a snapshot without running a query or periodically checking whether cancellation was requested
/// can lead to deadlocks, because mutating the [`Database`] requires cancelling all pending queries
/// and waiting for all [`Snapshot`]s to be dropped.
#[derive(Debug)]
pub struct Snapshot<DB: ?Sized>
where
    DB: ParallelDatabase,
{
    db: DB,
}

impl<DB> Snapshot<DB>
where
    DB: ParallelDatabase,
{
    pub fn new(db: DB) -> Self {
        Snapshot { db }
    }
}

impl<DB> std::ops::Deref for Snapshot<DB>
where
    DB: ParallelDatabase,
{
    type Target = DB;

    fn deref(&self) -> &DB {
        &self.db
    }
}

// Red-knot-specific database code.

pub trait SourceDb: Database {
    // queries
    fn file_id(&self, path: &std::path::Path) -> FileId;

    fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;

    fn source(&self, file_id: FileId) -> QueryResult<Source>;

    fn parse(&self, file_id: FileId) -> QueryResult<Parsed>;
}

pub trait SemanticDb: SourceDb {
    // queries
    fn resolve_module(&self, name: ModuleName) -> QueryResult<Option<Module>>;

    fn file_to_module(&self, file_id: FileId) -> QueryResult<Option<Module>>;

    fn path_to_module(&self, path: &Path) -> QueryResult<Option<Module>>;

    fn symbol_table(&self, file_id: FileId) -> QueryResult<Arc<SymbolTable>>;

    fn infer_symbol_type(&self, file_id: FileId, symbol_id: SymbolId) -> QueryResult<Type>;

    // mutations

    fn add_module(&mut self, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)>;

    fn set_module_search_paths(&mut self, paths: Vec<ModuleSearchPath>);
}

pub trait LintDb: SemanticDb {
    fn lint_syntax(&self, file_id: FileId) -> QueryResult<Diagnostics>;

    fn lint_semantic(&self, file_id: FileId) -> QueryResult<Diagnostics>;
}

pub trait Db: LintDb {}

#[derive(Debug, Default)]
pub struct SourceJar {
    pub sources: SourceStorage,
    pub parsed: ParsedStorage,
}

#[derive(Debug, Default)]
pub struct SemanticJar {
    pub module_resolver: ModuleResolver,
    pub symbol_tables: SymbolTablesStorage,
    pub type_store: TypeStore,
}

#[derive(Debug, Default)]
pub struct LintJar {
    pub lint_syntax: LintSyntaxStorage,
    pub lint_semantic: LintSemanticStorage,
}

#[cfg(test)]
pub(crate) mod tests {
    use std::path::Path;
    use std::sync::Arc;

    use crate::db::{
        Database, DbRuntime, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult, SourceDb,
        SourceJar,
    };
    use crate::files::{FileId, Files};
    use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
    use crate::module::{
        add_module, file_to_module, path_to_module, resolve_module, set_module_search_paths,
        Module, ModuleData, ModuleName, ModuleSearchPath,
    };
    use crate::parse::{parse, Parsed};
    use crate::source::{source_text, Source};
    use crate::symbols::{symbol_table, SymbolId, SymbolTable};
    use crate::types::{infer_symbol_type, Type};

    use super::{SemanticDb, SemanticJar};

    // This can be a partial database used in a single crate for testing.
    // It would hold less data than the full database.
    #[derive(Debug, Default)]
    pub(crate) struct TestDb {
        files: Files,
        jars: JarsStorage<Self>,
    }

    impl HasJar<SourceJar> for TestDb {
        fn jar(&self) -> QueryResult<&SourceJar> {
            Ok(&self.jars()?.0)
        }

        fn jar_mut(&mut self) -> &mut SourceJar {
            &mut self.jars_mut().0
        }
    }

    impl HasJar<SemanticJar> for TestDb {
        fn jar(&self) -> QueryResult<&SemanticJar> {
            Ok(&self.jars()?.1)
        }

        fn jar_mut(&mut self) -> &mut SemanticJar {
            &mut self.jars_mut().1
        }
    }

    impl HasJar<LintJar> for TestDb {
        fn jar(&self) -> QueryResult<&LintJar> {
            Ok(&self.jars()?.2)
        }

        fn jar_mut(&mut self) -> &mut LintJar {
            &mut self.jars_mut().2
        }
    }

    impl SourceDb for TestDb {
        fn file_id(&self, path: &Path) -> FileId {
            self.files.intern(path)
        }

        fn file_path(&self, file_id: FileId) -> Arc<Path> {
            self.files.path(file_id)
        }

        fn source(&self, file_id: FileId) -> QueryResult<Source> {
            source_text(self, file_id)
        }

        fn parse(&self, file_id: FileId) -> QueryResult<Parsed> {
            parse(self, file_id)
        }
    }

    impl SemanticDb for TestDb {
        fn resolve_module(&self, name: ModuleName) -> QueryResult<Option<Module>> {
            resolve_module(self, name)
        }

        fn file_to_module(&self, file_id: FileId) -> QueryResult<Option<Module>> {
            file_to_module(self, file_id)
        }

        fn path_to_module(&self, path: &Path) -> QueryResult<Option<Module>> {
            path_to_module(self, path)
        }

        fn symbol_table(&self, file_id: FileId) -> QueryResult<Arc<SymbolTable>> {
            symbol_table(self, file_id)
        }

        fn infer_symbol_type(&self, file_id: FileId, symbol_id: SymbolId) -> QueryResult<Type> {
            infer_symbol_type(self, file_id, symbol_id)
        }

        fn add_module(&mut self, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)> {
            add_module(self, path)
        }

        fn set_module_search_paths(&mut self, paths: Vec<ModuleSearchPath>) {
            set_module_search_paths(self, paths);
        }
    }

    impl LintDb for TestDb {
        fn lint_syntax(&self, file_id: FileId) -> QueryResult<Diagnostics> {
            lint_syntax(self, file_id)
        }

        fn lint_semantic(&self, file_id: FileId) -> QueryResult<Diagnostics> {
            lint_semantic(self, file_id)
        }
    }

    impl HasJars for TestDb {
        type Jars = (SourceJar, SemanticJar, LintJar);

        fn jars(&self) -> QueryResult<&Self::Jars> {
            self.jars.jars()
        }

        fn jars_mut(&mut self) -> &mut Self::Jars {
            self.jars.jars_mut()
        }
    }

    impl Database for TestDb {
        fn runtime(&self) -> &DbRuntime {
            self.jars.runtime()
        }

        fn runtime_mut(&mut self) -> &mut DbRuntime {
            self.jars.runtime_mut()
        }
    }
}
@@ -1,37 +0,0 @@
use crate::db::query::QueryResult;

/// Gives access to a specific jar in the database.
///
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
/// which is an analogy to storing the salsa in different jars.
///
/// The basic idea is that each crate can define its own jar and the jars can be combined into a single
/// database in the top-level crate. Each crate also defines its own `Database` trait. The combination of
/// the `Database` trait and the jar allows writing queries in isolation without having to know how they get composed at the upper levels.
///
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
/// to use a macro to generate the queries.
pub trait HasJar<T> {
    /// Gives a read-only reference to the jar.
    fn jar(&self) -> QueryResult<&T>;

    /// Gives a mutable reference to the jar.
    fn jar_mut(&mut self) -> &mut T;
}

/// Gives access to the jars in a database.
pub trait HasJars {
    /// A type storing the jars.
    ///
    /// Most commonly, this is a tuple where each jar is a tuple element.
    type Jars: Default;

    /// Gives access to the underlying jars but tests if the queries have been cancelled.
    ///
    /// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
    fn jars(&self) -> QueryResult<&Self::Jars>;

    /// Gives mutable access to the underlying jars.
    fn jars_mut(&mut self) -> &mut Self::Jars;
}
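Aside: the composition described in the doc comment above can be shown in a few lines. A minimal sketch (the jar contents and `record_parse` query are made up for illustration; only the `HasJar` shape mirrors the trait above):

// Each "crate" defines its own jar; the top-level database implements
// `HasJar` for every jar it contains.
#[derive(Default, Debug)]
struct SourceJar { files_parsed: usize }

#[derive(Default, Debug)]
struct LintJar { diagnostics_emitted: usize }

trait HasJar<T> {
    fn jar(&self) -> &T;
    fn jar_mut(&mut self) -> &mut T;
}

// A query written against `HasJar<SourceJar>` works with *any* database that
// contains a `SourceJar`, without knowing about the other jars.
fn record_parse<Db: HasJar<SourceJar>>(db: &mut Db) {
    db.jar_mut().files_parsed += 1;
}

#[derive(Default)]
struct Database { jars: (SourceJar, LintJar) }

impl HasJar<SourceJar> for Database {
    fn jar(&self) -> &SourceJar { &self.jars.0 }
    fn jar_mut(&mut self) -> &mut SourceJar { &mut self.jars.0 }
}

impl HasJar<LintJar> for Database {
    fn jar(&self) -> &LintJar { &self.jars.1 }
    fn jar_mut(&mut self) -> &mut LintJar { &mut self.jars.1 }
}

fn main() {
    let mut db = Database::default();
    record_parse(&mut db);
    println!("{:?}", <Database as HasJar<SourceJar>>::jar(&db));
}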
@@ -1,20 +0,0 @@
use std::fmt::{Display, Formatter};

/// Reason why a db query operation failed.
#[derive(Debug, Clone, Copy)]
pub enum QueryError {
    /// The query was cancelled because the DB was mutated or because the host requested cancellation (e.g. on a file change or when pressing CTRL+C).
    Cancelled,
}

impl Display for QueryError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            QueryError::Cancelled => f.write_str("query was cancelled"),
        }
    }
}

impl std::error::Error for QueryError {}

pub type QueryResult<T> = Result<T, QueryError>;
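Aside: `QueryResult` threads cancellation through whole call chains via the `?` operator. A minimal, runnable sketch (the `parse`/`lint` functions are stand-ins, not the real queries):

// Every query checks `cancelled()?`; once the flag is set, `?` unwinds the
// whole stack of in-flight queries.
#[derive(Debug)]
enum QueryError { Cancelled }

type QueryResult<T> = Result<T, QueryError>;

fn cancelled(is_cancelled: bool) -> QueryResult<()> {
    if is_cancelled { Err(QueryError::Cancelled) } else { Ok(()) }
}

fn parse(is_cancelled: bool) -> QueryResult<&'static str> {
    cancelled(is_cancelled)?; // early exit point
    Ok("module ast")
}

fn lint(is_cancelled: bool) -> QueryResult<Vec<String>> {
    let _ast = parse(is_cancelled)?; // propagates `Cancelled` upward
    Ok(vec![])
}

fn main() {
    assert!(lint(false).is_ok());
    assert!(matches!(lint(true), Err(QueryError::Cancelled)));
}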
@@ -1,41 +0,0 @@
use crate::cancellation::CancellationTokenSource;
use crate::db::{QueryError, QueryResult};

/// Holds the jar-agnostic state of the database.
#[derive(Debug, Default)]
pub struct DbRuntime {
    /// The cancellation token source used to signal other workers that the queries should be aborted and
    /// exit at the next possible point.
    cancellation_token: CancellationTokenSource,
}

impl DbRuntime {
    pub(super) fn snapshot(&self) -> Self {
        Self {
            cancellation_token: self.cancellation_token.clone(),
        }
    }

    /// Cancels the pending queries of other workers. The current worker cannot have any pending
    /// queries because we're holding a mutable reference to the runtime.
    pub(super) fn cancel_other_workers(&mut self) {
        self.cancellation_token.cancel();
        // Set a new cancellation token so that we're in a non-cancelled state again when running the next
        // query.
        self.cancellation_token = CancellationTokenSource::default();
    }

    /// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
    pub(super) fn cancelled(&self) -> QueryResult<()> {
        if self.cancellation_token.is_cancelled() {
            Err(QueryError::Cancelled)
        } else {
            Ok(())
        }
    }

    /// Returns `true` if the queries have been cancelled.
    pub(super) fn is_cancelled(&self) -> bool {
        self.cancellation_token.is_cancelled()
    }
}
@@ -1,117 +0,0 @@
use std::fmt::Formatter;
use std::sync::Arc;

use crossbeam::sync::WaitGroup;

use crate::db::query::QueryResult;
use crate::db::runtime::DbRuntime;
use crate::db::{HasJars, ParallelDatabase};

/// Stores the jars of a database and the state for each worker.
///
/// Today, all state is shared across all workers, but it may be desirable to store data per worker in the future.
pub struct JarsStorage<T>
where
    T: HasJars + Sized,
{
    // It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
    // See https://doc.rust-lang.org/reference/destructors.html
    /// Stores the jars of the database.
    jars: Arc<T::Jars>,

    /// Used to count the references to `jars`. Allows implementing `jars_mut` without requiring to clone `jars`.
    jars_wait_group: WaitGroup,

    /// The jar-agnostic state.
    runtime: DbRuntime,
}

impl<Db> JarsStorage<Db>
where
    Db: HasJars,
{
    pub(super) fn new() -> Self {
        Self {
            jars: Arc::new(Db::Jars::default()),
            jars_wait_group: WaitGroup::default(),
            runtime: DbRuntime::default(),
        }
    }

    /// Creates a snapshot of the jars.
    ///
    /// Creating the snapshot is cheap because it doesn't clone the jars; it only increments a ref counter.
    #[must_use]
    pub fn snapshot(&self) -> JarsStorage<Db>
    where
        Db: ParallelDatabase,
    {
        Self {
            jars: self.jars.clone(),
            jars_wait_group: self.jars_wait_group.clone(),
            runtime: self.runtime.snapshot(),
        }
    }

    pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
        self.runtime.cancelled()?;
        Ok(&self.jars)
    }

    /// Returns a mutable reference to the jars without cloning their content.
    ///
    /// The method cancels any pending queries of other workers and waits for them to complete so that
    /// this instance is the only one holding a reference to the jars.
    pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
        // We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
        self.cancel_other_workers();

        // Now all other references to `self.jars` have been released. We can safely return a mutable reference
        // to the Arc's content.
        let jars =
            Arc::get_mut(&mut self.jars).expect("All references to jars should have been released");

        jars
    }

    pub(crate) fn runtime(&self) -> &DbRuntime {
        &self.runtime
    }

    pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
        // Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` ever stores data that is shared between workers.
        &mut self.runtime
    }

    #[tracing::instrument(level = "trace", skip(self))]
    fn cancel_other_workers(&mut self) {
        self.runtime.cancel_other_workers();

        // Wait for all other workers to complete.
        let existing_wait = std::mem::take(&mut self.jars_wait_group);
        existing_wait.wait();
    }
}

impl<Db> Default for JarsStorage<Db>
where
    Db: HasJars,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<T> std::fmt::Debug for JarsStorage<T>
where
    T: HasJars,
    <T as HasJars>::Jars: std::fmt::Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("JarsStorage")
            .field("jars", &self.jars)
            .field("jars_wait_group", &self.jars_wait_group)
            .field("runtime", &self.runtime)
            .finish()
    }
}
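Aside: the core of `jars_mut` is `Arc::get_mut`, which only succeeds once every snapshot has been dropped. A std-only, runnable sketch of that barrier (using a thread join where the real code uses a `WaitGroup`):

use std::sync::Arc;
use std::thread;

fn main() {
    // Snapshots share the data via `Arc`; once every clone is dropped
    // (here: the worker thread finished), `Arc::get_mut` grants in-place
    // mutable access without a deep clone.
    let mut jars = Arc::new(vec![1, 2, 3]);

    let snapshot = Arc::clone(&jars);
    let worker = thread::spawn(move || snapshot.iter().sum::<i32>());
    let sum = worker.join().unwrap(); // `snapshot` is dropped by here

    // All other references released; mutation succeeds.
    Arc::get_mut(&mut jars)
        .expect("all snapshots should have been dropped")
        .push(sum);

    assert_eq!(*jars, vec![1, 2, 3, 6]);
}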
@@ -1,158 +0,0 @@
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::path::Path;
use std::sync::Arc;

use hashbrown::hash_map::RawEntryMut;
use parking_lot::RwLock;
use rustc_hash::FxHasher;

use ruff_index::{newtype_index, IndexVec};

type Map<K, V> = hashbrown::HashMap<K, V, ()>;

#[newtype_index]
pub struct FileId;

// TODO we'll need a higher-level virtual file system abstraction that allows testing whether a file exists
// or retrieving its content (ideally lazily and in a way that the memory can be retained later).
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
#[derive(Default)]
pub struct Files {
    inner: Arc<RwLock<FilesInner>>,
}

impl Files {
    #[tracing::instrument(level = "debug", skip(self))]
    pub fn intern(&self, path: &Path) -> FileId {
        self.inner.write().intern(path)
    }

    pub fn try_get(&self, path: &Path) -> Option<FileId> {
        self.inner.read().try_get(path)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub fn path(&self, id: FileId) -> Arc<Path> {
        self.inner.read().path(id)
    }

    /// Snapshots files for a new database snapshot.
    ///
    /// This method should not be used outside a database snapshot.
    #[must_use]
    pub fn snapshot(&self) -> Files {
        Files {
            inner: self.inner.clone(),
        }
    }
}

impl Debug for Files {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let files = self.inner.read();
        let mut debug = f.debug_map();
        for item in files.iter() {
            debug.entry(&item.0, &item.1);
        }

        debug.finish()
    }
}

impl PartialEq for Files {
    fn eq(&self, other: &Self) -> bool {
        self.inner.read().eq(&other.inner.read())
    }
}

impl Eq for Files {}

#[derive(Default)]
struct FilesInner {
    by_path: Map<FileId, ()>,
    // TODO should we use a map here to reclaim the space for removed files?
    // TODO I think we should use our own path abstraction here to avoid having to normalize paths
    // and deal with non-UTF-8 paths everywhere.
    by_id: IndexVec<FileId, Arc<Path>>,
}

impl FilesInner {
    /// Inserts the path and returns a new id for it, or returns the existing id if the path was already interned.
    // TODO should this accept Path or PathBuf?
    pub(crate) fn intern(&mut self, path: &Path) -> FileId {
        let mut hasher = FxHasher::default();
        path.hash(&mut hasher);
        let hash = hasher.finish();

        let entry = self
            .by_path
            .raw_entry_mut()
            .from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);

        match entry {
            RawEntryMut::Occupied(entry) => *entry.key(),
            RawEntryMut::Vacant(entry) => {
                let id = self.by_id.push(Arc::from(path));
                entry.insert_with_hasher(hash, id, (), |_| hash);
                id
            }
        }
    }

    pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
        let mut hasher = FxHasher::default();
        path.hash(&mut hasher);
        let hash = hasher.finish();

        Some(
            *self
                .by_path
                .raw_entry()
                .from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
                .0,
        )
    }

    /// Returns the path for the file with the given id.
    pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
        self.by_id[id].clone()
    }

    pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
        self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
    }
}

impl PartialEq for FilesInner {
    fn eq(&self, other: &Self) -> bool {
        self.by_id == other.by_id
    }
}

impl Eq for FilesInner {}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn insert_path_twice_same_id() {
        let files = Files::default();
        let path = PathBuf::from("foo/bar");
        let id1 = files.intern(&path);
        let id2 = files.intern(&path);
        assert_eq!(id1, id2);
    }

    #[test]
    fn insert_different_paths_different_ids() {
        let files = Files::default();
        let path1 = PathBuf::from("foo/bar");
        let path2 = PathBuf::from("foo/bar/baz");
        let id1 = files.intern(&path1);
        let id2 = files.intern(&path2);
        assert_ne!(id1, id2);
    }
}
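Aside: the interning scheme above is "ids index into `by_id`; the map answers whether a path was seen before". A minimal, runnable sketch using a plain HashMap (the real version avoids storing the path twice by using hashbrown's raw-entry API, which this simplified `Files` cannot express):

use std::collections::HashMap;
use std::path::{Path, PathBuf};

#[derive(Default)]
struct Files {
    by_path: HashMap<PathBuf, u32>,
    by_id: Vec<PathBuf>,
}

impl Files {
    fn intern(&mut self, path: &Path) -> u32 {
        if let Some(&id) = self.by_path.get(path) {
            return id; // already interned: same path, same id
        }
        let id = self.by_id.len() as u32;
        self.by_id.push(path.to_path_buf());
        self.by_path.insert(path.to_path_buf(), id);
        id
    }

    fn path(&self, id: u32) -> &Path {
        &self.by_id[id as usize]
    }
}

fn main() {
    let mut files = Files::default();
    let a = files.intern(Path::new("src/lib.rs"));
    let b = files.intern(Path::new("src/lib.rs"));
    assert_eq!(a, b);
    assert_eq!(files.path(a), Path::new("src/lib.rs"));
}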
@@ -1,67 +0,0 @@
//! Key observations
//!
//! The HIR avoids allocations to a large extent by:
//! * Using an arena per node type
//! * Using ids and id ranges to reference items.
//!
//! Using a separate arena per node type has the advantage that the IDs are relatively stable, because
//! they only change when a node of the same kind has been added or removed. (What's unclear is whether that matters or whether
//! it still triggers a re-compute because the AST id in the node has changed.)
//!
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
//! back to the AST node to get more details.

use crate::ast_ids::{HasAstId, TypedAstId};
use crate::files::FileId;
use std::fmt::Formatter;
use std::hash::{Hash, Hasher};

pub struct HirAstId<N: HasAstId> {
    file_id: FileId,
    node_id: TypedAstId<N>,
}

impl<N: HasAstId> Copy for HirAstId<N> {}
impl<N: HasAstId> Clone for HirAstId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: HasAstId> PartialEq for HirAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.file_id == other.file_id && self.node_id == other.node_id
    }
}

impl<N: HasAstId> Eq for HirAstId<N> {}

impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HirAstId")
            .field("file_id", &self.file_id)
            .field("node_id", &self.node_id)
            .finish()
    }
}

impl<N: HasAstId> Hash for HirAstId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.file_id.hash(state);
        self.node_id.hash(state);
    }
}

impl<N: HasAstId> HirAstId<N> {
    pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
    where
        N: Into<M>,
    {
        HirAstId {
            file_id: self.file_id,
            node_id: self.node_id.upcast(),
        }
    }
}
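Aside: `HirAstId` implements `Copy`/`Clone`/`PartialEq`/`Eq`/`Hash` by hand rather than deriving them because a derive adds an `N: Clone` (etc.) bound even when `N` is only a phantom type parameter, which would needlessly require every AST node type to be cloneable. A minimal, runnable sketch of the pattern (the `Id` type is hypothetical):

use std::marker::PhantomData;

struct Id<N> {
    raw: u32,
    _marker: PhantomData<N>,
}

// Manual impls place no bounds on `N`.
impl<N> Copy for Id<N> {}
impl<N> Clone for Id<N> {
    fn clone(&self) -> Self {
        *self
    }
}

struct NotClone; // deliberately neither Clone nor Copy

fn main() {
    let id: Id<NotClone> = Id { raw: 42, _marker: PhantomData };
    let copy = id; // works because `Copy` carries no `N: Copy` bound
    assert_eq!(copy.raw, 42);
}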
@@ -1,556 +0,0 @@
use std::ops::{Index, Range};

use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast::visitor::preorder;
use ruff_python_ast::visitor::preorder::PreorderVisitor;
use ruff_python_ast::{
    Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
    StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
    StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
    TypeParamTypeVarTuple, WithItem,
};

use crate::ast_ids::{AstIds, HasAstId};
use crate::files::FileId;
use crate::hir::HirAstId;
use crate::Name;

#[newtype_index]
pub struct FunctionId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Function {
    ast_id: HirAstId<StmtFunctionDef>,
    name: Name,
    parameters: Range<ParameterId>,
    type_parameters: Range<TypeParameterId>, // TODO: return expression, decorators
}

#[newtype_index]
pub struct ParameterId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Parameter {
    kind: ParameterKind,
    name: Name,
    default: Option<()>, // TODO use expression HIR
    ast_id: HirAstId<ruff_python_ast::Parameter>,
}

// TODO or should `Parameter` be an enum?
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ParameterKind {
    PositionalOnly,
    Arguments,
    Vararg,
    KeywordOnly,
    Kwarg,
}

#[newtype_index]
pub struct ClassId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Class {
    name: Name,
    ast_id: HirAstId<StmtClassDef>,
    // TODO type parameters, inheritance, decorators, members
}

#[newtype_index]
pub struct AssignmentId;

// This can have more than one name...
// but that means we can't implement `name()` on `ModuleItem`.

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Assignment {
    // TODO: Handle multiple names / targets
    name: Name,
    ast_id: HirAstId<StmtAssign>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct AnnotatedAssignment {
    name: Name,
    ast_id: HirAstId<StmtAnnAssign>,
}

#[newtype_index]
pub struct AnnotatedAssignmentId;

#[newtype_index]
pub struct TypeAliasId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeAlias {
    name: Name,
    ast_id: HirAstId<StmtTypeAlias>,
    parameters: Range<TypeParameterId>,
}

#[newtype_index]
pub struct TypeParameterId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TypeParameter {
    TypeVar(TypeParameterTypeVar),
    ParamSpec(TypeParameterParamSpec),
    TypeVarTuple(TypeParameterTypeVarTuple),
}

impl TypeParameter {
    pub fn ast_id(&self) -> HirAstId<TypeParam> {
        match self {
            TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
            TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
            TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterTypeVar {
    name: Name,
    ast_id: HirAstId<TypeParamTypeVar>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterParamSpec {
    name: Name,
    ast_id: HirAstId<TypeParamParamSpec>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterTypeVarTuple {
    name: Name,
    ast_id: HirAstId<TypeParamTypeVarTuple>,
}

#[newtype_index]
pub struct GlobalId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Global {
    // TODO track names
    ast_id: HirAstId<StmtGlobal>,
}

#[newtype_index]
pub struct NonLocalId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NonLocal {
    // TODO track names
    ast_id: HirAstId<StmtNonlocal>,
}

pub enum DefinitionId {
    Function(FunctionId),
    Parameter(ParameterId),
    Class(ClassId),
    Assignment(AssignmentId),
    AnnotatedAssignment(AnnotatedAssignmentId),
    Global(GlobalId),
    NonLocal(NonLocalId),
    TypeParameter(TypeParameterId),
    TypeAlias(TypeAlias),
}

pub enum DefinitionItem {
    Function(Function),
    Parameter(Parameter),
    Class(Class),
    Assignment(Assignment),
    AnnotatedAssignment(AnnotatedAssignment),
    Global(Global),
    NonLocal(NonLocal),
    TypeParameter(TypeParameter),
    TypeAlias(TypeAlias),
}

// The closest analog is rust-analyzer's item tree. It only represents "items", which make up the public interface of a module
// (it excludes any other statement or expression). rust-analyzer uses it as the main input to the name resolution
// algorithm:
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
// > `data.rs`, and most things in `attr.rs`.
//
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
//
// I haven't fully figured this out, but I think this composes the "public" interface of a module?
// Maybe that's too optimistic.
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct Definitions {
    functions: IndexVec<FunctionId, Function>,
    parameters: IndexVec<ParameterId, Parameter>,
    classes: IndexVec<ClassId, Class>,
    assignments: IndexVec<AssignmentId, Assignment>,
    annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
    type_aliases: IndexVec<TypeAliasId, TypeAlias>,
    type_parameters: IndexVec<TypeParameterId, TypeParameter>,
    globals: IndexVec<GlobalId, Global>,
    non_locals: IndexVec<NonLocalId, NonLocal>,
}

impl Definitions {
    pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
        let mut visitor = DefinitionsVisitor {
            definitions: Definitions::default(),
            ast_ids,
            file_id,
        };

        visitor.visit_body(&module.body);

        visitor.definitions
    }
}

impl Index<FunctionId> for Definitions {
    type Output = Function;

    fn index(&self, index: FunctionId) -> &Self::Output {
        &self.functions[index]
    }
}

impl Index<ParameterId> for Definitions {
    type Output = Parameter;

    fn index(&self, index: ParameterId) -> &Self::Output {
        &self.parameters[index]
    }
}

impl Index<ClassId> for Definitions {
    type Output = Class;

    fn index(&self, index: ClassId) -> &Self::Output {
        &self.classes[index]
    }
}

impl Index<AssignmentId> for Definitions {
    type Output = Assignment;

    fn index(&self, index: AssignmentId) -> &Self::Output {
        &self.assignments[index]
    }
}

impl Index<AnnotatedAssignmentId> for Definitions {
    type Output = AnnotatedAssignment;

    fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
        &self.annotated_assignments[index]
    }
}

impl Index<TypeAliasId> for Definitions {
    type Output = TypeAlias;

    fn index(&self, index: TypeAliasId) -> &Self::Output {
        &self.type_aliases[index]
    }
}

impl Index<GlobalId> for Definitions {
    type Output = Global;

    fn index(&self, index: GlobalId) -> &Self::Output {
        &self.globals[index]
    }
}

impl Index<NonLocalId> for Definitions {
    type Output = NonLocal;

    fn index(&self, index: NonLocalId) -> &Self::Output {
        &self.non_locals[index]
    }
}

impl Index<TypeParameterId> for Definitions {
    type Output = TypeParameter;

    fn index(&self, index: TypeParameterId) -> &Self::Output {
        &self.type_parameters[index]
    }
}

struct DefinitionsVisitor<'a> {
    definitions: Definitions,
    ast_ids: &'a AstIds,
    file_id: FileId,
}

impl DefinitionsVisitor<'_> {
    fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
        HirAstId {
            file_id: self.file_id,
            node_id: self.ast_ids.ast_id(node),
        }
    }

    fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
        let name = Name::new(&function.name);

        let first_type_parameter_id = self.definitions.type_parameters.next_index();

        if let Some(type_params) = &function.type_params {
            for parameter in &type_params.type_params {
                self.lower_type_parameter(parameter);
            }
        }

        // `Range` is half-open, so take the index *after* the last pushed type parameter.
        let last_type_parameter_id = self.definitions.type_parameters.next_index();

        let parameters = self.lower_parameters(&function.parameters);

        self.definitions.functions.push(Function {
            name,
            ast_id: self.ast_id(function),
            parameters,
            type_parameters: first_type_parameter_id..last_type_parameter_id,
        })
    }

    fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
        let first_parameter_id = self.definitions.parameters.next_index();

        for parameter in &parameters.posonlyargs {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::PositionalOnly,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        for parameter in &parameters.args {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Arguments,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        if let Some(vararg) = &parameters.vararg {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Vararg,
                name: Name::new(&vararg.name),
                default: None,
                ast_id: self.ast_id(vararg),
            });
        }

        for parameter in &parameters.kwonlyargs {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::KeywordOnly,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        if let Some(kwarg) = &parameters.kwarg {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Kwarg,
                name: Name::new(&kwarg.name),
                default: None,
                ast_id: self.ast_id(kwarg),
            });
        }

        // `Range` is half-open, so take the index *after* the last pushed parameter.
        let last_parameter_id = self.definitions.parameters.next_index();

        first_parameter_id..last_parameter_id
    }

    fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
        let name = Name::new(&class.name);

        self.definitions.classes.push(Class {
            name,
            ast_id: self.ast_id(class),
        })
    }

    fn lower_assignment(&mut self, assignment: &StmtAssign) {
        // FIXME handle multiple names
        if let Some(Expr::Name(name)) = assignment.targets.first() {
            self.definitions.assignments.push(Assignment {
                name: Name::new(&name.id),
                ast_id: self.ast_id(assignment),
            });
        }
    }

    fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
        if let Expr::Name(name) = &*annotated_assignment.target {
            self.definitions
                .annotated_assignments
                .push(AnnotatedAssignment {
                    name: Name::new(&name.id),
                    ast_id: self.ast_id(annotated_assignment),
                });
        }
    }

    fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
        if let Expr::Name(name) = &*type_alias.name {
            let name = Name::new(&name.id);

            let first_type_parameter_id = self.definitions.type_parameters.next_index();

            if let Some(type_params) = &type_alias.type_params {
                for type_parameter in &type_params.type_params {
                    self.lower_type_parameter(type_parameter);
                }
            }

            // `Range` is half-open, so take the index *after* the last pushed type parameter.
            let last_type_parameter_id = self.definitions.type_parameters.next_index();

            self.definitions.type_aliases.push(TypeAlias {
                name,
                ast_id: self.ast_id(type_alias),
                parameters: first_type_parameter_id..last_type_parameter_id,
            });
        }
    }

    fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
        match type_parameter {
            TypeParam::TypeVar(type_var) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::TypeVar(TypeParameterTypeVar {
                        name: Name::new(&type_var.name),
                        ast_id: self.ast_id(type_var),
                    }))
            }
            TypeParam::ParamSpec(param_spec) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::ParamSpec(TypeParameterParamSpec {
                        name: Name::new(&param_spec.name),
                        ast_id: self.ast_id(param_spec),
                    }))
            }
            TypeParam::TypeVarTuple(type_var_tuple) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
                        name: Name::new(&type_var_tuple.name),
                        ast_id: self.ast_id(type_var_tuple),
                    }))
            }
        }
    }

    fn lower_import(&mut self, _import: &StmtImport) {
        // TODO
    }

    fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
        // TODO
    }

    fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
        self.definitions.globals.push(Global {
            ast_id: self.ast_id(global),
        })
    }

    fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
        self.definitions.non_locals.push(NonLocal {
            ast_id: self.ast_id(non_local),
        })
    }

    fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
        // TODO
    }

    fn lower_with_item(&mut self, _with_item: &WithItem) {
        // TODO
    }

    fn lower_match_case(&mut self, _match_case: &MatchCase) {
        // TODO
    }
}

impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        match stmt {
            // Definition statements
            Stmt::FunctionDef(definition) => {
                self.lower_function_def(definition);
                self.visit_body(&definition.body);
            }
            Stmt::ClassDef(definition) => {
                self.lower_class_def(definition);
                self.visit_body(&definition.body);
            }
            Stmt::Assign(assignment) => {
                self.lower_assignment(assignment);
            }
            Stmt::AnnAssign(annotated_assignment) => {
                self.lower_annotated_assignment(annotated_assignment);
            }
            Stmt::TypeAlias(type_alias) => {
                self.lower_type_alias(type_alias);
            }

            Stmt::Import(import) => self.lower_import(import),
            Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
            Stmt::Global(global) => {
                self.lower_global(global);
            }
            Stmt::Nonlocal(non_local) => {
                self.lower_non_local(non_local);
            }

            // Visit the compound statement bodies because they can contain other definitions.
            Stmt::For(_)
            | Stmt::While(_)
            | Stmt::If(_)
            | Stmt::With(_)
            | Stmt::Match(_)
            | Stmt::Try(_) => {
                preorder::walk_stmt(self, stmt);
            }

            // Skip over simple statements because they can't contain any other definitions.
            Stmt::Return(_)
            | Stmt::Delete(_)
            | Stmt::AugAssign(_)
            | Stmt::Raise(_)
            | Stmt::Assert(_)
            | Stmt::Expr(_)
            | Stmt::Pass(_)
            | Stmt::Break(_)
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {
                // No-op
            }
        }
    }

    fn visit_expr(&mut self, _: &'_ Expr) {}

    fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}

    fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
        match except_handler {
            ExceptHandler::ExceptHandler(except_handler) => {
                self.lower_except_handler(except_handler);
            }
        }
    }

    fn visit_with_item(&mut self, with_item: &'_ WithItem) {
        self.lower_with_item(with_item);
    }

    fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
        self.lower_match_case(match_case);
        self.visit_body(&match_case.body);
    }
}
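Aside: the lowering above stores children as a `Range` of ids into a shared arena rather than a `Vec` per parent. Half-open ranges make the empty case and the end index fall out naturally: record the length before and after pushing. A runnable sketch with a plain `Vec` standing in for `IndexVec` (the `Function` type here is illustrative):

use std::ops::Range;

#[derive(Debug)]
struct Function {
    name: &'static str,
    parameters: Range<usize>,
}

fn main() {
    let mut parameters: Vec<&'static str> = Vec::new();

    let start = parameters.len();
    parameters.push("x");
    parameters.push("y");
    let end = parameters.len(); // index *after* the last pushed element

    let function = Function { name: "f", parameters: start..end };

    let names: Vec<_> = parameters[function.parameters.clone()].to_vec();
    assert_eq!(names, ["x", "y"]);
    println!("{} has parameters {:?}", function.name, names);
}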
@@ -1,109 +0,0 @@
use std::fmt::Formatter;
use std::hash::BuildHasherDefault;
use std::ops::Deref;
use std::path::{Path, PathBuf};

use rustc_hash::{FxHashSet, FxHasher};

use crate::files::FileId;

pub mod ast_ids;
pub mod cache;
pub mod cancellation;
pub mod db;
pub mod files;
pub mod hir;
pub mod lint;
pub mod module;
mod parse;
pub mod program;
pub mod source;
mod symbols;
mod types;
pub mod watch;

pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
#[allow(unused)]
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;

#[derive(Debug, Clone)]
pub struct Workspace {
    /// TODO this should be a resolved path. We should probably use a newtype wrapper that guarantees that
    /// the path is UTF-8 and normalized.
    root: PathBuf,
    /// The files that are open in the workspace.
    ///
    /// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
    /// * CLI: The resolved files passed as arguments to the CLI.
    open_files: FxHashSet<FileId>,
}

impl Workspace {
    pub fn new(root: PathBuf) -> Self {
        Self {
            root,
            open_files: FxHashSet::default(),
        }
    }

    pub fn root(&self) -> &Path {
        self.root.as_path()
    }

    // TODO having the content in the workspace feels wrong.
    pub fn open_file(&mut self, file_id: FileId) {
        self.open_files.insert(file_id);
    }

    pub fn close_file(&mut self, file_id: FileId) {
        self.open_files.remove(&file_id);
    }

    // TODO introduce an `OpenFile` type instead of using an anonymous tuple.
    pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
        self.open_files.iter().copied()
    }

    pub fn is_file_open(&self, file_id: FileId) -> bool {
        self.open_files.contains(&file_id)
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Name(smol_str::SmolStr);

impl Name {
    #[inline]
    pub fn new(name: &str) -> Self {
        Self(smol_str::SmolStr::new(name))
    }

    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }
}

impl Deref for Name {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl<T> From<T> for Name
where
    T: Into<smol_str::SmolStr>,
{
    fn from(value: T) -> Self {
        Self(value.into())
    }
}

impl std::fmt::Display for Name {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
@@ -1,264 +0,0 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{ModModule, StringLiteral};
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{HasJar, LintDb, LintJar, QueryResult, SemanticDb};
|
||||
use crate::files::FileId;
|
||||
use crate::parse::Parsed;
|
||||
use crate::source::Source;
|
||||
use crate::symbols::{Definition, SymbolId, SymbolTable};
|
||||
use crate::types::Type;
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_syntax<Db>(db: &Db, file_id: FileId) -> QueryResult<Diagnostics>
|
||||
where
|
||||
Db: LintDb + HasJar<LintJar>,
|
||||
{
|
||||
let storage = &db.jar()?.lint_syntax;
|
||||
|
||||
#[allow(clippy::print_stdout)]
|
||||
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
|
||||
for i in 0..10 {
|
||||
db.cancelled()?;
|
||||
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
|
||||
std::thread::sleep(Duration::from_secs(1));
|
||||
}
|
||||
}
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let source = db.source(*file_id)?;
|
||||
lint_lines(source.text(), &mut diagnostics);
|
||||
|
||||
let parsed = db.parse(*file_id)?;
|
||||
|
||||
if parsed.errors().is_empty() {
|
||||
let ast = parsed.ast();
|
||||
|
||||
let mut visitor = SyntaxLintVisitor {
|
||||
diagnostics,
|
||||
source: source.text(),
|
||||
};
|
||||
visitor.visit_body(&ast.body);
|
||||
diagnostics = visitor.diagnostics;
|
||||
} else {
|
||||
diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
||||
for (line_number, line) in source.lines().enumerate() {
|
||||
if line.len() < 88 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let char_count = line.chars().count();
|
||||
if char_count > 88 {
|
||||
diagnostics.push(format!(
|
||||
"Line {} is too long ({} characters)",
|
||||
line_number + 1,
|
||||
char_count
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_semantic<Db>(db: &Db, file_id: FileId) -> QueryResult<Diagnostics>
|
||||
where
|
||||
Db: LintDb + HasJar<LintJar>,
|
||||
{
|
||||
let storage = &db.jar()?.lint_semantic;
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let source = db.source(*file_id)?;
|
||||
let parsed = db.parse(*file_id)?;
|
||||
let symbols = db.symbol_table(*file_id)?;
|
||||
|
||||
let context = SemanticLintContext {
|
||||
file_id: *file_id,
|
||||
source,
|
||||
parsed,
|
||||
symbols,
|
||||
db,
|
||||
diagnostics: RefCell::new(Vec::new()),
|
||||
};
|
||||
|
||||
lint_unresolved_imports(&context)?;
|
||||
|
||||
Ok(Diagnostics::from(context.diagnostics.take()))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
|
||||
for (symbol, definition) in context.symbols().all_definitions() {
|
||||
match definition {
|
||||
Definition::Import(import) => {
|
||||
let ty = context.infer_symbol_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
context.push_diagnostic(format!("Unresolved module {}", import.module));
|
||||
}
|
||||
}
|
||||
Definition::ImportFrom(import) => {
|
||||
let ty = context.infer_symbol_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
let module_name = import.module().map(Deref::deref).unwrap_or_default();
|
||||
let message = if import.level() > 0 {
|
||||
format!(
|
||||
"Unresolved relative import '{}' from {}{}",
|
||||
import.name(),
|
||||
".".repeat(import.level() as usize),
|
||||
module_name
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Unresolved import '{}' from '{}'",
|
||||
import.name(),
|
||||
module_name
|
||||
)
|
||||
};
|
||||
|
||||
context.push_diagnostic(message);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}

pub struct SemanticLintContext<'a> {
    file_id: FileId,
    source: Source,
    parsed: Parsed,
    symbols: Arc<SymbolTable>,
    db: &'a dyn SemanticDb,
    diagnostics: RefCell<Vec<String>>,
}

impl<'a> SemanticLintContext<'a> {
    pub fn source_text(&self) -> &str {
        self.source.text()
    }

    pub fn file_id(&self) -> FileId {
        self.file_id
    }

    pub fn ast(&self) -> &ModModule {
        self.parsed.ast()
    }

    pub fn symbols(&self) -> &SymbolTable {
        &self.symbols
    }

    pub fn infer_symbol_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
        self.db.infer_symbol_type(self.file_id, symbol_id)
    }

    pub fn push_diagnostic(&self, diagnostic: String) {
        self.diagnostics.borrow_mut().push(diagnostic);
    }

    pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
        self.diagnostics.get_mut().extend(diagnostics);
    }
}

#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
    diagnostics: Vec<String>,
    source: &'a str,
}

impl Visitor<'_> for SyntaxLintVisitor<'_> {
    fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
        // A very naive implementation of the "use double quotes" rule.
        let text = &self.source[string_literal.range];

        if text.starts_with('\'') {
            self.diagnostics
                .push("Use double quotes for strings".to_string());
        }
    }
}

#[derive(Debug, Clone)]
pub enum Diagnostics {
    Empty,
    List(Arc<Vec<String>>),
}

impl Diagnostics {
    pub fn as_slice(&self) -> &[String] {
        match self {
            Diagnostics::Empty => &[],
            Diagnostics::List(list) => list.as_slice(),
        }
    }
}

impl Deref for Diagnostics {
    type Target = [String];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl From<Vec<String>> for Diagnostics {
    fn from(value: Vec<String>) -> Self {
        if value.is_empty() {
            Diagnostics::Empty
        } else {
            Diagnostics::List(Arc::new(value))
        }
    }
}
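An aside for readers of this diff: `Diagnostics` keeps the common no-findings case allocation-free via `Empty`, and shares non-empty result lists behind an `Arc` so cached values clone cheaply. A minimal usage sketch against the definitions above (not part of the diff; the messages are placeholders):

```rust
fn demo() {
    // Building from a possibly-empty Vec picks the cheap variant automatically.
    let none = Diagnostics::from(Vec::new());
    let some = Diagnostics::from(vec!["Unresolved module foo".to_string()]);

    // `Deref<Target = [String]>` lets callers treat either variant as a slice.
    assert!(none.is_empty());
    for message in some.iter() {
        println!("{message}");
    }

    // Cloning a `List` only bumps the Arc refcount; the Vec itself is not copied.
    let shared = some.clone();
    assert_eq!(shared.len(), 1);
}
```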

#[derive(Default, Debug)]
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);

impl Deref for LintSyntaxStorage {
    type Target = KeyValueCache<FileId, Diagnostics>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for LintSyntaxStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[derive(Default, Debug)]
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);

impl Deref for LintSemanticStorage {
    type Target = KeyValueCache<FileId, Diagnostics>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for LintSemanticStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
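`KeyValueCache` itself lives in `crate::cache` and is not visible in this diff, so its exact API is not shown here. As a rough mental model only, the storages above behave like a get-or-compute map keyed by `FileId`. A simplified, hypothetical stand-in (not the crate's real implementation):

```rust
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Mutex;

/// Hypothetical stand-in for `crate::cache::KeyValueCache`.
struct SimpleCache<K, V>(Mutex<HashMap<K, V>>);

impl<K: Eq + Hash + Copy, V: Clone> SimpleCache<K, V> {
    fn new() -> Self {
        Self(Mutex::new(HashMap::new()))
    }

    /// Returns the cached value for `key`, computing and storing it on a miss.
    fn get<E>(&self, key: &K, compute: impl FnOnce(&K) -> Result<V, E>) -> Result<V, E> {
        let mut map = self.0.lock().unwrap();
        if let Some(value) = map.get(key) {
            return Ok(value.clone());
        }
        let value = compute(key)?;
        map.insert(*key, value.clone());
        Ok(value)
    }

    /// Invalidates a single entry, e.g. after a file change.
    fn remove(&mut self, key: &K) {
        self.0.get_mut().unwrap().remove(key);
    }
}
```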
@@ -1,359 +0,0 @@
#![allow(clippy::dbg_macro)]

use std::path::Path;
use std::sync::Mutex;

use crossbeam::channel as crossbeam_channel;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;

use red_knot::db::{HasJar, ParallelDatabase, QueryError, SemanticDb, SourceDb, SourceJar};
use red_knot::module::{ModuleSearchPath, ModuleSearchPathKind};
use red_knot::program::check::ExecutionMode;
use red_knot::program::{FileWatcherChange, Program};
use red_knot::watch::FileWatcher;
use red_knot::Workspace;

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
fn main() -> anyhow::Result<()> {
    setup_tracing();

    let arguments: Vec<_> = std::env::args().collect();

    if arguments.len() < 2 {
        eprintln!("Usage: red_knot <path>");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    let entry_point = Path::new(&arguments[1]);

    if !entry_point.exists() {
        eprintln!("The entry point does not exist.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    if !entry_point.is_file() {
        eprintln!("The entry point is not a file.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    let workspace_folder = entry_point.parent().unwrap();
    let workspace = Workspace::new(workspace_folder.to_path_buf());

    let workspace_search_path = ModuleSearchPath::new(
        workspace.root().to_path_buf(),
        ModuleSearchPathKind::FirstParty,
    );
    let mut program = Program::new(workspace);
    program.set_module_search_paths(vec![workspace_search_path]);

    let entry_id = program.file_id(entry_point);
    program.workspace_mut().open_file(entry_id);

    let (main_loop, main_loop_cancellation_token) = MainLoop::new();

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
    ctrlc::set_handler(move || {
        let mut lock = main_loop_cancellation_token.lock().unwrap();

        if let Some(token) = lock.take() {
            token.stop();
        }
    })?;

    let file_changes_notifier = main_loop.file_changes_notifier();

    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;

    file_watcher.watch_folder(workspace_folder)?;

    main_loop.run(&mut program);

    let source_jar: &SourceJar = program.jar().unwrap();

    dbg!(source_jar.parsed.statistics());
    dbg!(source_jar.sources.statistics());

    Ok(())
}

struct MainLoop {
    orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
    main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
}

impl MainLoop {
    fn new() -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            sender: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });

        (
            Self {
                orchestrator_sender,
                main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
            },
        )
    }

    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator_sender.clone(),
        }
    }

    fn run(self, program: &mut Program) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Run)
            .unwrap();

        for message in &self.main_loop_receiver {
            tracing::trace!("Main Loop: Tick");

            match message {
                MainLoopMessage::CheckProgram { revision } => {
                    let program = program.snapshot();
                    let sender = self.orchestrator_sender.clone();

                    // Spawn a new task that checks the program. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
                        Ok(result) => {
                            sender
                                .send(OrchestratorMessage::CheckProgramCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                                .unwrap();
                        }
                        Err(QueryError::Cancelled) => {}
                    });
                }
                MainLoopMessage::ApplyChanges(changes) => {
                    // Automatically cancels any pending queries and waits for them to complete.
                    program.apply_changes(changes);
                }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    dbg!(diagnostics);
                }
                MainLoopMessage::Exit => {
                    return;
                }
            }
        }
    }
}

impl Drop for MainLoop {
    fn drop(&mut self) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Shutdown)
            .unwrap();
    }
}

#[derive(Debug, Clone)]
struct FileChangesNotifier {
    sender: crossbeam_channel::Sender<OrchestratorMessage>,
}

impl FileChangesNotifier {
    fn notify(&self, changes: Vec<FileWatcherChange>) {
        self.sender
            .send(OrchestratorMessage::FileChanges(changes))
            .unwrap();
    }
}

#[derive(Debug)]
struct MainLoopCancellationToken {
    sender: crossbeam_channel::Sender<MainLoopMessage>,
}

impl MainLoopCancellationToken {
    fn stop(self) {
        self.sender.send(MainLoopMessage::Exit).unwrap();
    }
}

struct Orchestrator {
    /// Sends messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,
    /// Receives messages from the main loop.
    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
    revision: usize,
}

impl Orchestrator {
    fn run(&mut self) {
        while let Ok(message) = self.receiver.recv() {
            match message {
                OrchestratorMessage::Run => {
                    self.sender
                        .send(MainLoopMessage::CheckProgram {
                            revision: self.revision,
                        })
                        .unwrap();
                }

                OrchestratorMessage::CheckProgramCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
                        self.sender
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
                        tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
                    }
                }

                OrchestratorMessage::FileChanges(changes) => {
                    // Request cancellation, but wait until all analysis tasks have completed to
                    // avoid stale messages in the next main loop.

                    self.revision += 1;
                    self.debounce_changes(changes);
                }
                OrchestratorMessage::Shutdown => {
                    return self.shutdown();
                }
            }
        }
    }

    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
        loop {
            // Consume any incoming file change messages before scheduling a new analysis,
            // but don't wait longer than the 10 ms debounce window below.
            crossbeam_channel::select! {
                recv(self.receiver) -> message => {
                    match message {
                        Ok(OrchestratorMessage::Shutdown) => {
                            return self.shutdown();
                        }
                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
                            changes.extend(file_changes);
                        }

                        Ok(OrchestratorMessage::CheckProgramCompleted { .. }) => {
                            // Disregard any outdated completion message.
                        }
                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

                        Err(_) => {
                            // There are no more senders; no point in waiting for more messages.
                            return;
                        }
                    }
                },
                default(std::time::Duration::from_millis(10)) => {
                    // No more file changes arrived within 10 ms; send the changes and schedule a new analysis.
                    self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
                    self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision }).unwrap();
                    return;
                }
            }
        }
    }

    #[allow(clippy::unused_self)]
    fn shutdown(&self) {
        tracing::trace!("Shutting down orchestrator.");
    }
}

/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    CheckProgram { revision: usize },
    CheckCompleted(Vec<String>),
    ApplyChanges(Vec<FileWatcherChange>),
    Exit,
}

#[derive(Debug)]
enum OrchestratorMessage {
    Run,
    Shutdown,

    CheckProgramCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },

    FileChanges(Vec<FileWatcherChange>),
}
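The `revision` field is what keeps stale results out of the loop: every batch of file changes bumps it, and every completion message carries the revision it was computed against. A stripped-down sketch of that guard, independent of the channel plumbing (names here are illustrative, not from the diff):

```rust
/// Hypothetical, minimal version of the orchestrator's staleness check.
struct RevisionGuard {
    current: usize,
}

impl RevisionGuard {
    /// Called when new file changes arrive: all in-flight work becomes stale.
    fn bump(&mut self) {
        self.current += 1;
    }

    /// Called when a check finishes: accept the result only if nothing
    /// changed while it was running.
    fn accept(&self, completed_at: usize) -> bool {
        completed_at == self.current
    }
}

fn demo() {
    let mut guard = RevisionGuard { current: 0 };
    let started_at = guard.current;     // a check starts at revision 0
    guard.bump();                       // a file changes mid-check
    assert!(!guard.accept(started_at)); // the finished result is discarded
}
```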

fn setup_tracing() {
    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter {
                trace_level: Level::TRACE,
            }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}
File diff suppressed because it is too large
@@ -1,95 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ruff_python_ast as ast;
use ruff_python_parser::{Mode, ParseError};
use ruff_text_size::{Ranged, TextRange};

use crate::cache::KeyValueCache;
use crate::db::{HasJar, QueryResult, SourceDb, SourceJar};
use crate::files::FileId;

#[derive(Debug, Clone, PartialEq)]
pub struct Parsed {
    inner: Arc<ParsedInner>,
}

#[derive(Debug, PartialEq)]
struct ParsedInner {
    ast: ast::ModModule,
    errors: Vec<ParseError>,
}

impl Parsed {
    fn new(ast: ast::ModModule, errors: Vec<ParseError>) -> Self {
        Self {
            inner: Arc::new(ParsedInner { ast, errors }),
        }
    }

    pub(crate) fn from_text(text: &str) -> Self {
        let result = ruff_python_parser::parse(text, Mode::Module);

        let (module, errors) = match result {
            Ok(ast::Mod::Module(module)) => (module, vec![]),
            Ok(ast::Mod::Expression(expression)) => (
                ast::ModModule {
                    range: expression.range(),
                    body: vec![ast::Stmt::Expr(ast::StmtExpr {
                        range: expression.range(),
                        value: expression.body,
                    })],
                },
                vec![],
            ),
            Err(errors) => (
                ast::ModModule {
                    range: TextRange::default(),
                    body: Vec::new(),
                },
                vec![errors],
            ),
        };

        Parsed::new(module, errors)
    }

    pub fn ast(&self) -> &ast::ModModule {
        &self.inner.ast
    }

    pub fn errors(&self) -> &[ParseError] {
        &self.inner.errors
    }
}

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn parse<Db>(db: &Db, file_id: FileId) -> QueryResult<Parsed>
where
    Db: SourceDb + HasJar<SourceJar>,
{
    let jar = db.jar()?;

    jar.parsed.get(&file_id, |file_id| {
        let source = db.source(*file_id)?;

        Ok(Parsed::from_text(source.text()))
    })
}

#[derive(Debug, Default)]
pub struct ParsedStorage(KeyValueCache<FileId, Parsed>);

impl Deref for ParsedStorage {
    type Target = KeyValueCache<FileId, Parsed>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for ParsedStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
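`Parsed::from_text` normalizes every parser outcome into a `ModModule`, so downstream queries never have to deal with `Mod::Expression` or a hard failure. For context, a small standalone sketch of the underlying `ruff_python_parser` call it wraps (same API as used above; the snippet text is arbitrary):

```rust
use ruff_python_ast as ast;
use ruff_python_parser::{parse, Mode};

fn main() {
    // The same entry point `Parsed::from_text` uses above.
    match parse("x = 1\n", Mode::Module) {
        Ok(ast::Mod::Module(module)) => {
            // One top-level statement: the assignment `x = 1`.
            println!("parsed {} top-level statements", module.body.len());
        }
        Ok(ast::Mod::Expression(_)) => {
            // Not produced in `Mode::Module`, but the enum still requires the arm.
        }
        Err(err) => eprintln!("syntax error: {err}"),
    }
}
```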
@@ -1,412 +0,0 @@
use rayon::{current_num_threads, yield_local};
use rustc_hash::FxHashSet;

use crate::db::{Database, LintDb, QueryError, QueryResult, SemanticDb};
use crate::files::FileId;
use crate::lint::Diagnostics;
use crate::program::Program;
use crate::symbols::Dependency;

impl Program {
    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
        self.cancelled()?;

        let mut context = CheckContext::new(self);

        match mode {
            ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
            ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
        };

        Ok(context.finish())
    }

    #[tracing::instrument(level = "debug", skip(self, context))]
    fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
        self.cancelled()?;

        let symbol_table = self.symbol_table(file)?;
        let dependencies = symbol_table.dependencies();

        if !dependencies.is_empty() {
            let module = self.file_to_module(file)?;

            // TODO scheduling all dependencies here is wasteful if we don't infer any types on them,
            // but I think that's unlikely, so it is okay?
            // Anyway, we need to figure out a way to retrieve the dependencies of a module
            // from the persistent cache. So maybe it should be a separate query after all.
            for dependency in dependencies {
                let dependency_name = match dependency {
                    Dependency::Module(name) => Some(name.clone()),
                    Dependency::Relative { .. } => match &module {
                        Some(module) => module.resolve_dependency(self, dependency)?,
                        None => None,
                    },
                };

                if let Some(dependency_name) = dependency_name {
                    // TODO We may want to have a different check function for non-first-party
                    // files because we only need to index them and not check them.
                    // Supporting non-first-party code also requires supporting typing stubs.
                    if let Some(dependency) = self.resolve_module(dependency_name)? {
                        if dependency.path(self)?.root().kind().is_first_party() {
                            context.schedule_dependency(dependency.path(self)?.file());
                        }
                    }
                }
            }
        }

        let mut diagnostics = Vec::new();

        if self.workspace().is_file_open(file) {
            diagnostics.extend_from_slice(&self.lint_syntax(file)?);
            diagnostics.extend_from_slice(&self.lint_semantic(file)?);
        }

        Ok(Diagnostics::from(diagnostics))
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ExecutionMode {
    SingleThreaded,
    ThreadPool,
}

/// Context that stores state information about the entire check operation.
struct CheckContext<'a> {
    /// IDs of the files that have been queued for checking.
    ///
    /// Used to avoid queuing the same file twice.
    scheduled_files: FxHashSet<FileId>,

    /// Reference to the program that is checked.
    program: &'a Program,

    /// The aggregated diagnostics.
    diagnostics: Vec<String>,
}

impl<'a> CheckContext<'a> {
    fn new(program: &'a Program) -> Self {
        Self {
            scheduled_files: FxHashSet::default(),
            program,
            diagnostics: Vec::new(),
        }
    }

    /// Returns the tasks to check all open files in the workspace.
    fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
        self.scheduled_files
            .extend(self.program.workspace().open_files());

        self.program
            .workspace()
            .open_files()
            .map(|file_id| CheckOpenFileTask { file_id })
            .collect()
    }

    /// Returns the task to check a dependency.
    fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
        if self.scheduled_files.insert(file_id) {
            Some(CheckDependencyTask { file_id })
        } else {
            None
        }
    }

    /// Pushes the result for a single file check operation.
    fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
        self.diagnostics.extend_from_slice(diagnostics);
    }

    /// Returns a reference to the program that is being checked.
    fn program(&self) -> &'a Program {
        self.program
    }

    /// Creates a task context that is used to check a single file.
    fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
    where
        S: ScheduleDependency,
    {
        CheckTaskContext {
            program: self.program,
            dependency_scheduler,
        }
    }

    fn finish(self) -> Vec<String> {
        self.diagnostics
    }
}

/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
trait ScheduleDependency {
    /// Schedules the file with the given ID for checking.
    fn schedule(&self, file_id: FileId);
}

impl<T> ScheduleDependency for T
where
    T: Fn(FileId),
{
    fn schedule(&self, file_id: FileId) {
        let f = self;
        f(file_id);
    }
}

/// Context that is used to run a single file check task.
///
/// The task is generic over `S` because it is passed across thread boundaries and
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
struct CheckTaskContext<'a, 'scheduler, S>
where
    S: ScheduleDependency,
{
    dependency_scheduler: &'scheduler S,
    program: &'a Program,
}

impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
where
    S: ScheduleDependency,
{
    fn as_file_context(&self) -> CheckFileContext<'scheduler> {
        CheckFileContext {
            dependency_scheduler: self.dependency_scheduler,
        }
    }
}

/// Context passed when checking a single file.
///
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
/// to avoid monomorphization of [`Program::check_file`].
struct CheckFileContext<'a> {
    dependency_scheduler: &'a dyn ScheduleDependency,
}

impl<'a> CheckFileContext<'a> {
    fn schedule_dependency(&self, file_id: FileId) {
        self.dependency_scheduler.schedule(file_id);
    }
}

#[derive(Debug)]
enum CheckFileTask {
    OpenFile(CheckOpenFileTask),
    Dependency(CheckDependencyTask),
}

impl CheckFileTask {
    /// Runs the task and returns the results for checking this file.
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        match self {
            Self::OpenFile(task) => task.run(context),
            Self::Dependency(task) => task.run(context),
        }
    }

    fn file_id(&self) -> FileId {
        match self {
            CheckFileTask::OpenFile(task) => task.file_id,
            CheckFileTask::Dependency(task) => task.file_id,
        }
    }
}

/// Task to check an open file.
#[derive(Debug)]
struct CheckOpenFileTask {
    file_id: FileId,
}

impl CheckOpenFileTask {
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        context
            .program
            .check_file(self.file_id, &context.as_file_context())
    }
}

/// Task to check a dependency file.
#[derive(Debug)]
struct CheckDependencyTask {
    file_id: FileId,
}

impl CheckDependencyTask {
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        context
            .program
            .check_file(self.file_id, &context.as_file_context())
    }
}

/// Executor that schedules the checking of individual program files.
trait CheckExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()>;
}

/// Executor that runs all check operations on the current thread.
///
/// The executor does not schedule dependencies for checking.
/// The main motivation for scheduling dependencies in a multithreaded environment
/// is to parse and index the dependencies concurrently. However, that doesn't make
/// sense in a single threaded environment, because the dependencies then compete
/// with checking the open files. Checking dependencies in a single threaded
/// environment is more likely to hurt performance because we end up analyzing
/// files in their entirety, even if we only need to type check parts of them.
#[derive(Debug, Default)]
struct SingleThreadedExecutor;

impl CheckExecutor for SingleThreadedExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()> {
        let mut queue = context.check_open_files();

        let noop_schedule_dependency = |_| {};

        while let Some(file) = queue.pop() {
            context.program().cancelled()?;

            let task_context = context.task_context(&noop_schedule_dependency);
            context.push_diagnostics(&file.run(&task_context)?);
        }

        Ok(())
    }
}

/// Executor that runs the check operations on a thread pool.
///
/// The executor runs each check operation as its own task using a thread pool.
///
/// Unlike [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
/// even schedules dependencies for checking when the thread pool size is 1 for a better debugging experience.
#[derive(Debug, Default)]
struct ThreadPoolExecutor;

impl CheckExecutor for ThreadPoolExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()> {
        let num_threads = current_num_threads();
        let single_threaded = num_threads == 1;
        let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
        let _guard = span.enter();

        let mut queue: Vec<_> = context
            .check_open_files()
            .into_iter()
            .map(CheckFileTask::OpenFile)
            .collect();

        let (sender, receiver) = if single_threaded {
            // Use an unbounded queue for single threaded execution to prevent deadlocks
            // when a single file schedules multiple dependencies.
            crossbeam::channel::unbounded()
        } else {
            // Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
            // up processing messages from the worker threads.
            crossbeam::channel::bounded(num_threads)
        };

        let schedule_sender = sender.clone();
        let schedule_dependency = move |file_id| {
            schedule_sender
                .send(ThreadPoolMessage::ScheduleDependency(file_id))
                .unwrap();
        };

        let result = rayon::in_place_scope(|scope| {
            let mut pending = 0usize;

            loop {
                context.program().cancelled()?;

                // 1. Prefer draining a queued message so the channel always has free
                //    capacity and the worker threads never block on `send`.
                // 2. Otherwise, spawn the next queued file check.
                // 3. If the queue is empty, block until the next incoming message.
                // 4. Exit when there are no more messages and no remaining senders.
                let message = if let Ok(message) = receiver.try_recv() {
                    message
                } else if let Some(task) = queue.pop() {
                    pending += 1;

                    let task_context = context.task_context(&schedule_dependency);
                    let sender = sender.clone();
                    let task_span = tracing::trace_span!(
                        parent: &span,
                        "CheckFileTask::run",
                        file_id = task.file_id().as_u32(),
                    );

                    scope.spawn(move |_| {
                        task_span.in_scope(|| match task.run(&task_context) {
                            Ok(result) => {
                                sender.send(ThreadPoolMessage::Completed(result)).unwrap();
                            }
                            Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
                        });
                    });

                    // If this is a single threaded rayon thread pool, yield the current thread
                    // or we never start processing the work items.
                    if single_threaded {
                        yield_local();
                    }

                    continue;
                } else if let Ok(message) = receiver.recv() {
                    message
                } else {
                    break;
                };

                match message {
                    ThreadPoolMessage::ScheduleDependency(dependency) => {
                        if let Some(task) = context.check_dependency(dependency) {
                            queue.push(CheckFileTask::Dependency(task));
                        }
                    }
                    ThreadPoolMessage::Completed(diagnostics) => {
                        context.push_diagnostics(&diagnostics);
                        pending -= 1;

                        if pending == 0 && queue.is_empty() {
                            break;
                        }
                    }
                    ThreadPoolMessage::Errored(err) => {
                        return Err(err);
                    }
                }
            }

            Ok(())
        });

        result
    }
}

#[derive(Debug)]
enum ThreadPoolMessage {
    ScheduleDependency(FileId),
    Completed(Diagnostics),
    Errored(QueryError),
}
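The executor above combines three pieces: `rayon::in_place_scope` for structured task spawning, a `crossbeam` channel for results, and a `pending` counter to detect completion. A minimal, self-contained sketch of just that skeleton, without the check-specific logic (assumes a rayon pool with more than one thread, since unlike the code above it does not call `yield_local`):

```rust
use crossbeam::channel;

fn run_tasks(inputs: Vec<u32>) -> Vec<u32> {
    let (sender, receiver) = channel::unbounded();
    let mut results = Vec::new();

    rayon::in_place_scope(|scope| {
        let mut pending = 0usize;
        for input in inputs {
            pending += 1;
            let sender = sender.clone();
            // Each task runs on the pool and reports back over the channel.
            scope.spawn(move |_| {
                sender.send(input * 2).unwrap();
            });
        }
        // Drain exactly as many results as tasks were spawned.
        while pending > 0 {
            results.push(receiver.recv().unwrap());
            pending -= 1;
        }
    });

    results
}
```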
@@ -1,305 +0,0 @@
use std::collections::hash_map::Entry;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use rustc_hash::FxHashMap;

use crate::db::{
    Database, Db, DbRuntime, HasJar, HasJars, JarsStorage, LintDb, LintJar, ParallelDatabase,
    QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar,
};
use crate::files::{FileId, Files};
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
use crate::module::{
    add_module, file_to_module, path_to_module, resolve_module, set_module_search_paths, Module,
    ModuleData, ModuleName, ModuleSearchPath,
};
use crate::parse::{parse, Parsed};
use crate::source::{source_text, Source};
use crate::symbols::{symbol_table, SymbolId, SymbolTable};
use crate::types::{infer_symbol_type, Type};
use crate::Workspace;

pub mod check;

#[derive(Debug)]
pub struct Program {
    jars: JarsStorage<Program>,
    files: Files,
    workspace: Workspace,
}

impl Program {
    pub fn new(workspace: Workspace) -> Self {
        Self {
            jars: JarsStorage::default(),
            files: Files::default(),
            workspace,
        }
    }

    pub fn apply_changes<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileWatcherChange>,
    {
        let mut aggregated_changes = AggregatedChanges::default();

        aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
            id: self.files.intern(&change.path),
            kind: change.kind,
        }));

        let (source, semantic, lint) = self.jars_mut();
        for change in aggregated_changes.iter() {
            semantic.module_resolver.remove_module(change.id);
            semantic.symbol_tables.remove(&change.id);
            source.sources.remove(&change.id);
            source.parsed.remove(&change.id);
            // TODO: remove all dependent modules as well
            semantic.type_store.remove_module(change.id);
            lint.lint_syntax.remove(&change.id);
            lint.lint_semantic.remove(&change.id);
        }
    }

    pub fn files(&self) -> &Files {
        &self.files
    }

    pub fn workspace(&self) -> &Workspace {
        &self.workspace
    }

    pub fn workspace_mut(&mut self) -> &mut Workspace {
        &mut self.workspace
    }
}

impl SourceDb for Program {
    fn file_id(&self, path: &Path) -> FileId {
        self.files.intern(path)
    }

    fn file_path(&self, file_id: FileId) -> Arc<Path> {
        self.files.path(file_id)
    }

    fn source(&self, file_id: FileId) -> QueryResult<Source> {
        source_text(self, file_id)
    }

    fn parse(&self, file_id: FileId) -> QueryResult<Parsed> {
        parse(self, file_id)
    }
}

impl SemanticDb for Program {
    fn resolve_module(&self, name: ModuleName) -> QueryResult<Option<Module>> {
        resolve_module(self, name)
    }

    fn file_to_module(&self, file_id: FileId) -> QueryResult<Option<Module>> {
        file_to_module(self, file_id)
    }

    fn path_to_module(&self, path: &Path) -> QueryResult<Option<Module>> {
        path_to_module(self, path)
    }

    fn symbol_table(&self, file_id: FileId) -> QueryResult<Arc<SymbolTable>> {
        symbol_table(self, file_id)
    }

    fn infer_symbol_type(&self, file_id: FileId, symbol_id: SymbolId) -> QueryResult<Type> {
        infer_symbol_type(self, file_id, symbol_id)
    }

    // Mutations
    fn add_module(&mut self, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)> {
        add_module(self, path)
    }

    fn set_module_search_paths(&mut self, paths: Vec<ModuleSearchPath>) {
        set_module_search_paths(self, paths);
    }
}

impl LintDb for Program {
    fn lint_syntax(&self, file_id: FileId) -> QueryResult<Diagnostics> {
        lint_syntax(self, file_id)
    }

    fn lint_semantic(&self, file_id: FileId) -> QueryResult<Diagnostics> {
        lint_semantic(self, file_id)
    }
}

impl Db for Program {}

impl Database for Program {
    fn runtime(&self) -> &DbRuntime {
        self.jars.runtime()
    }

    fn runtime_mut(&mut self) -> &mut DbRuntime {
        self.jars.runtime_mut()
    }
}

impl ParallelDatabase for Program {
    fn snapshot(&self) -> Snapshot<Self> {
        Snapshot::new(Self {
            jars: self.jars.snapshot(),
            files: self.files.snapshot(),
            workspace: self.workspace.clone(),
        })
    }
}

impl HasJars for Program {
    type Jars = (SourceJar, SemanticJar, LintJar);

    fn jars(&self) -> QueryResult<&Self::Jars> {
        self.jars.jars()
    }

    fn jars_mut(&mut self) -> &mut Self::Jars {
        self.jars.jars_mut()
    }
}

impl HasJar<SourceJar> for Program {
    fn jar(&self) -> QueryResult<&SourceJar> {
        Ok(&self.jars()?.0)
    }

    fn jar_mut(&mut self) -> &mut SourceJar {
        &mut self.jars_mut().0
    }
}

impl HasJar<SemanticJar> for Program {
    fn jar(&self) -> QueryResult<&SemanticJar> {
        Ok(&self.jars()?.1)
    }

    fn jar_mut(&mut self) -> &mut SemanticJar {
        &mut self.jars_mut().1
    }
}

impl HasJar<LintJar> for Program {
    fn jar(&self) -> QueryResult<&LintJar> {
        Ok(&self.jars()?.2)
    }

    fn jar_mut(&mut self) -> &mut LintJar {
        &mut self.jars_mut().2
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    path: PathBuf,
    kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug)]
struct FileChange {
    id: FileId,
    kind: FileChangeKind,
}

impl FileChange {
    fn file_id(self) -> FileId {
        self.id
    }

    fn kind(self) -> FileChangeKind {
        self.kind
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}

#[derive(Default, Debug)]
struct AggregatedChanges {
    changes: FxHashMap<FileId, FileChangeKind>,
}

impl AggregatedChanges {
    fn add(&mut self, change: FileChange) {
        match self.changes.entry(change.file_id()) {
            Entry::Occupied(mut entry) => {
                let merged = entry.get_mut();

                match (merged, change.kind()) {
                    (FileChangeKind::Created, FileChangeKind::Deleted) => {
                        // Deletion after creation means that ruff never saw the file.
                        entry.remove();
                    }
                    (FileChangeKind::Created, FileChangeKind::Modified) => {
                        // No-op: for ruff, modifying a file it doesn't yet know exists is still a creation.
                    }

                    (FileChangeKind::Modified, FileChangeKind::Created) => {
                        // Uhh, that should probably not happen. Continue considering it a modification.
                    }

                    (FileChangeKind::Modified, FileChangeKind::Deleted) => {
                        *entry.get_mut() = FileChangeKind::Deleted;
                    }

                    (FileChangeKind::Deleted, FileChangeKind::Created) => {
                        *entry.get_mut() = FileChangeKind::Modified;
                    }

                    (FileChangeKind::Deleted, FileChangeKind::Modified) => {
                        // That's weird, but let's consider it a modification.
                        *entry.get_mut() = FileChangeKind::Modified;
                    }

                    (FileChangeKind::Created, FileChangeKind::Created)
                    | (FileChangeKind::Modified, FileChangeKind::Modified)
                    | (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
                        // No-op transitions. Some of them should be impossible, but we handle them anyway.
                    }
                }
            }
            Entry::Vacant(entry) => {
                entry.insert(change.kind());
            }
        }
    }

    fn extend<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileChange>,
    {
        let iter = changes.into_iter();
        let (lower, _) = iter.size_hint();
        self.changes.reserve(lower);

        for change in iter {
            self.add(change);
        }
    }

    fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
        self.changes.iter().map(|(id, kind)| FileChange {
            id: *id,
            kind: *kind,
        })
    }
}
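The merge table in `AggregatedChanges::add` collapses a burst of watcher events into one effective change per file. A few representative transitions, written as a would-be unit test against the types above (`some_file_id` is a hypothetical helper; real `FileId`s come from `Files::intern`):

```rust
#[test]
fn merges_change_bursts() {
    let mut changes = AggregatedChanges::default();
    let file = some_file_id(); // hypothetical helper, see note above

    // Created then Deleted: the file never existed as far as ruff is concerned.
    changes.add(FileChange { id: file, kind: FileChangeKind::Created });
    changes.add(FileChange { id: file, kind: FileChangeKind::Deleted });
    assert_eq!(changes.iter().count(), 0);

    // Deleted then Created (e.g. an editor's save-via-rename): net effect is a modification.
    changes.add(FileChange { id: file, kind: FileChangeKind::Deleted });
    changes.add(FileChange { id: file, kind: FileChangeKind::Created });
    assert_eq!(changes.iter().next().unwrap().kind(), FileChangeKind::Modified);
}
```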
@@ -1,96 +0,0 @@
use crate::cache::KeyValueCache;
use crate::db::{HasJar, QueryResult, SourceDb, SourceJar};
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use crate::files::FileId;

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn source_text<Db>(db: &Db, file_id: FileId) -> QueryResult<Source>
where
    Db: SourceDb + HasJar<SourceJar>,
{
    let sources = &db.jar()?.sources;

    sources.get(&file_id, |file_id| {
        let path = db.file_path(*file_id);

        let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
            tracing::error!("Failed to read file '{path:?}': {err}. Falling back to empty text");
            String::new()
        });

        let python_ty = PySourceType::from(&path);

        let kind = match python_ty {
            PySourceType::Python => SourceKind::Python(Arc::from(source_text)),
            PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
            PySourceType::Ipynb => {
                let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
                    // TODO should this be changed to never fail?
                    // Or should we instead add a diagnostic somewhere? But what would we return in this case?
                    tracing::error!(
                        "Failed to parse notebook '{path:?}': {err}. Falling back to an empty notebook"
                    );
                    Notebook::from_source_code("").unwrap()
                });

                SourceKind::IpyNotebook(Arc::new(notebook))
            }
        };

        Ok(Source { kind })
    })
}

#[derive(Debug, Clone, PartialEq)]
pub enum SourceKind {
    Python(Arc<str>),
    Stub(Arc<str>),
    IpyNotebook(Arc<Notebook>),
}

#[derive(Debug, Clone, PartialEq)]
pub struct Source {
    kind: SourceKind,
}

impl Source {
    pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
        Self {
            kind: SourceKind::Python(source.into()),
        }
    }

    pub fn kind(&self) -> &SourceKind {
        &self.kind
    }

    pub fn text(&self) -> &str {
        match &self.kind {
            SourceKind::Python(text) => text,
            SourceKind::Stub(text) => text,
            SourceKind::IpyNotebook(notebook) => notebook.source_code(),
        }
    }
}

#[derive(Debug, Default)]
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);

impl Deref for SourceStorage {
    type Target = KeyValueCache<FileId, Source>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SourceStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
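A quick illustration of the `Source` accessors above; note that notebooks transparently expose their concatenated Python code through the same `text()` method (sketch, not part of the diff):

```rust
fn demo() {
    // `Source::python` wraps plain Python text.
    let source = Source::python("x = 1\n");

    assert!(matches!(source.kind(), SourceKind::Python(_)));
    assert_eq!(source.text(), "x = 1\n");
}
```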
@@ -1,956 +0,0 @@
#![allow(dead_code)]

use std::hash::{Hash, Hasher};
use std::iter::{Copied, DoubleEndedIterator, FusedIterator};
use std::num::NonZeroU32;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use bitflags::bitflags;
use hashbrown::hash_map::{Keys, RawEntryMut};
use rustc_hash::{FxHashMap, FxHasher};

use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast as ast;
use ruff_python_ast::visitor::preorder::PreorderVisitor;

use crate::ast_ids::TypedNodeKey;
use crate::cache::KeyValueCache;
use crate::db::{HasJar, QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::module::ModuleName;
use crate::Name;

#[allow(unreachable_pub)]
#[tracing::instrument(level = "debug", skip(db))]
pub fn symbol_table<Db>(db: &Db, file_id: FileId) -> QueryResult<Arc<SymbolTable>>
where
    Db: SemanticDb + HasJar<SemanticJar>,
{
    let jar = db.jar()?;

    jar.symbol_tables.get(&file_id, |_| {
        let parsed = db.parse(file_id)?;
        Ok(Arc::from(SymbolTable::from_ast(parsed.ast())))
    })
}

type Map<K, V> = hashbrown::HashMap<K, V, ()>;

#[newtype_index]
pub(crate) struct ScopeId;

impl ScopeId {
    pub(crate) fn scope(self, table: &SymbolTable) -> &Scope {
        &table.scopes_by_id[self]
    }
}

#[newtype_index]
pub struct SymbolId;

impl SymbolId {
    pub(crate) fn symbol(self, table: &SymbolTable) -> &Symbol {
        &table.symbols_by_id[self]
    }
}

#[derive(Copy, Clone, Debug, PartialEq)]
pub(crate) enum ScopeKind {
    Module,
    Annotation,
    Class,
    Function,
}

#[derive(Debug)]
pub(crate) struct Scope {
    name: Name,
    kind: ScopeKind,
    child_scopes: Vec<ScopeId>,
    /// Symbol IDs, hashed by symbol name.
    symbols_by_name: Map<SymbolId, ()>,
}

impl Scope {
    pub(crate) fn name(&self) -> &str {
        self.name.as_str()
    }

    pub(crate) fn kind(&self) -> ScopeKind {
        self.kind
    }
}

#[derive(Debug)]
pub(crate) enum Kind {
    FreeVar,
    CellVar,
    CellVarAssigned,
    ExplicitGlobal,
    ImplicitGlobal,
}

bitflags! {
    #[derive(Copy, Clone, Debug)]
    pub(crate) struct SymbolFlags: u8 {
        const IS_USED = 1 << 0;
        const IS_DEFINED = 1 << 1;
        /// TODO: This flag is not yet set by anything
        const MARKED_GLOBAL = 1 << 2;
        /// TODO: This flag is not yet set by anything
        const MARKED_NONLOCAL = 1 << 3;
    }
}

#[derive(Debug)]
pub(crate) struct Symbol {
    name: Name,
    flags: SymbolFlags,
    scope_id: ScopeId,
    // kind: Kind,
}

impl Symbol {
    pub(crate) fn name(&self) -> &str {
        self.name.as_str()
    }

    /// Is the symbol used in its containing scope?
    pub(crate) fn is_used(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_USED)
    }

    /// Is the symbol defined in its containing scope?
    pub(crate) fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    }

    // TODO: implement Symbol.kind 2-pass analysis to categorize as: free-var, cell-var,
    // explicit-global, implicit-global and implement Symbol.kind by modifying the preorder
    // traversal code
}

// TODO storing TypedNodeKey for definitions means we have to search to find them again in the AST;
// this is at best O(log n). If looking up definitions is a bottleneck we should look for
// alternatives here.
#[derive(Clone, Debug)]
pub(crate) enum Definition {
    // For the import cases, we don't need a reference to any arbitrary AST subtrees (annotations,
    // RHS), and referencing just the import statement node is imprecise (a single import statement
    // can assign many symbols, we'd have to re-search for the one we care about), so we just copy
    // the small amount of information we need from the AST.
    Import(ImportDefinition),
    ImportFrom(ImportFromDefinition),
    ClassDef(ClassDefinition),
    FunctionDef(TypedNodeKey<ast::StmtFunctionDef>),
    Assignment(TypedNodeKey<ast::StmtAssign>),
    AnnotatedAssignment(TypedNodeKey<ast::StmtAnnAssign>),
    // TODO with statements, except handlers, function args...
}

#[derive(Clone, Debug)]
pub(crate) struct ImportDefinition {
    pub(crate) module: ModuleName,
}

#[derive(Clone, Debug)]
pub(crate) struct ImportFromDefinition {
    pub(crate) module: Option<ModuleName>,
    pub(crate) name: Name,
    pub(crate) level: u32,
}

impl ImportFromDefinition {
    pub(crate) fn module(&self) -> Option<&ModuleName> {
        self.module.as_ref()
    }

    pub(crate) fn name(&self) -> &Name {
        &self.name
    }

    pub(crate) fn level(&self) -> u32 {
        self.level
    }
}

#[derive(Clone, Debug)]
pub(crate) struct ClassDefinition {
    pub(crate) node_key: TypedNodeKey<ast::StmtClassDef>,
    pub(crate) scope_id: ScopeId,
}

#[derive(Debug, Clone)]
pub enum Dependency {
    Module(ModuleName),
    Relative {
        level: NonZeroU32,
        module: Option<ModuleName>,
    },
}
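To make the `Dependency` encoding concrete: `import foo.bar` records an absolute dependency on the full dotted path, while `from ..pkg import name` records the leading-dot count in `level`. A sketch, assuming `ModuleName::new` as used later in this file:

```rust
use std::num::NonZeroU32;

fn dependency_examples() -> Vec<Dependency> {
    // `import foo.bar`: an absolute dependency on the full dotted path.
    let absolute = Dependency::Module(ModuleName::new("foo.bar"));

    // `from ..pkg import name`: two leading dots become `level = 2`.
    let relative = Dependency::Relative {
        level: NonZeroU32::new(2).unwrap(),
        module: Some(ModuleName::new("pkg")),
    };

    // `from . import name`: no module at all, only a level.
    let bare = Dependency::Relative {
        level: NonZeroU32::new(1).unwrap(),
        module: None,
    };

    vec![absolute, relative, bare]
}
```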

/// Table of all symbols in all scopes for a module.
#[derive(Debug)]
pub struct SymbolTable {
    scopes_by_id: IndexVec<ScopeId, Scope>,
    symbols_by_id: IndexVec<SymbolId, Symbol>,
    defs: FxHashMap<SymbolId, Vec<Definition>>,
    dependencies: Vec<Dependency>,
}

impl SymbolTable {
    pub(crate) fn from_ast(module: &ast::ModModule) -> Self {
        let root_scope_id = SymbolTable::root_scope_id();
        let mut builder = SymbolTableBuilder {
            table: SymbolTable::new(),
            scopes: vec![root_scope_id],
            current_definition: None,
        };
        builder.visit_body(&module.body);
        builder.table
    }

    pub(crate) fn new() -> Self {
        let mut table = SymbolTable {
            scopes_by_id: IndexVec::new(),
            symbols_by_id: IndexVec::new(),
            defs: FxHashMap::default(),
            dependencies: Vec::new(),
        };
        table.scopes_by_id.push(Scope {
            name: Name::new("<module>"),
            kind: ScopeKind::Module,
            child_scopes: Vec::new(),
            symbols_by_name: Map::default(),
        });
        table
    }

    pub(crate) fn dependencies(&self) -> &[Dependency] {
        &self.dependencies
    }

    pub(crate) const fn root_scope_id() -> ScopeId {
        ScopeId::from_usize(0)
    }

    pub(crate) fn root_scope(&self) -> &Scope {
        &self.scopes_by_id[SymbolTable::root_scope_id()]
    }

    pub(crate) fn symbol_ids_for_scope(&self, scope_id: ScopeId) -> Copied<Keys<SymbolId, ()>> {
        self.scopes_by_id[scope_id].symbols_by_name.keys().copied()
    }

    pub(crate) fn symbols_for_scope(
        &self,
        scope_id: ScopeId,
    ) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
        SymbolIterator {
            table: self,
            ids: self.symbol_ids_for_scope(scope_id),
        }
    }

    pub(crate) fn root_symbol_ids(&self) -> Copied<Keys<SymbolId, ()>> {
        self.symbol_ids_for_scope(SymbolTable::root_scope_id())
    }

    pub(crate) fn root_symbols(&self) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
        self.symbols_for_scope(SymbolTable::root_scope_id())
    }

    pub(crate) fn child_scope_ids_of(&self, scope_id: ScopeId) -> &[ScopeId] {
        &self.scopes_by_id[scope_id].child_scopes
    }

    pub(crate) fn child_scopes_of(&self, scope_id: ScopeId) -> ScopeIterator<&[ScopeId]> {
        ScopeIterator {
            table: self,
            ids: self.child_scope_ids_of(scope_id),
        }
    }

    pub(crate) fn root_child_scope_ids(&self) -> &[ScopeId] {
        self.child_scope_ids_of(SymbolTable::root_scope_id())
    }

    pub(crate) fn root_child_scopes(&self) -> ScopeIterator<&[ScopeId]> {
        self.child_scopes_of(SymbolTable::root_scope_id())
    }

    pub(crate) fn symbol_id_by_name(&self, scope_id: ScopeId, name: &str) -> Option<SymbolId> {
        let scope = &self.scopes_by_id[scope_id];
        let hash = SymbolTable::hash_name(name);
        let name = Name::new(name);
        scope
            .symbols_by_name
            .raw_entry()
            .from_hash(hash, |symid| self.symbols_by_id[*symid].name == name)
            .map(|(symbol_id, ())| *symbol_id)
    }

    pub(crate) fn symbol_by_name(&self, scope_id: ScopeId, name: &str) -> Option<&Symbol> {
        Some(&self.symbols_by_id[self.symbol_id_by_name(scope_id, name)?])
    }

    pub(crate) fn root_symbol_id_by_name(&self, name: &str) -> Option<SymbolId> {
        self.symbol_id_by_name(SymbolTable::root_scope_id(), name)
    }

    pub(crate) fn root_symbol_by_name(&self, name: &str) -> Option<&Symbol> {
        self.symbol_by_name(SymbolTable::root_scope_id(), name)
    }

    pub(crate) fn definitions(&self, symbol_id: SymbolId) -> &[Definition] {
        self.defs
            .get(&symbol_id)
            .map(std::vec::Vec::as_slice)
            .unwrap_or_default()
    }

    pub(crate) fn all_definitions(&self) -> impl Iterator<Item = (SymbolId, &Definition)> + '_ {
        self.defs
            .iter()
            .flat_map(|(sym_id, defs)| defs.iter().map(move |def| (*sym_id, def)))
    }

    fn add_or_update_symbol(
        &mut self,
        scope_id: ScopeId,
        name: &str,
        flags: SymbolFlags,
    ) -> SymbolId {
        let hash = SymbolTable::hash_name(name);
        let scope = &mut self.scopes_by_id[scope_id];
        let name = Name::new(name);

        let entry = scope
            .symbols_by_name
            .raw_entry_mut()
            .from_hash(hash, |existing| self.symbols_by_id[*existing].name == name);

        match entry {
            RawEntryMut::Occupied(entry) => {
                if let Some(symbol) = self.symbols_by_id.get_mut(*entry.key()) {
                    symbol.flags.insert(flags);
                };
                *entry.key()
            }
            RawEntryMut::Vacant(entry) => {
                let id = self.symbols_by_id.push(Symbol {
                    name,
                    flags,
                    scope_id,
                });
                entry.insert_with_hasher(hash, id, (), |_| hash);
                id
            }
        }
    }

    fn add_child_scope(
        &mut self,
        parent_scope_id: ScopeId,
        name: &str,
        kind: ScopeKind,
    ) -> ScopeId {
        let new_scope_id = self.scopes_by_id.push(Scope {
            name: Name::new(name),
            kind,
            child_scopes: Vec::new(),
            symbols_by_name: Map::default(),
        });
        let parent_scope = &mut self.scopes_by_id[parent_scope_id];
        parent_scope.child_scopes.push(new_scope_id);
        new_scope_id
    }

    fn hash_name(name: &str) -> u64 {
        let mut hasher = FxHasher::default();
        name.hash(&mut hasher);
        hasher.finish()
    }
}
|
||||
|
||||
pub(crate) struct SymbolIterator<'a, I> {
|
||||
table: &'a SymbolTable,
|
||||
ids: I,
|
||||
}
|
||||
|
||||
impl<'a, I> Iterator for SymbolIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = SymbolId>,
|
||||
{
|
||||
type Item = &'a Symbol;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next()?;
|
||||
Some(&self.table.symbols_by_id[id])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.ids.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, I> FusedIterator for SymbolIterator<'a, I> where
|
||||
I: Iterator<Item = SymbolId> + FusedIterator
|
||||
{
|
||||
}
|
||||
|
||||
impl<'a, I> DoubleEndedIterator for SymbolIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = SymbolId> + DoubleEndedIterator,
|
||||
{
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next_back()?;
|
||||
Some(&self.table.symbols_by_id[id])
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ScopeIterator<'a, I> {
|
||||
table: &'a SymbolTable,
|
||||
ids: I,
|
||||
}
|
||||
|
||||
impl<'a, I> Iterator for ScopeIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = ScopeId>,
|
||||
{
|
||||
type Item = &'a Scope;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next()?;
|
||||
Some(&self.table.scopes_by_id[id])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.ids.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, I> FusedIterator for ScopeIterator<'a, I> where I: Iterator<Item = ScopeId> + FusedIterator {}
|
||||
|
||||
impl<'a, I> DoubleEndedIterator for ScopeIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = ScopeId> + DoubleEndedIterator,
|
||||
{
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next_back()?;
|
||||
Some(&self.table.scopes_by_id[id])
|
||||
}
|
||||
}
|
||||
|
||||
struct SymbolTableBuilder {
|
||||
table: SymbolTable,
|
||||
scopes: Vec<ScopeId>,
|
||||
/// the definition whose target(s) we are currently walking
|
||||
current_definition: Option<Definition>,
|
||||
}
|
||||
|
||||
impl SymbolTableBuilder {
|
||||
fn add_or_update_symbol(&mut self, identifier: &str, flags: SymbolFlags) -> SymbolId {
|
||||
self.table
|
||||
.add_or_update_symbol(self.cur_scope(), identifier, flags)
|
||||
}
|
||||
|
||||
fn add_or_update_symbol_with_def(
|
||||
&mut self,
|
||||
identifier: &str,
|
||||
definition: Definition,
|
||||
) -> SymbolId {
|
||||
let symbol_id = self.add_or_update_symbol(identifier, SymbolFlags::IS_DEFINED);
|
||||
self.table
|
||||
.defs
|
||||
.entry(symbol_id)
|
||||
.or_default()
|
||||
.push(definition);
|
||||
symbol_id
|
||||
}
|
||||
|
||||
fn push_scope(&mut self, name: &str, kind: ScopeKind) -> ScopeId {
|
||||
let scope_id = self.table.add_child_scope(self.cur_scope(), name, kind);
|
||||
self.scopes.push(scope_id);
|
||||
scope_id
|
||||
}
|
||||
|
||||
fn pop_scope(&mut self) -> ScopeId {
|
||||
self.scopes
|
||||
.pop()
|
||||
.expect("Scope stack should never be empty")
|
||||
}
|
||||
|
||||
fn cur_scope(&self) -> ScopeId {
|
||||
*self
|
||||
.scopes
|
||||
.last()
|
||||
.expect("Scope stack should never be empty")
|
||||
}
|
||||
|
||||
fn with_type_params(
|
||||
&mut self,
|
||||
name: &str,
|
||||
params: &Option<Box<ast::TypeParams>>,
|
||||
nested: impl FnOnce(&mut Self) -> ScopeId,
|
||||
) -> ScopeId {
|
||||
if let Some(type_params) = params {
|
||||
self.push_scope(name, ScopeKind::Annotation);
|
||||
for type_param in &type_params.type_params {
|
||||
let name = match type_param {
|
||||
ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, .. }) => name,
|
||||
ast::TypeParam::ParamSpec(ast::TypeParamParamSpec { name, .. }) => name,
|
||||
ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple { name, .. }) => name,
|
||||
};
|
||||
self.add_or_update_symbol(name, SymbolFlags::IS_DEFINED);
|
||||
}
|
||||
}
|
||||
let scope_id = nested(self);
|
||||
if params.is_some() {
|
||||
self.pop_scope();
|
||||
}
|
||||
scope_id
|
||||
}
|
||||
}
|
||||
|
||||
impl PreorderVisitor<'_> for SymbolTableBuilder {
    fn visit_expr(&mut self, expr: &ast::Expr) {
        if let ast::Expr::Name(ast::ExprName { id, ctx, .. }) = expr {
            let flags = match ctx {
                ast::ExprContext::Load => SymbolFlags::IS_USED,
                ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
                ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
                ast::ExprContext::Invalid => SymbolFlags::empty(),
            };
            self.add_or_update_symbol(id, flags);
            if flags.contains(SymbolFlags::IS_DEFINED) {
                if let Some(curdef) = self.current_definition.clone() {
                    self.add_or_update_symbol_with_def(id, curdef);
                }
            }
        }
        ast::visitor::preorder::walk_expr(self, expr);
    }

    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
        // TODO need to capture more definition statements here
        match stmt {
            ast::Stmt::ClassDef(node) => {
                let scope_id = self.with_type_params(&node.name, &node.type_params, |builder| {
                    let scope_id = builder.push_scope(&node.name, ScopeKind::Class);
                    ast::visitor::preorder::walk_stmt(builder, stmt);
                    builder.pop_scope();
                    scope_id
                });
                let def = Definition::ClassDef(ClassDefinition {
                    node_key: TypedNodeKey::from_node(node),
                    scope_id,
                });
                self.add_or_update_symbol_with_def(&node.name, def);
            }
            ast::Stmt::FunctionDef(node) => {
                let def = Definition::FunctionDef(TypedNodeKey::from_node(node));
                self.add_or_update_symbol_with_def(&node.name, def);
                self.with_type_params(&node.name, &node.type_params, |builder| {
                    let scope_id = builder.push_scope(&node.name, ScopeKind::Function);
                    ast::visitor::preorder::walk_stmt(builder, stmt);
                    builder.pop_scope();
                    scope_id
                });
            }
            ast::Stmt::Import(ast::StmtImport { names, .. }) => {
                for alias in names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.as_str()
                    } else {
                        alias.name.id.split('.').next().unwrap()
                    };

                    let module = ModuleName::new(&alias.name.id);

                    let def = Definition::Import(ImportDefinition {
                        module: module.clone(),
                    });
                    self.add_or_update_symbol_with_def(symbol_name, def);
                    self.table.dependencies.push(Dependency::Module(module));
                }
            }
            ast::Stmt::ImportFrom(ast::StmtImportFrom {
                module,
                names,
                level,
                ..
            }) => {
                let module = module.as_ref().map(|m| ModuleName::new(&m.id));

                for alias in names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.as_str()
                    } else {
                        alias.name.id.as_str()
                    };
                    let def = Definition::ImportFrom(ImportFromDefinition {
                        module: module.clone(),
                        name: Name::new(&alias.name.id),
                        level: *level,
                    });
                    self.add_or_update_symbol_with_def(symbol_name, def);
                }

                let dependency = if let Some(module) = module {
                    match NonZeroU32::new(*level) {
                        Some(level) => Dependency::Relative {
                            level,
                            module: Some(module),
                        },
                        None => Dependency::Module(module),
                    }
                } else {
                    Dependency::Relative {
                        level: NonZeroU32::new(*level)
                            .expect("Import without a module to have a level > 0"),
                        module,
                    }
                };

                self.table.dependencies.push(dependency);
            }
            ast::Stmt::Assign(node) => {
                debug_assert!(self.current_definition.is_none());
                self.current_definition =
                    Some(Definition::Assignment(TypedNodeKey::from_node(node)));
                ast::visitor::preorder::walk_stmt(self, stmt);
                self.current_definition = None;
            }
            _ => {
                ast::visitor::preorder::walk_stmt(self, stmt);
            }
        }
    }
}

#[derive(Debug, Default)]
pub struct SymbolTablesStorage(KeyValueCache<FileId, Arc<SymbolTable>>);

impl Deref for SymbolTablesStorage {
    type Target = KeyValueCache<FileId, Arc<SymbolTable>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SymbolTablesStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[cfg(test)]
mod tests {
    use textwrap::dedent;

    use crate::parse::Parsed;
    use crate::symbols::ScopeKind;

    use super::{SymbolFlags, SymbolId, SymbolIterator, SymbolTable};

    mod from_ast {
        use super::*;

        fn parse(code: &str) -> Parsed {
            Parsed::from_text(&dedent(code))
        }

        fn names<I>(it: SymbolIterator<I>) -> Vec<&str>
        where
            I: Iterator<Item = SymbolId>,
        {
            let mut symbols: Vec<_> = it.map(|sym| sym.name.as_str()).collect();
            symbols.sort_unstable();
            symbols
        }

        #[test]
        fn empty() {
            let parsed = parse("");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()).len(), 0);
        }

        #[test]
        fn simple() {
            let parsed = parse("x");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["x"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("x").unwrap())
                    .len(),
                0
            );
        }

        #[test]
        fn annotation_only() {
            let parsed = parse("x: int");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["int", "x"]);
            // TODO record definition
        }

        #[test]
        fn import() {
            let parsed = parse("import foo");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["foo"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("foo").unwrap())
                    .len(),
                1
            );
        }

        #[test]
        fn import_sub() {
            let parsed = parse("import foo.bar");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["foo"]);
        }

        #[test]
        fn import_as() {
            let parsed = parse("import foo.bar as baz");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["baz"]);
        }

        #[test]
        fn import_from() {
            let parsed = parse("from bar import foo");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["foo"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("foo").unwrap())
                    .len(),
                1
            );
            assert!(
                table.root_symbol_id_by_name("foo").is_some_and(|sid| {
                    let s = sid.symbol(&table);
                    s.is_defined() || !s.is_used()
                }),
                "symbols that are defined get the defined flag"
            );
        }

        #[test]
        fn assign() {
            let parsed = parse("x = foo");
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["foo", "x"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("x").unwrap())
                    .len(),
                1
            );
            assert!(
                table.root_symbol_id_by_name("foo").is_some_and(|sid| {
                    let s = sid.symbol(&table);
                    !s.is_defined() && s.is_used()
                }),
                "a symbol used but not defined in a scope should have only the used flag"
            );
        }

        #[test]
        fn class_scope() {
            let parsed = parse(
                "
                class C:
                    x = 1
                y = 2
                ",
            );
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["C", "y"]);
            let scopes = table.root_child_scope_ids();
            assert_eq!(scopes.len(), 1);
            let c_scope = scopes[0].scope(&table);
            assert_eq!(c_scope.kind(), ScopeKind::Class);
            assert_eq!(c_scope.name(), "C");
            assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("C").unwrap())
                    .len(),
                1
            );
        }

        #[test]
        fn func_scope() {
            let parsed = parse(
                "
                def func():
                    x = 1
                y = 2
                ",
            );
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["func", "y"]);
            let scopes = table.root_child_scope_ids();
            assert_eq!(scopes.len(), 1);
            let func_scope = scopes[0].scope(&table);
            assert_eq!(func_scope.kind(), ScopeKind::Function);
            assert_eq!(func_scope.name(), "func");
            assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("func").unwrap())
                    .len(),
                1
            );
        }

        #[test]
        fn dupes() {
            let parsed = parse(
                "
                def func():
                    x = 1
                def func():
                    y = 2
                ",
            );
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["func"]);
            let scopes = table.root_child_scope_ids();
            assert_eq!(scopes.len(), 2);
            let func_scope_1 = scopes[0].scope(&table);
            let func_scope_2 = scopes[1].scope(&table);
            assert_eq!(func_scope_1.kind(), ScopeKind::Function);
            assert_eq!(func_scope_1.name(), "func");
            assert_eq!(func_scope_2.kind(), ScopeKind::Function);
            assert_eq!(func_scope_2.name(), "func");
            assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
            assert_eq!(names(table.symbols_for_scope(scopes[1])), vec!["y"]);
            assert_eq!(
                table
                    .definitions(table.root_symbol_id_by_name("func").unwrap())
                    .len(),
                2
            );
        }

        #[test]
        fn generic_func() {
            let parsed = parse(
                "
                def func[T]():
                    x = 1
                ",
            );
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["func"]);
            let scopes = table.root_child_scope_ids();
            assert_eq!(scopes.len(), 1);
            let ann_scope_id = scopes[0];
            let ann_scope = ann_scope_id.scope(&table);
            assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
            assert_eq!(ann_scope.name(), "func");
            assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
            let scopes = table.child_scope_ids_of(ann_scope_id);
            assert_eq!(scopes.len(), 1);
            let func_scope_id = scopes[0];
            let func_scope = func_scope_id.scope(&table);
            assert_eq!(func_scope.kind(), ScopeKind::Function);
            assert_eq!(func_scope.name(), "func");
            assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
        }

        #[test]
        fn generic_class() {
            let parsed = parse(
                "
                class C[T]:
                    x = 1
                ",
            );
            let table = SymbolTable::from_ast(parsed.ast());
            assert_eq!(names(table.root_symbols()), vec!["C"]);
            let scopes = table.root_child_scope_ids();
            assert_eq!(scopes.len(), 1);
            let ann_scope_id = scopes[0];
            let ann_scope = ann_scope_id.scope(&table);
            assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
            assert_eq!(ann_scope.name(), "C");
            assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
            assert!(
                table
                    .symbol_by_name(ann_scope_id, "T")
                    .is_some_and(|s| s.is_defined() && !s.is_used()),
                "type parameters are defined by the scope that introduces them"
            );
            let scopes = table.child_scope_ids_of(ann_scope_id);
            assert_eq!(scopes.len(), 1);
            let func_scope_id = scopes[0];
            let func_scope = func_scope_id.scope(&table);
            assert_eq!(func_scope.kind(), ScopeKind::Class);
            assert_eq!(func_scope.name(), "C");
            assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
        }
    }

    #[test]
    fn insert_same_name_symbol_twice() {
        let mut table = SymbolTable::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let symbol_id_1 = table.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_DEFINED);
        let symbol_id_2 = table.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_USED);
        assert_eq!(symbol_id_1, symbol_id_2);
        assert!(symbol_id_1.symbol(&table).is_used(), "flags must merge");
        assert!(symbol_id_1.symbol(&table).is_defined(), "flags must merge");
    }

    #[test]
    fn insert_different_named_symbols() {
        let mut table = SymbolTable::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let symbol_id_1 = table.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let symbol_id_2 = table.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());
        assert_ne!(symbol_id_1, symbol_id_2);
    }

    #[test]
    fn add_child_scope_with_symbol() {
        let mut table = SymbolTable::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_top = table.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let c_scope = table.add_child_scope(root_scope_id, "C", ScopeKind::Class);
        let foo_symbol_inner = table.add_or_update_symbol(c_scope, "foo", SymbolFlags::empty());
        assert_ne!(foo_symbol_top, foo_symbol_inner);
    }

    #[test]
    fn scope_from_id() {
        let table = SymbolTable::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let scope = root_scope_id.scope(&table);
        assert_eq!(scope.name.as_str(), "<module>");
        assert_eq!(scope.kind, ScopeKind::Module);
    }

    #[test]
    fn symbol_from_id() {
        let mut table = SymbolTable::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_id = table.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let symbol = foo_symbol_id.symbol(&table);
        assert_eq!(symbol.name.as_str(), "foo");
    }
}
@@ -1,600 +0,0 @@
#![allow(dead_code)]
use crate::ast_ids::NodeKey;
use crate::db::{HasJar, QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::symbols::{ScopeId, SymbolId};
use crate::{FxDashMap, FxIndexSet, Name};
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::FxHashMap;

pub(crate) mod infer;

pub(crate) use infer::infer_symbol_type;

/// unique ID for a type
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Type {
    /// the dynamic or gradual type: a statically-unknown set of values
    Any,
    /// the empty set of values
    Never,
    /// unknown type (no annotation)
    /// equivalent to Any, or to object in strict mode
    Unknown,
    /// name is not bound to any value
    Unbound,
    /// a specific function object
    Function(FunctionTypeId),
    /// a specific class object
    Class(ClassTypeId),
    /// the set of Python objects with the given class in their __class__'s method resolution order
    Instance(ClassTypeId),
    Union(UnionTypeId),
    Intersection(IntersectionTypeId),
    // TODO protocols, callable types, overloads, generics, type vars
}

impl Type {
    fn display<'a>(&'a self, store: &'a TypeStore) -> DisplayType<'a> {
        DisplayType { ty: self, store }
    }

    pub const fn is_unbound(&self) -> bool {
        matches!(self, Type::Unbound)
    }

    pub const fn is_unknown(&self) -> bool {
        matches!(self, Type::Unknown)
    }
}

impl From<FunctionTypeId> for Type {
    fn from(id: FunctionTypeId) -> Self {
        Type::Function(id)
    }
}

impl From<UnionTypeId> for Type {
    fn from(id: UnionTypeId) -> Self {
        Type::Union(id)
    }
}

impl From<IntersectionTypeId> for Type {
    fn from(id: IntersectionTypeId) -> Self {
        Type::Intersection(id)
    }
}

// TODO: currently calling `get_function` et al and holding on to the `FunctionTypeRef` will lock a
// shard of this dashmap, for as long as you hold the reference. This may be a problem. We could
// switch to having all the arenas hold Arc, or we could see if we can split up ModuleTypeStore,
// and/or give it inner mutability and finer-grained internal locking.
#[derive(Debug, Default)]
pub struct TypeStore {
    modules: FxDashMap<FileId, ModuleTypeStore>,
}
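A minimal sketch of the locking hazard the comment above describes, assuming dashmap's usual guard semantics: a `Ref` pins its shard's lock for as long as it lives, so mutating the same shard from the same thread while a guard is held can deadlock.

use dashmap::DashMap;

fn main() {
    let map: DashMap<u32, String> = DashMap::new();
    map.insert(1, "one".to_string());

    let guard = map.get(&1).unwrap(); // shard read-locked while `guard` lives
    // map.insert(1, "uno".to_string()); // a same-shard write here can deadlock
    println!("{}", *guard);
    drop(guard); // release the shard lock first...

    map.insert(1, "uno".to_string()); // ...then mutating is safe
}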
impl TypeStore {
    pub fn remove_module(&mut self, file_id: FileId) {
        self.modules.remove(&file_id);
    }

    pub fn cache_symbol_type(&self, file_id: FileId, symbol_id: SymbolId, ty: Type) {
        self.add_or_get_module(file_id)
            .symbol_types
            .insert(symbol_id, ty);
    }

    pub fn cache_node_type(&self, file_id: FileId, node_key: NodeKey, ty: Type) {
        self.add_or_get_module(file_id)
            .node_types
            .insert(node_key, ty);
    }

    pub fn get_cached_symbol_type(&self, file_id: FileId, symbol_id: SymbolId) -> Option<Type> {
        self.try_get_module(file_id)?
            .symbol_types
            .get(&symbol_id)
            .copied()
    }

    pub fn get_cached_node_type(&self, file_id: FileId, node_key: &NodeKey) -> Option<Type> {
        self.try_get_module(file_id)?
            .node_types
            .get(node_key)
            .copied()
    }

    fn add_or_get_module(&self, file_id: FileId) -> ModuleStoreRefMut {
        self.modules
            .entry(file_id)
            .or_insert_with(|| ModuleTypeStore::new(file_id))
    }

    fn get_module(&self, file_id: FileId) -> ModuleStoreRef {
        self.try_get_module(file_id).expect("module should exist")
    }

    fn try_get_module(&self, file_id: FileId) -> Option<ModuleStoreRef> {
        self.modules.get(&file_id)
    }

    fn add_function(&self, file_id: FileId, name: &str, decorators: Vec<Type>) -> FunctionTypeId {
        self.add_or_get_module(file_id)
            .add_function(name, decorators)
    }

    fn add_class(
        &self,
        file_id: FileId,
        name: &str,
        scope_id: ScopeId,
        bases: Vec<Type>,
    ) -> ClassTypeId {
        self.add_or_get_module(file_id)
            .add_class(name, scope_id, bases)
    }

    fn add_union(&mut self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
        self.add_or_get_module(file_id).add_union(elems)
    }

    fn add_intersection(
        &mut self,
        file_id: FileId,
        positive: &[Type],
        negative: &[Type],
    ) -> IntersectionTypeId {
        self.add_or_get_module(file_id)
            .add_intersection(positive, negative)
    }

    fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
        FunctionTypeRef {
            module_store: self.get_module(id.file_id),
            function_id: id.func_id,
        }
    }

    fn get_class(&self, id: ClassTypeId) -> ClassTypeRef {
        ClassTypeRef {
            module_store: self.get_module(id.file_id),
            class_id: id.class_id,
        }
    }

    fn get_union(&self, id: UnionTypeId) -> UnionTypeRef {
        UnionTypeRef {
            module_store: self.get_module(id.file_id),
            union_id: id.union_id,
        }
    }

    fn get_intersection(&self, id: IntersectionTypeId) -> IntersectionTypeRef {
        IntersectionTypeRef {
            module_store: self.get_module(id.file_id),
            intersection_id: id.intersection_id,
        }
    }
}

type ModuleStoreRef<'a> = dashmap::mapref::one::Ref<
    'a,
    FileId,
    ModuleTypeStore,
    std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
>;

type ModuleStoreRefMut<'a> = dashmap::mapref::one::RefMut<
    'a,
    FileId,
    ModuleTypeStore,
    std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
>;

#[derive(Debug)]
pub(crate) struct FunctionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    function_id: ModuleFunctionTypeId,
}

impl<'a> std::ops::Deref for FunctionTypeRef<'a> {
    type Target = FunctionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_function(self.function_id)
    }
}

#[derive(Debug)]
pub(crate) struct ClassTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    class_id: ModuleClassTypeId,
}

impl<'a> std::ops::Deref for ClassTypeRef<'a> {
    type Target = ClassType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_class(self.class_id)
    }
}

#[derive(Debug)]
pub(crate) struct UnionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    union_id: ModuleUnionTypeId,
}

impl<'a> std::ops::Deref for UnionTypeRef<'a> {
    type Target = UnionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_union(self.union_id)
    }
}

#[derive(Debug)]
pub(crate) struct IntersectionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    intersection_id: ModuleIntersectionTypeId,
}

impl<'a> std::ops::Deref for IntersectionTypeRef<'a> {
    type Target = IntersectionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_intersection(self.intersection_id)
    }
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct FunctionTypeId {
    file_id: FileId,
    func_id: ModuleFunctionTypeId,
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct ClassTypeId {
    file_id: FileId,
    class_id: ModuleClassTypeId,
}

impl ClassTypeId {
    fn get_own_class_member<Db>(self, db: &Db, name: &Name) -> QueryResult<Option<Type>>
    where
        Db: SemanticDb + HasJar<SemanticJar>,
    {
        // TODO: this should distinguish instance-only members (e.g. `x: int`) and not return them
        let ClassType { scope_id, .. } = *db.jar()?.type_store.get_class(self);
        let table = db.symbol_table(self.file_id)?;
        if let Some(symbol_id) = table.symbol_id_by_name(scope_id, name) {
            Ok(Some(db.infer_symbol_type(self.file_id, symbol_id)?))
        } else {
            Ok(None)
        }
    }

    // TODO: get_own_instance_member, get_class_member, get_instance_member
}
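`get_own_class_member` above resolves a member in two steps: look the name up in the symbol-table scope of the class body, then infer that symbol's type. A standalone model of that lookup (illustrative types, not the crate's API):

use std::collections::HashMap;

type ScopeId = u32;

struct Model {
    /// stand-in for the symbol table plus inference:
    /// (class-body scope, member name) -> inferred type
    symbols: HashMap<(ScopeId, &'static str), &'static str>,
}

impl Model {
    fn get_own_class_member(&self, class_scope: ScopeId, name: &'static str) -> Option<&'static str> {
        // step 1: find the symbol in the class-body scope; step 2: "infer" its type
        self.symbols.get(&(class_scope, name)).copied()
    }
}

fn main() {
    let mut symbols = HashMap::new();
    symbols.insert((1, "f"), "Function(f)");
    let model = Model { symbols };
    assert_eq!(model.get_own_class_member(1, "f"), Some("Function(f)"));
    assert_eq!(model.get_own_class_member(1, "g"), None); // falls through to Ok(None)
}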
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct UnionTypeId {
    file_id: FileId,
    union_id: ModuleUnionTypeId,
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct IntersectionTypeId {
    file_id: FileId,
    intersection_id: ModuleIntersectionTypeId,
}

#[newtype_index]
struct ModuleFunctionTypeId;

#[newtype_index]
struct ModuleClassTypeId;

#[newtype_index]
struct ModuleUnionTypeId;

#[newtype_index]
struct ModuleIntersectionTypeId;

#[derive(Debug)]
struct ModuleTypeStore {
    file_id: FileId,
    /// arena of all function types defined in this module
    functions: IndexVec<ModuleFunctionTypeId, FunctionType>,
    /// arena of all class types defined in this module
    classes: IndexVec<ModuleClassTypeId, ClassType>,
    /// arena of all union types created in this module
    unions: IndexVec<ModuleUnionTypeId, UnionType>,
    /// arena of all intersection types created in this module
    intersections: IndexVec<ModuleIntersectionTypeId, IntersectionType>,
    /// cached types of symbols in this module
    symbol_types: FxHashMap<SymbolId, Type>,
    /// cached types of AST nodes in this module
    node_types: FxHashMap<NodeKey, Type>,
}

impl ModuleTypeStore {
    fn new(file_id: FileId) -> Self {
        Self {
            file_id,
            functions: IndexVec::default(),
            classes: IndexVec::default(),
            unions: IndexVec::default(),
            intersections: IndexVec::default(),
            symbol_types: FxHashMap::default(),
            node_types: FxHashMap::default(),
        }
    }

    fn add_function(&mut self, name: &str, decorators: Vec<Type>) -> FunctionTypeId {
        let func_id = self.functions.push(FunctionType {
            name: Name::new(name),
            decorators,
        });
        FunctionTypeId {
            file_id: self.file_id,
            func_id,
        }
    }

    fn add_class(&mut self, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> ClassTypeId {
        let class_id = self.classes.push(ClassType {
            name: Name::new(name),
            scope_id,
            // TODO: if no bases are given, that should imply [object]
            bases,
        });
        ClassTypeId {
            file_id: self.file_id,
            class_id,
        }
    }

    fn add_union(&mut self, elems: &[Type]) -> UnionTypeId {
        let union_id = self.unions.push(UnionType {
            elements: elems.iter().copied().collect(),
        });
        UnionTypeId {
            file_id: self.file_id,
            union_id,
        }
    }

    fn add_intersection(&mut self, positive: &[Type], negative: &[Type]) -> IntersectionTypeId {
        let intersection_id = self.intersections.push(IntersectionType {
            positive: positive.iter().copied().collect(),
            negative: negative.iter().copied().collect(),
        });
        IntersectionTypeId {
            file_id: self.file_id,
            intersection_id,
        }
    }

    fn get_function(&self, func_id: ModuleFunctionTypeId) -> &FunctionType {
        &self.functions[func_id]
    }

    fn get_class(&self, class_id: ModuleClassTypeId) -> &ClassType {
        &self.classes[class_id]
    }

    fn get_union(&self, union_id: ModuleUnionTypeId) -> &UnionType {
        &self.unions[union_id]
    }

    fn get_intersection(&self, intersection_id: ModuleIntersectionTypeId) -> &IntersectionType {
        &self.intersections[intersection_id]
    }
}

#[derive(Copy, Clone, Debug)]
struct DisplayType<'a> {
    ty: &'a Type,
    store: &'a TypeStore,
}

impl std::fmt::Display for DisplayType<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.ty {
            Type::Any => f.write_str("Any"),
            Type::Never => f.write_str("Never"),
            Type::Unknown => f.write_str("Unknown"),
            Type::Unbound => f.write_str("Unbound"),
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class_id) => {
                f.write_str("Literal[")?;
                f.write_str(self.store.get_class(*class_id).name())?;
                f.write_str("]")
            }
            Type::Instance(class_id) => f.write_str(self.store.get_class(*class_id).name()),
            Type::Function(func_id) => f.write_str(self.store.get_function(*func_id).name()),
            Type::Union(union_id) => self
                .store
                .get_module(union_id.file_id)
                .get_union(union_id.union_id)
                .display(f, self.store),
            Type::Intersection(int_id) => self
                .store
                .get_module(int_id.file_id)
                .get_intersection(int_id.intersection_id)
                .display(f, self.store),
        }
    }
}

#[derive(Debug)]
pub(crate) struct ClassType {
    /// Name of the class at definition
    name: Name,
    /// `ScopeId` of the class body
    pub(crate) scope_id: ScopeId,
    /// Types of all class bases
    bases: Vec<Type>,
}

impl ClassType {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn bases(&self) -> &[Type] {
        self.bases.as_slice()
    }
}

#[derive(Debug)]
pub(crate) struct FunctionType {
    name: Name,
    decorators: Vec<Type>,
}

impl FunctionType {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn decorators(&self) -> &[Type] {
        self.decorators.as_slice()
    }
}

#[derive(Debug)]
pub(crate) struct UnionType {
    // the union type includes values in any of these types
    elements: FxIndexSet<Type>,
}

impl UnionType {
    fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
        f.write_str("(")?;
        let mut first = true;
        for ty in &self.elements {
            if !first {
                f.write_str(" | ")?;
            };
            first = false;
            write!(f, "{}", ty.display(store))?;
        }
        f.write_str(")")
    }
}

// Negation types aren't expressible in annotations, and are most likely to arise from type
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
// have to represent it as a single-element intersection if it did) in exchange for better
// efficiency in the not-within-intersection case.
#[derive(Debug)]
pub(crate) struct IntersectionType {
    // the intersection type includes only values in all of these types
    positive: FxIndexSet<Type>,
    // negated elements of the intersection, e.g. the ~C3 in (C1 & C2 & ~C3)
    negative: FxIndexSet<Type>,
}

impl IntersectionType {
    fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
        f.write_str("(")?;
        let mut first = true;
        for (neg, ty) in self
            .positive
            .iter()
            .map(|ty| (false, ty))
            .chain(self.negative.iter().map(|ty| (true, ty)))
        {
            if !first {
                f.write_str(" & ")?;
            };
            first = false;
            if neg {
                f.write_str("~")?;
            };
            write!(f, "{}", ty.display(store))?;
        }
        f.write_str(")")
    }
}
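A standalone sketch (illustrative names, not the crate's API) of the convention described in the comment above: negation lives inside the intersection as a second set, so C1 & ~C2 is represented as { positive: {C1}, negative: {C2} }, and a bare ~C2 would become the single-element intersection { positive: {}, negative: {C2} }.

use std::collections::BTreeSet;

#[derive(Debug, Default)]
struct Intersection {
    positive: BTreeSet<&'static str>,
    negative: BTreeSet<&'static str>,
}

impl Intersection {
    fn display(&self) -> String {
        // positive elements as-is, negative elements prefixed with `~`
        let parts: Vec<String> = self
            .positive
            .iter()
            .map(|t| t.to_string())
            .chain(self.negative.iter().map(|t| format!("~{t}")))
            .collect();
        format!("({})", parts.join(" & "))
    }
}

fn main() {
    let i = Intersection {
        positive: ["C1"].into(),
        negative: ["C2"].into(),
    };
    assert_eq!(i.display(), "(C1 & ~C2)");
}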
#[cfg(test)]
mod tests {
    use crate::files::Files;
    use crate::symbols::SymbolTable;
    use crate::types::{Type, TypeStore};
    use crate::FxIndexSet;
    use std::path::Path;

    #[test]
    fn add_class() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
        assert_eq!(store.get_class(id).name(), "C");
        let inst = Type::Instance(id);
        assert_eq!(format!("{}", inst.display(&store)), "C");
    }

    #[test]
    fn add_function() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let id = store.add_function(file_id, "func", vec![Type::Unknown]);
        assert_eq!(store.get_function(id).name(), "func");
        assert_eq!(store.get_function(id).decorators(), vec![Type::Unknown]);
        let func = Type::Function(id);
        assert_eq!(format!("{}", func.display(&store)), "func");
    }

    #[test]
    fn add_union() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let elems = vec![Type::Instance(c1), Type::Instance(c2)];
        let id = store.add_union(file_id, &elems);
        assert_eq!(
            store.get_union(id).elements,
            elems.into_iter().collect::<FxIndexSet<_>>()
        );
        let union = Type::Union(id);
        assert_eq!(format!("{}", union.display(&store)), "(C1 | C2)");
    }

    #[test]
    fn add_intersection() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
        let pos = vec![Type::Instance(c1), Type::Instance(c2)];
        let neg = vec![Type::Instance(c3)];
        let id = store.add_intersection(file_id, &pos, &neg);
        assert_eq!(
            store.get_intersection(id).positive,
            pos.into_iter().collect::<FxIndexSet<_>>()
        );
        assert_eq!(
            store.get_intersection(id).negative,
            neg.into_iter().collect::<FxIndexSet<_>>()
        );
        let intersection = Type::Intersection(id);
        assert_eq!(
            format!("{}", intersection.display(&store)),
            "(C1 & C2 & ~C3)"
        );
    }
}
@@ -1,265 +0,0 @@
#![allow(dead_code)]

use ruff_python_ast::AstNode;

use crate::db::{HasJar, QueryResult, SemanticDb, SemanticJar};
use crate::module::ModuleName;
use crate::symbols::{ClassDefinition, Definition, ImportFromDefinition, SymbolId};
use crate::types::Type;
use crate::FileId;
use ruff_python_ast as ast;

// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_symbol_type<Db>(db: &Db, file_id: FileId, symbol_id: SymbolId) -> QueryResult<Type>
where
    Db: SemanticDb + HasJar<SemanticJar>,
{
    let symbols = db.symbol_table(file_id)?;
    let defs = symbols.definitions(symbol_id);

    if let Some(ty) = db
        .jar()?
        .type_store
        .get_cached_symbol_type(file_id, symbol_id)
    {
        return Ok(ty);
    }

    // TODO handle multiple defs, conditional defs...
    assert_eq!(defs.len(), 1);
    let type_store = &db.jar()?.type_store;

    let ty = match &defs[0] {
        Definition::ImportFrom(ImportFromDefinition {
            module,
            name,
            level,
        }) => {
            // TODO relative imports
            assert!(matches!(*level, 0));
            let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
            if let Some(module) = db.resolve_module(module_name)? {
                let remote_file_id = module.path(db)?.file();
                let remote_symbols = db.symbol_table(remote_file_id)?;
                if let Some(remote_symbol_id) = remote_symbols.root_symbol_id_by_name(name) {
                    db.infer_symbol_type(remote_file_id, remote_symbol_id)?
                } else {
                    Type::Unknown
                }
            } else {
                Type::Unknown
            }
        }
        Definition::ClassDef(ClassDefinition { node_key, scope_id }) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                ty
            } else {
                let parsed = db.parse(file_id)?;
                let ast = parsed.ast();
                let node = node_key.resolve_unwrap(ast.as_any_node_ref());

                let mut bases = Vec::with_capacity(node.bases().len());

                for base in node.bases() {
                    bases.push(infer_expr_type(db, file_id, base)?);
                }

                let ty =
                    Type::Class(type_store.add_class(file_id, &node.name.id, *scope_id, bases));
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                ty
            }
        }
        Definition::FunctionDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                ty
            } else {
                let parsed = db.parse(file_id)?;
                let ast = parsed.ast();
                let node = node_key
                    .resolve(ast.as_any_node_ref())
                    .expect("node key should resolve");

                let decorator_tys = node
                    .decorator_list
                    .iter()
                    .map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
                    .collect::<QueryResult<_>>()?;

                let ty = type_store
                    .add_function(file_id, &node.name.id, decorator_tys)
                    .into();
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                ty
            }
        }
        Definition::Assignment(node_key) => {
            let parsed = db.parse(file_id)?;
            let ast = parsed.ast();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO handle unpacking assignment correctly
            infer_expr_type(db, file_id, &node.value)?
        }
        _ => todo!("other kinds of definitions"),
    };

    type_store.cache_symbol_type(file_id, symbol_id, ty);

    // TODO record dependencies
    Ok(ty)
}

fn infer_expr_type<Db>(db: &Db, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type>
where
    Db: SemanticDb + HasJar<SemanticJar>,
{
    // TODO cache the resolution of the type on the node
    let symbols = db.symbol_table(file_id)?;
    match expr {
        ast::Expr::Name(name) => {
            if let Some(symbol_id) = symbols.root_symbol_id_by_name(&name.id) {
                db.infer_symbol_type(file_id, symbol_id)
            } else {
                Ok(Type::Unknown)
            }
        }
        _ => todo!("full expression type resolution"),
    }
}
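`infer_symbol_type` above follows a check-then-compute-then-write-back caching pattern: consult the per-file cache first, run real inference only on a miss, and store the result so repeated queries for the same symbol are cheap. A minimal standalone sketch of that pattern (illustrative types, not the crate's API):

use std::collections::HashMap;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct SymbolId(u32);

#[derive(Copy, Clone, Debug, PartialEq)]
enum Ty {
    Unknown,
    Int,
}

struct Store {
    cache: HashMap<SymbolId, Ty>,
    computed: u32, // counts how often "real" inference ran
}

impl Store {
    fn infer_symbol_type(&mut self, id: SymbolId) -> Ty {
        if let Some(ty) = self.cache.get(&id) {
            return *ty; // cache hit: no recomputation
        }
        self.computed += 1;
        let ty = if id.0 == 0 { Ty::Int } else { Ty::Unknown }; // stand-in for real inference
        self.cache.insert(id, ty); // write back, mirroring cache_symbol_type
        ty
    }
}

fn main() {
    let mut store = Store { cache: HashMap::new(), computed: 0 };
    assert_eq!(store.infer_symbol_type(SymbolId(0)), Ty::Int);
    assert_eq!(store.infer_symbol_type(SymbolId(0)), Ty::Int); // served from cache
    assert_eq!(store.computed, 1);
}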
#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticDb, SemanticJar};
    use crate::module::{ModuleName, ModuleSearchPath, ModuleSearchPathKind};
    use crate::types::Type;
    use crate::Name;

    // TODO with virtual filesystem we shouldn't have to write files to disk for these
    // tests

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: ModuleSearchPath,
    }

    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);

        let roots = vec![src.clone()];

        let mut db = TestDb::default();
        db.set_module_search_paths(roots);

        Ok(TestCase { temp_dir, db, src })
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let a_path = case.src.path().join("a.py");
        let b_path = case.src.path().join("b.py");
        std::fs::write(a_path, "from b import C as D; E = D")?;
        std::fs::write(b_path, "class C: pass")?;
        let a_file = db
            .resolve_module(ModuleName::new("a"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let a_syms = db.symbol_table(a_file)?;
        let e_sym = a_syms
            .root_symbol_id_by_name("E")
            .expect("E symbol should be found");

        let ty = db.infer_symbol_type(a_file, e_sym)?;

        let jar = HasJar::<SemanticJar>::jar(db)?;
        assert!(matches!(ty, Type::Class(_)));
        assert_eq!(format!("{}", ty.display(&jar.type_store)), "Literal[C]");

        Ok(())
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class Base: pass\nclass Sub(Base): pass")?;
        let file = db
            .resolve_module(ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = db.symbol_table(file)?;
        let sym = syms
            .root_symbol_id_by_name("Sub")
            .expect("Sub symbol should be found");

        let ty = db.infer_symbol_type(file, sym)?;

        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };
        let jar = HasJar::<SemanticJar>::jar(db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class C:\n def f(self): pass")?;
        let file = db
            .resolve_module(ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = db.symbol_table(file)?;
        let sym = syms
            .root_symbol_id_by_name("C")
            .expect("C symbol should be found");

        let ty = db.infer_symbol_type(file, sym)?;

        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(db, &Name::new("f"))
            .expect("C.f to resolve");

        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }
}
@@ -1,77 +0,0 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use crate::program::{FileChangeKind, FileWatcherChange};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
            match changes {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if path.is_file() {
                            changes.push(FileWatcherChange::new(path, change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}
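The blanket `impl<F> EventHandler for F` above is what lets callers pass a plain closure instead of writing a trait impl. A self-contained sketch of that pattern (illustrative names; the real handler receives `Vec<FileWatcherChange>` from the notify callback):

trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<String>);
}

// blanket impl: any matching closure automatically satisfies the trait
impl<F> EventHandler for F
where
    F: Fn(Vec<String>) + Send + 'static,
{
    fn handle(&self, changes: Vec<String>) {
        let f = self;
        f(changes);
    }
}

fn spawn_watcher(handler: Box<dyn EventHandler>) {
    // stand-in for the notify callback: deliver one synthetic change
    handler.handle(vec!["src/main.rs".to_string()]);
}

fn main() {
    // a plain closure is accepted thanks to the blanket impl
    spawn_watcher(Box::new(|changes: Vec<String>| {
        for change in changes {
            println!("changed: {change}");
        }
    }));
}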
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.4.3"
|
||||
version = "0.2.1"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -20,7 +20,6 @@ ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_notebook = { path = "../ruff_notebook" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||
ruff_server = { path = "../ruff_server" }
|
||||
ruff_source_file = { path = "../ruff_source_file" }
|
||||
ruff_text_size = { path = "../ruff_text_size" }
|
||||
ruff_workspace = { path = "../ruff_workspace" }
|
||||
@@ -53,16 +52,15 @@ tempfile = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
tracing-subscriber = { workspace = true, features = ["registry"] }
|
||||
tracing-tree = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
wild = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
# Enable test rules during development
|
||||
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
|
||||
assert_cmd = { workspace = true }
|
||||
# Avoid writing colored snapshots when running tests from the terminal
|
||||
colored = { workspace = true, features = ["no-color"] }
|
||||
colored = { workspace = true, features = ["no-color"]}
|
||||
insta = { workspace = true, features = ["filters", "json"] }
|
||||
insta-cmd = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -28,52 +28,6 @@ use ruff_workspace::configuration::{Configuration, RuleSelection};
|
||||
use ruff_workspace::options::{Options, PycodestyleOptions};
|
||||
use ruff_workspace::resolver::ConfigurationTransformer;
|
||||
|
||||
/// All configuration options that can be passed "globally",
|
||||
/// i.e., can be passed to all subcommands
|
||||
#[derive(Debug, Default, Clone, clap::Args)]
|
||||
pub struct GlobalConfigArgs {
|
||||
#[clap(flatten)]
|
||||
log_level_args: LogLevelArgs,
|
||||
/// Either a path to a TOML configuration file (`pyproject.toml` or `ruff.toml`),
|
||||
/// or a TOML `<KEY> = <VALUE>` pair
|
||||
/// (such as you might find in a `ruff.toml` configuration file)
|
||||
/// overriding a specific configuration option.
|
||||
/// Overrides of individual settings using this option always take precedence
|
||||
/// over all configuration files, including configuration files that were also
|
||||
/// specified using `--config`.
|
||||
#[arg(
|
||||
long,
|
||||
action = clap::ArgAction::Append,
|
||||
value_name = "CONFIG_OPTION",
|
||||
value_parser = ConfigArgumentParser,
|
||||
global = true,
|
||||
help_heading = "Global options",
|
||||
)]
|
||||
pub config: Vec<SingleConfigArgument>,
|
||||
/// Ignore all configuration files.
|
||||
//
|
||||
// Note: We can't mark this as conflicting with `--config` here
|
||||
// as `--config` can be used for specifying configuration overrides
|
||||
// as well as configuration files.
|
||||
// Specifying a configuration file conflicts with `--isolated`;
|
||||
// specifying a configuration override does not.
|
||||
// If a user specifies `ruff check --isolated --config=ruff.toml`,
|
||||
// we emit an error later on, after the initial parsing by clap.
|
||||
#[arg(long, help_heading = "Global options", global = true)]
|
||||
pub isolated: bool,
|
||||
}
|
||||
|
||||
impl GlobalConfigArgs {
|
||||
pub fn log_level(&self) -> LogLevel {
|
||||
LogLevel::from(&self.log_level_args)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn partition(self) -> (LogLevel, Vec<SingleConfigArgument>, bool) {
|
||||
(self.log_level(), self.config, self.isolated)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
@@ -84,9 +38,9 @@ impl GlobalConfigArgs {
|
||||
#[command(version)]
|
||||
pub struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Command,
|
||||
pub command: Command,
|
||||
#[clap(flatten)]
|
||||
pub(crate) global_options: GlobalConfigArgs,
|
||||
pub log_level_args: LogLevelArgs,
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
@@ -109,6 +63,10 @@ pub enum Command {
|
||||
/// Output format
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
|
||||
/// Output format (Deprecated: Use `--output-format` instead).
|
||||
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
|
||||
format: Option<HelpFormat>,
|
||||
},
|
||||
/// List or describe the available configuration options.
|
||||
Config { option: Option<String> },
|
||||
@@ -117,6 +75,10 @@ pub enum Command {
|
||||
/// Output format
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
|
||||
/// Output format (Deprecated: Use `--output-format` instead).
|
||||
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
|
||||
format: Option<HelpFormat>,
|
||||
},
|
||||
/// Clear any caches in the current directory and any subdirectories.
|
||||
#[clap(alias = "--clean")]
|
||||
@@ -126,8 +88,6 @@ pub enum Command {
|
||||
GenerateShellCompletion { shell: clap_complete_command::Shell },
|
||||
/// Run the Ruff formatter on the given files or directories.
|
||||
Format(FormatCommand),
|
||||
/// Run the language server.
|
||||
Server(ServerCommand),
|
||||
/// Display Ruff's version
|
||||
Version {
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
@@ -190,7 +150,7 @@ pub struct CheckCommand {
|
||||
pub output_format: Option<SerializationFormat>,
|
||||
|
||||
/// Specify file to write the linter output to (default: stdout).
|
||||
#[arg(short, long, env = "RUFF_OUTPUT_FILE")]
|
||||
#[arg(short, long)]
|
||||
pub output_file: Option<PathBuf>,
|
||||
/// The minimum Python version that should be supported.
|
||||
#[arg(long, value_enum)]
|
||||
@@ -201,6 +161,20 @@ pub struct CheckCommand {
|
||||
preview: bool,
|
||||
#[clap(long, overrides_with("preview"), hide = true)]
|
||||
no_preview: bool,
|
||||
/// Either a path to a TOML configuration file (`pyproject.toml` or `ruff.toml`),
|
||||
/// or a TOML `<KEY> = <VALUE>` pair
|
||||
/// (such as you might find in a `ruff.toml` configuration file)
|
||||
/// overriding a specific configuration option.
|
||||
/// Overrides of individual settings using this option always take precedence
|
||||
/// over all configuration files, including configuration files that were also
|
||||
/// specified using `--config`.
|
||||
#[arg(
|
||||
long,
|
||||
action = clap::ArgAction::Append,
|
||||
value_name = "CONFIG_OPTION",
|
||||
value_parser = ConfigArgumentParser,
|
||||
)]
|
||||
pub config: Vec<SingleConfigArgument>,
|
||||
/// Comma-separated list of rule codes to enable (or ALL, to enable all rules).
|
||||
#[arg(
|
||||
long,
|
||||
@@ -332,13 +306,24 @@ pub struct CheckCommand {
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
|
||||
pub no_cache: bool,
|
||||
/// Ignore all configuration files.
|
||||
//
|
||||
// Note: We can't mark this as conflicting with `--config` here
|
||||
// as `--config` can be used for specifying configuration overrides
|
||||
// as well as configuration files.
|
||||
// Specifying a configuration file conflicts with `--isolated`;
|
||||
// specifying a configuration override does not.
|
||||
// If a user specifies `ruff check --isolated --config=ruff.toml`,
|
||||
// we emit an error later on, after the initial parsing by clap.
|
||||
#[arg(long, help_heading = "Miscellaneous")]
|
||||
pub isolated: bool,
|
||||
/// Path to the cache directory.
|
||||
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
/// The name of the file when passing it through stdin.
|
||||
#[arg(long, help_heading = "Miscellaneous")]
|
||||
pub stdin_filename: Option<PathBuf>,
|
||||
/// List of mappings from file extension to language (one of `python`, `ipynb`, `pyi`). For
|
||||
/// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]). For
|
||||
/// example, to treat `.ipy` files as IPython notebooks, use `--extension ipy:ipynb`.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub extension: Option<Vec<ExtensionPair>>,
|
||||
@@ -423,6 +408,20 @@ pub struct FormatCommand {
|
||||
/// difference between the current file and how the formatted file would look like.
|
||||
#[arg(long)]
|
||||
pub diff: bool,
|
||||
/// Either a path to a TOML configuration file (`pyproject.toml` or `ruff.toml`),
|
||||
/// or a TOML `<KEY> = <VALUE>` pair
|
||||
/// (such as you might find in a `ruff.toml` configuration file)
|
||||
/// overriding a specific configuration option.
|
||||
/// Overrides of individual settings using this option always take precedence
|
||||
/// over all configuration files, including configuration files that were also
|
||||
/// specified using `--config`.
|
||||
#[arg(
|
||||
long,
|
||||
action = clap::ArgAction::Append,
|
||||
value_name = "CONFIG_OPTION",
|
||||
value_parser = ConfigArgumentParser,
|
||||
)]
|
||||
pub config: Vec<SingleConfigArgument>,
|
||||
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
|
||||
@@ -463,10 +462,21 @@ pub struct FormatCommand {
|
||||
/// Set the line-length.
|
||||
#[arg(long, help_heading = "Format configuration")]
|
||||
pub line_length: Option<LineLength>,
|
||||
/// Ignore all configuration files.
|
||||
//
|
||||
// Note: We can't mark this as conflicting with `--config` here
|
||||
// as `--config` can be used for specifying configuration overrides
|
||||
// as well as configuration files.
|
||||
// Specifying a configuration file conflicts with `--isolated`;
|
||||
// specifying a configuration override does not.
|
||||
// If a user specifies `ruff check --isolated --config=ruff.toml`,
|
||||
// we emit an error later on, after the initial parsing by clap.
|
||||
#[arg(long, help_heading = "Miscellaneous")]
|
||||
pub isolated: bool,
|
/// The name of the file when passing it through stdin.
#[arg(long, help_heading = "Miscellaneous")]
pub stdin_filename: Option<PathBuf>,
/// List of mappings from file extension to language (one of `python`, `ipynb`, `pyi`). For
/// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]). For
/// example, to treat `.ipy` files as IPython notebooks, use `--extension ipy:ipynb`.
#[arg(long, value_delimiter = ',')]
pub extension: Option<Vec<ExtensionPair>>,
@@ -496,13 +506,6 @@ pub struct FormatCommand {
pub range: Option<FormatRange>,
}

#[derive(Copy, Clone, Debug, clap::Parser)]
pub struct ServerCommand {
/// Enable preview mode; required for regular operation
#[arg(long)]
pub(crate) preview: bool,
}

#[derive(Debug, Clone, Copy, clap::ValueEnum)]
pub enum HelpFormat {
Text,
@@ -510,7 +513,7 @@ pub enum HelpFormat {
}

#[allow(clippy::module_name_repetitions)]
#[derive(Debug, Default, Clone, clap::Args)]
#[derive(Debug, clap::Args)]
pub struct LogLevelArgs {
/// Enable verbose logging.
#[arg(
@@ -558,10 +561,6 @@ impl From<&LogLevelArgs> for LogLevel {
/// Configuration-related arguments passed via the CLI.
#[derive(Default)]
pub struct ConfigArguments {
/// Whether the user specified --isolated on the command line
pub(crate) isolated: bool,
/// The logging level to be used, derived from command-line arguments passed
pub(crate) log_level: LogLevel,
/// Path to a pyproject.toml or ruff.toml configuration file (etc.).
/// Either 0 or 1 configuration file paths may be provided on the command line.
config_file: Option<PathBuf>,
@@ -582,19 +581,21 @@ impl ConfigArguments {
}

fn from_cli_arguments(
global_options: GlobalConfigArgs,
config_options: Vec<SingleConfigArgument>,
per_flag_overrides: ExplicitConfigOverrides,
isolated: bool,
) -> anyhow::Result<Self> {
let (log_level, config_options, isolated) = global_options.partition();
let mut config_file: Option<PathBuf> = None;
let mut overrides = Configuration::default();
let mut new = Self {
per_flag_overrides,
..Self::default()
};

for option in config_options {
match option {
SingleConfigArgument::SettingsOverride(overridden_option) => {
let overridden_option = Arc::try_unwrap(overridden_option)
.unwrap_or_else(|option| option.deref().clone());
overrides = overrides.combine(Configuration::from_options(
new.overrides = new.overrides.combine(Configuration::from_options(
overridden_option,
None,
&path_dedot::CWD,
@@ -613,7 +614,7 @@ The argument `--config={}` cannot be used with `--isolated`
path.display()
);
}
if let Some(ref config_file) = config_file {
if let Some(ref config_file) = new.config_file {
let (first, second) = (config_file.display(), path.display());
bail!(
"\
@@ -624,17 +625,11 @@ You cannot specify more than one configuration file on the command line.
"
);
}
config_file = Some(path);
new.config_file = Some(path);
}
}
}
Ok(Self {
isolated,
log_level,
config_file,
overrides,
per_flag_overrides,
})
Ok(new)
}
}

@@ -648,10 +643,7 @@ impl ConfigurationTransformer for ConfigArguments {
impl CheckCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(
self,
global_options: GlobalConfigArgs,
) -> anyhow::Result<(CheckArguments, ConfigArguments)> {
pub fn partition(self) -> anyhow::Result<(CheckArguments, ConfigArguments)> {
let check_arguments = CheckArguments {
add_noqa: self.add_noqa,
diff: self.diff,
@@ -660,6 +652,7 @@ impl CheckCommand {
exit_zero: self.exit_zero,
files: self.files,
ignore_noqa: self.ignore_noqa,
isolated: self.isolated,
no_cache: self.no_cache,
output_file: self.output_file,
show_files: self.show_files,
@@ -703,7 +696,8 @@ impl CheckCommand {
extension: self.extension,
};

let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
let config_args =
ConfigArguments::from_cli_arguments(self.config, cli_overrides, self.isolated)?;
Ok((check_arguments, config_args))
}
}
@@ -711,14 +705,12 @@ impl CheckCommand {
impl FormatCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(
self,
global_options: GlobalConfigArgs,
) -> anyhow::Result<(FormatArguments, ConfigArguments)> {
pub fn partition(self) -> anyhow::Result<(FormatArguments, ConfigArguments)> {
let format_arguments = FormatArguments {
check: self.check,
diff: self.diff,
files: self.files,
isolated: self.isolated,
no_cache: self.no_cache,
stdin_filename: self.stdin_filename,
range: self.range,
@@ -738,7 +730,8 @@ impl FormatCommand {
..ExplicitConfigOverrides::default()
};

let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
let config_args =
ConfigArguments::from_cli_arguments(self.config, cli_overrides, self.isolated)?;
Ok((format_arguments, config_args))
}
}
@@ -752,34 +745,38 @@ fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
}
}

/// Enumeration of various ways in which a --config CLI flag
/// could be invalid
#[derive(Debug)]
enum InvalidConfigFlagReason {
InvalidToml(toml::de::Error),
/// It was valid TOML, but not a valid ruff config file.
/// E.g. the user tried to select a rule that doesn't exist,
/// or tried to enable a setting that doesn't exist
ValidTomlButInvalidRuffSchema(toml::de::Error),
/// It was a valid ruff config file, but the user tried to pass a
/// value for `extend` as part of the config override.
/// `extend` is special, because it affects which config files we look at
/// in the first place. We currently only parse --config overrides *after*
/// we've combined them with all the arguments from the various config files
/// that we found, so trying to override `extend` as part of a --config
/// override is forbidden.
ExtendPassedViaConfigFlag,
enum TomlParseFailureKind {
SyntaxError,
UnknownOption,
}

impl InvalidConfigFlagReason {
const fn description(&self) -> &'static str {
match self {
Self::InvalidToml(_) => "The supplied argument is not valid TOML",
Self::ValidTomlButInvalidRuffSchema(_) => {
impl std::fmt::Display for TomlParseFailureKind {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let display = match self {
Self::SyntaxError => "The supplied argument is not valid TOML",
Self::UnknownOption => {
"Could not parse the supplied argument as a `ruff.toml` configuration option"
}
Self::ExtendPassedViaConfigFlag => "Cannot include `extend` in a --config flag value",
}
};
write!(f, "{display}")
}
}

#[derive(Debug)]
struct TomlParseFailure {
kind: TomlParseFailureKind,
underlying_error: toml::de::Error,
}

impl std::fmt::Display for TomlParseFailure {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let TomlParseFailure {
kind,
underlying_error,
} = self;
let display = format!("{kind}:\n\n{underlying_error}");
write!(f, "{}", display.trim_end())
}
}
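// Illustration of the `Display` output above (hypothetical input; the exact
// `toml::de::Error` wording is an assumption):
//
//   --config "line-length = 'abc'"
//
// would render roughly as:
//
//   Could not parse the supplied argument as a `ruff.toml` configuration option:
//
//   invalid type: string "abc", expected usize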
@@ -821,38 +818,27 @@ impl TypedValueParser for ConfigArgumentParser {
arg: Option<&clap::Arg>,
value: &std::ffi::OsStr,
) -> Result<Self::Value, clap::Error> {
// Convert to UTF-8.
let Some(value) = value.to_str() else {
// But respect non-UTF-8 paths.
let path_to_config_file = PathBuf::from(value);
if path_to_config_file.is_file() {
return Ok(SingleConfigArgument::FilePath(path_to_config_file));
}
return Err(clap::Error::new(clap::error::ErrorKind::InvalidUtf8));
};

// Expand environment variables and tildes.
if let Ok(path_to_config_file) =
shellexpand::full(value).map(|config| PathBuf::from(&*config))
{
if path_to_config_file.is_file() {
return Ok(SingleConfigArgument::FilePath(path_to_config_file));
}
let path_to_config_file = PathBuf::from(value);
if path_to_config_file.exists() {
return Ok(SingleConfigArgument::FilePath(path_to_config_file));
}

let config_parse_error = match toml::Table::from_str(value) {
Ok(table) => match table.try_into::<Options>() {
Ok(option) => {
if option.extend.is_none() {
return Ok(SingleConfigArgument::SettingsOverride(Arc::new(option)));
}
InvalidConfigFlagReason::ExtendPassedViaConfigFlag
}
Err(underlying_error) => {
InvalidConfigFlagReason::ValidTomlButInvalidRuffSchema(underlying_error)
}
let value = value
.to_str()
.ok_or_else(|| clap::Error::new(clap::error::ErrorKind::InvalidUtf8))?;

let toml_parse_error = match toml::Table::from_str(value) {
Ok(table) => match table.try_into() {
Ok(option) => return Ok(SingleConfigArgument::SettingsOverride(Arc::new(option))),
Err(underlying_error) => TomlParseFailure {
kind: TomlParseFailureKind::UnknownOption,
underlying_error,
},
},
Err(underlying_error) => TomlParseFailure {
kind: TomlParseFailureKind::SyntaxError,
underlying_error,
},
Err(underlying_error) => InvalidConfigFlagReason::InvalidToml(underlying_error),
};
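// To summarize the fallthrough above (illustrative values, not from the
// diff): an existing path such as `--config ruff.toml` is accepted first;
// otherwise the value is parsed as an inline TOML override such as
// `--config "line-length = 120"`; only when both interpretations fail is a
// `TomlParseFailure` recorded and turned into a clap error below.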
let mut new_error = clap::Error::new(clap::error::ErrorKind::ValueValidation).with_cmd(cmd);
@@ -867,21 +853,6 @@ impl TypedValueParser for ConfigArgumentParser {
clap::error::ContextValue::String(value.to_string()),
);

let underlying_error = match &config_parse_error {
InvalidConfigFlagReason::ExtendPassedViaConfigFlag => {
let tip = config_parse_error.description().into();
new_error.insert(
clap::error::ContextKind::Suggested,
clap::error::ContextValue::StyledStrs(vec![tip]),
);
return Err(new_error);
}
InvalidConfigFlagReason::InvalidToml(underlying_error)
| InvalidConfigFlagReason::ValidTomlButInvalidRuffSchema(underlying_error) => {
underlying_error
}
};

// small hack so that multiline tips
// have the same indent on the left-hand side:
let tip_indent = " ".repeat(" tip: ".len());
@@ -906,20 +877,16 @@ A `--config` flag must either be a path to a `.toml` configuration file
"

It looks like you were trying to pass a path to a configuration file.
The path `{value}` does not point to a configuration file"
The path `{value}` does not exist"
));
}
} else if value.contains('=') {
tip.push_str(&format!(
"\n\n{}:\n\n{underlying_error}",
config_parse_error.description()
));
tip.push_str(&format!("\n\n{toml_parse_error}"));
}
let tip = tip.trim_end().to_owned().into();

new_error.insert(
clap::error::ContextKind::Suggested,
clap::error::ContextValue::StyledStrs(vec![tip]),
clap::error::ContextValue::StyledStrs(vec![tip.into()]),
);

Err(new_error)
@@ -974,6 +941,7 @@ pub struct CheckArguments {
pub exit_zero: bool,
pub files: Vec<PathBuf>,
pub ignore_noqa: bool,
pub isolated: bool,
pub no_cache: bool,
pub output_file: Option<PathBuf>,
pub show_files: bool,
@@ -991,6 +959,7 @@ pub struct FormatArguments {
pub no_cache: bool,
pub diff: bool,
pub files: Vec<PathBuf>,
pub isolated: bool,
pub stdin_filename: Option<PathBuf>,
pub range: Option<FormatRange>,
}

@@ -23,6 +23,7 @@ use ruff_linter::message::Message;
use ruff_linter::{warn_user, VERSION};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::Resolver;
@@ -347,7 +348,7 @@ impl FileCache {
} else {
FxHashMap::default()
};
Diagnostics::new(messages, notebook_indexes)
Diagnostics::new(messages, lint.imports.clone(), notebook_indexes)
})
}
}
@@ -374,17 +375,15 @@ pub(crate) fn init(path: &Path) -> Result<()> {
fs::create_dir_all(path.join(VERSION))?;

// Add the CACHEDIR.TAG.
cachedir::ensure_tag(path)?;
if !cachedir::is_tagged(path)? {
cachedir::add_tag(path)?;
}

// Add the .gitignore.
match fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(path.join(".gitignore"))
{
Ok(mut file) => file.write_all(b"# Automatically created by ruff.\n*\n")?,
Err(err) if err.kind() == io::ErrorKind::AlreadyExists => (),
Err(err) => return Err(err.into()),
let gitignore_path = path.join(".gitignore");
if !gitignore_path.exists() {
let mut file = fs::File::create(gitignore_path)?;
file.write_all(b"*")?;
}
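// Resulting cache layout, sketched from the code above (directory name and
// tree shape are assumptions, not part of the diff):
//
//   .ruff_cache/
//   |-- CACHEDIR.TAG   # marks the directory as a cache for backup tools
//   |-- .gitignore     # contains "*", so the cache is never committed
//   `-- <VERSION>/     # per-version cache entries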
Ok(())
@@ -393,7 +392,7 @@ pub(crate) fn init(path: &Path) -> Result<()> {
#[derive(Deserialize, Debug, Serialize, PartialEq)]
pub(crate) struct LintCacheData {
/// Imports made.
// pub(super) imports: ImportMap,
pub(super) imports: ImportMap,
/// Diagnostic messages.
pub(super) messages: Vec<CacheMessage>,
/// Source code of the file.
@@ -409,6 +408,7 @@ pub(crate) struct LintCacheData {
impl LintCacheData {
pub(crate) fn from_messages(
messages: &[Message],
imports: ImportMap,
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = messages.first() {
@@ -436,6 +436,7 @@ impl LintCacheData {
.collect();

Self {
imports,
messages,
source,
notebook_index,

@@ -17,6 +17,7 @@ use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::{fs, warn_user_once, IOError};
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::{
@@ -133,6 +134,7 @@ pub(crate) fn check(
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
@@ -250,7 +252,6 @@ mod test {
for file in [&pyproject_toml, &python_file, &notebook] {
fs::OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.mode(0o000)
.open(file)?;

@@ -61,8 +61,13 @@ impl FormatMode {
pub(crate) fn format(
cli: FormatArguments,
config_arguments: &ConfigArguments,
log_level: LogLevel,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve(
cli.isolated,
config_arguments,
cli.stdin_filename.as_deref(),
)?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
@@ -197,7 +202,7 @@ pub(crate) fn format(
}

// Report on the formatting changes.
if config_arguments.log_level >= LogLevel::Default {
if log_level >= LogLevel::Default {
if mode.is_diff() {
// Allow piping the diff to e.g. a file by writing the summary to stderr
results.write_summary(&mut stderr().lock())?;
@@ -527,7 +532,7 @@ impl<'a> FormatResults<'a> {
})
.sorted_unstable_by_key(|(path, _, _)| *path)
{
write!(f, "{}", unformatted.diff(formatted, Some(path)).unwrap())?;
unformatted.diff(formatted, Some(path), f)?;
}

Ok(())

@@ -23,7 +23,11 @@ pub(crate) fn format_stdin(
cli: &FormatArguments,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve(
cli.isolated,
config_arguments,
cli.stdin_filename.as_deref(),
)?;

let mut resolver = Resolver::new(&pyproject_config);
warn_incompatible_formatter_settings(&resolver);
@@ -118,13 +122,9 @@ fn format_source_code(
}
FormatMode::Check => {}
FormatMode::Diff => {
use std::io::Write;
write!(
&mut stdout().lock(),
"{}",
source_kind.diff(formatted, path).unwrap()
)
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
source_kind
.diff(formatted, path, &mut stdout().lock())
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
}
},
FormattedSource::Unchanged => {

@@ -7,7 +7,6 @@ pub(crate) mod format;
pub(crate) mod format_stdin;
pub(crate) mod linter;
pub(crate) mod rule;
pub(crate) mod server;
pub(crate) mod show_files;
pub(crate) mod show_settings;
pub(crate) mod version;

@@ -1,79 +0,0 @@
use std::num::NonZeroUsize;

use crate::ExitStatus;
use anyhow::Result;
use ruff_linter::logging::LogLevel;
use ruff_server::Server;
use tracing::{level_filters::LevelFilter, metadata::Level, subscriber::Interest, Metadata};
use tracing_subscriber::{
layer::{Context, Filter, SubscriberExt},
Layer, Registry,
};
use tracing_tree::time::Uptime;

pub(crate) fn run_server(
preview: bool,
worker_threads: NonZeroUsize,
log_level: LogLevel,
) -> Result<ExitStatus> {
if !preview {
tracing::error!("--preview needs to be provided as a command line argument while the server is still unstable.\nFor example: `ruff server --preview`");
return Ok(ExitStatus::Error);
}
let trace_level = if log_level == LogLevel::Verbose {
Level::TRACE
} else {
Level::DEBUG
};

let subscriber = Registry::default().with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_targets(true)
.with_writer(|| Box::new(std::io::stderr()))
.with_timer(Uptime::default())
.with_filter(LoggingFilter { trace_level }),
);

tracing::subscriber::set_global_default(subscriber)?;

let server = Server::new(worker_threads)?;

server.run().map(|()| ExitStatus::Success)
}

struct LoggingFilter {
trace_level: Level,
}

impl LoggingFilter {
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
let filter = if meta.target().starts_with("ruff") {
self.trace_level
} else {
Level::INFO
};

meta.level() <= &filter
}
}

impl<S> Filter<S> for LoggingFilter {
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
self.is_enabled(meta)
}

fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
if self.is_enabled(meta) {
Interest::always()
} else {
Interest::never()
}
}

fn max_level_hint(&self) -> Option<LevelFilter> {
Some(LevelFilter::from_level(self.trace_level))
}
}
@@ -3,7 +3,6 @@
use std::borrow::Cow;
use std::fs::File;
use std::io;
use std::io::Write;
use std::ops::{Add, AddAssign};
use std::path::Path;

@@ -23,6 +22,7 @@ use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
@@ -34,17 +34,20 @@ use crate::cache::{Cache, FileCacheKey, LintCacheData};
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
pub(crate) fixed: FixMap,
pub(crate) imports: ImportMap,
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}

impl Diagnostics {
pub(crate) fn new(
messages: Vec<Message>,
imports: ImportMap,
notebook_indexes: FxHashMap<String, NotebookIndex>,
) -> Self {
Self {
messages,
fixed: FixMap::default(),
imports,
notebook_indexes,
}
}
@@ -88,6 +91,7 @@ impl Diagnostics {
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
@@ -122,6 +126,7 @@ impl Add for Diagnostics {
impl AddAssign for Diagnostics {
fn add_assign(&mut self, other: Self) {
self.messages.extend(other.messages);
self.imports.extend(other.imports);
self.fixed += other.fixed;
self.notebook_indexes.extend(other.notebook_indexes);
}
@@ -261,7 +266,7 @@ pub(crate) fn lint_path(
// Lint the file.
let (
LinterResult {
data: messages,
data: (messages, imports),
error: parse_error,
},
transformed,
@@ -284,10 +289,10 @@ pub(crate) fn lint_path(
match fix_mode {
flags::FixMode::Apply => transformed.write(&mut File::create(path)?)?,
flags::FixMode::Diff => {
write!(
source_kind.diff(
transformed.as_ref(),
Some(path),
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, Some(path)).unwrap()
)?;
}
flags::FixMode::Generate => {}
@@ -329,6 +334,8 @@ pub(crate) fn lint_path(
(result, transformed, fixed)
};

let imports = imports.unwrap_or_default();

if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if parse_error.is_none() {
@@ -346,6 +353,7 @@ pub(crate) fn lint_path(
&key,
LintCacheData::from_messages(
&messages,
imports.clone(),
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
@@ -369,6 +377,7 @@ pub(crate) fn lint_path(
Ok(Diagnostics {
messages,
fixed: FixMap::from_iter([(fs::relativize_path(path), fixed)]),
imports,
notebook_indexes,
})
}
@@ -406,7 +415,7 @@ pub(crate) fn lint_stdin(
// Lint the inputs.
let (
LinterResult {
data: messages,
data: (messages, imports),
error: parse_error,
},
transformed,
@@ -433,11 +442,7 @@ pub(crate) fn lint_stdin(
flags::FixMode::Diff => {
// But only write a diff if it's non-empty.
if !fixed.is_empty() {
write!(
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, path).unwrap()
)?;
source_kind.diff(transformed.as_ref(), path, &mut io::stdout().lock())?;
}
}
flags::FixMode::Generate => {}
@@ -484,6 +489,8 @@ pub(crate) fn lint_stdin(
(result, transformed, fixed)
};

let imports = imports.unwrap_or_default();

if let Some(error) = parse_error {
error!(
"{}",
@@ -506,6 +513,7 @@ pub(crate) fn lint_stdin(
fs::relativize_path(path.unwrap_or_else(|| Path::new("-"))),
fixed,
)]),
imports,
notebook_indexes,
})
}

@@ -2,13 +2,11 @@

use std::fs::File;
use std::io::{self, stdout, BufWriter, Write};
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use std::sync::mpsc::channel;

use anyhow::Result;
use args::{GlobalConfigArgs, ServerCommand};
use clap::CommandFactory;
use colored::Colorize;
use log::warn;
@@ -20,7 +18,7 @@ use ruff_linter::settings::types::SerializationFormat;
use ruff_linter::{fs, warn_user, warn_user_once};
use ruff_workspace::Settings;

use crate::args::{Args, CheckCommand, Command, FormatCommand};
use crate::args::{Args, CheckCommand, Command, FormatCommand, HelpFormat};
use crate::printer::{Flags as PrinterFlags, Printer};

pub mod args;
@@ -116,12 +114,20 @@ fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
}
}

/// Get the actual value of the `format` desired from either `output_format`
/// or `format`, and warn the user if they're using the deprecated form.
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
if format.is_some() {
warn_user!("The `--format` argument is deprecated. Use `--output-format` instead.");
}
format.unwrap_or(output_format)
}
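// Behavior sketch for the helper above (assumes a second `HelpFormat`
// variant such as `Json`; values are illustrative):
//
//   resolve_help_output_format(HelpFormat::Text, None)                   // -> Text
//   resolve_help_output_format(HelpFormat::Text, Some(HelpFormat::Json)) // -> Json, plus a deprecation warning
//
// The deprecated `--format` still wins when both flags are given, so existing
// scripts keep working while users are nudged toward `--output-format`.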
pub fn run(
Args {
command,
global_options,
log_level_args,
}: Args,
deprecated_alias_warning: Option<&'static str>,
) -> Result<ExitStatus> {
{
let default_panic_hook = std::panic::take_hook();
@@ -149,18 +155,8 @@ pub fn run(
#[cfg(windows)]
assert!(colored::control::set_virtual_terminal(true).is_ok());

// support FORCE_COLOR env var
if let Some(force_color) = std::env::var_os("FORCE_COLOR") {
if force_color.len() > 0 {
colored::control::set_override(true);
}
}

set_up_logging(global_options.log_level())?;

if let Some(deprecated_alias_warning) = deprecated_alias_warning {
warn_user!("{}", deprecated_alias_warning);
}
let log_level = LogLevel::from(&log_level_args);
set_up_logging(&log_level)?;

match command {
Command::Version { output_format } => {
@@ -170,8 +166,10 @@ pub fn run(
Command::Rule {
rule,
all,
output_format,
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
if all {
commands::rule::rules(output_format)?;
}
@@ -184,52 +182,47 @@ pub fn run(
commands::config::config(option.as_deref())?;
Ok(ExitStatus::Success)
}
Command::Linter { output_format } => {
Command::Linter {
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
commands::linter::linter(output_format)?;
Ok(ExitStatus::Success)
}
Command::Clean => {
commands::clean::clean(global_options.log_level())?;
commands::clean::clean(log_level)?;
Ok(ExitStatus::Success)
}
Command::GenerateShellCompletion { shell } => {
shell.generate(&mut Args::command(), &mut stdout());
Ok(ExitStatus::Success)
}
Command::Check(args) => check(args, global_options),
Command::Format(args) => format(args, global_options),
Command::Server(args) => server(args, global_options.log_level()),
Command::Check(args) => check(args, log_level),
Command::Format(args) => format(args, log_level),
}
}

fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition()?;

if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
commands::format_stdin::format_stdin(&cli, &config_arguments)
} else {
commands::format::format(cli, &config_arguments)
commands::format::format(cli, &config_arguments, log_level)
}
}

fn server(args: ServerCommand, log_level: LogLevel) -> Result<ExitStatus> {
let ServerCommand { preview } = args;

let four = NonZeroUsize::new(4).unwrap();

// by default, we set the number of worker threads to `num_cpus`, with a minimum of 4.
let worker_threads = std::thread::available_parallelism()
.unwrap_or(four)
.max(four);
commands::server::run_server(preview, worker_threads, log_level)
}
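// Worked example for the thread-count expression above (not from the diff):
// `available_parallelism().unwrap_or(four).max(four)` evaluates to
// max(num_cpus, 4), e.g. 8 cores give 8 worker threads and 2 cores give 4,
// so the server always starts with at least four workers.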
pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition()?;

// Construct the "default" settings. These are used when no `pyproject.toml`
// files are present, or files are injected from outside of the hierarchy.
let pyproject_config = resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve::resolve(
cli.isolated,
&config_arguments,
cli.stdin_filename.as_deref(),
)?;

let mut writer: Box<dyn Write> = match cli.output_file {
Some(path) if !cli.watch => {
@@ -320,7 +313,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
}
let modifications =
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
if modifications > 0 && log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[allow(clippy::print_stderr)]
{
@@ -332,7 +325,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi

let printer = Printer::new(
output_format,
config_arguments.log_level,
log_level,
fix_mode,
unsafe_fixes,
printer_flags,
@@ -389,8 +382,11 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
};

if matches!(change_kind, ChangeKind::Configuration) {
pyproject_config =
resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
pyproject_config = resolve::resolve(
cli.isolated,
&config_arguments,
cli.stdin_filename.as_deref(),
)?;
}
Printer::clear_screen()?;
printer.write_to_user("File change detected...\n");

@@ -27,42 +27,26 @@ pub fn main() -> ExitCode {
let mut args =
argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX).unwrap();

// We can't use `warn_user` here because logging isn't set up at this point
// and we also don't know if the user runs ruff with quiet.
// Keep the message and pass it to `run` that is responsible for emitting the warning.
let deprecated_alias_warning = match args.get(1).and_then(|arg| arg.to_str()) {
// Deprecated aliases that are handled by clap
Some("--explain") => {
Some("`ruff --explain <RULE>` is deprecated. Use `ruff rule <RULE>` instead.")
}
Some("--clean") => {
Some("`ruff --clean` is deprecated. Use `ruff clean` instead.")
}
Some("--generate-shell-completion") => {
Some("`ruff --generate-shell-completion <SHELL>` is deprecated. Use `ruff generate-shell-completion <SHELL>` instead.")
}
// Deprecated `ruff` alias to `ruff check`
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
Some(arg) if !Command::has_subcommand(arg)
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
if let Some(arg) = args.get(1) {
if arg
.to_str()
.is_some_and(|arg| !Command::has_subcommand(rewrite_legacy_subcommand(arg)))
&& arg != "-h"
&& arg != "--help"
&& arg != "-V"
&& arg != "--version"
&& arg != "help" => {

{
args.insert(1, "check".into());
Some("`ruff <path>` is deprecated. Use `ruff check <path>` instead.")
}
},
_ => None
};
&& arg != "help"
{
args.insert(1, "check".into());
}
}

let args = Args::parse_from(args);

match run(args, deprecated_alias_warning) {
match run(args) {
Ok(code) => code.into(),
Err(err) => {
#[allow(clippy::print_stderr)]
@@ -81,3 +65,12 @@ pub fn main() -> ExitCode {
}
}
}

fn rewrite_legacy_subcommand(cmd: &str) -> &str {
match cmd {
"--explain" => "rule",
"--clean" => "clean",
"--generate-shell-completion" => "generate-shell-completion",
cmd => cmd,
}
}
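// Note on the helper above: the rewritten name is only fed to the
// `has_subcommand` check, so a legacy flag like `--explain` is recognized
// (via its `rule` equivalent) and left for clap to handle, while a bare
// path such as `ruff foo.py` still gets an implicit `check` inserted.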
@@ -16,7 +16,7 @@ impl std::fmt::Display for PanicError {
}

thread_local! {
static LAST_PANIC: std::cell::Cell<Option<PanicError>> = const { std::cell::Cell::new(None) };
static LAST_PANIC: std::cell::Cell<Option<PanicError>> = std::cell::Cell::new(None);
}

/// [`catch_unwind`](std::panic::catch_unwind) wrapper that sets a custom [`set_hook`](std::panic::set_hook)

@@ -118,8 +118,6 @@ impl Printer {
} else if remaining > 0 {
let s = if remaining == 1 { "" } else { "s" };
writeln!(writer, "Found {remaining} error{s}.")?;
} else if remaining == 0 {
writeln!(writer, "All checks passed!")?;
}

if let Some(fixables) = fixables {

@@ -1,4 +1,4 @@
use std::path::Path;
use std::path::{Path, PathBuf};

use anyhow::Result;
use log::debug;
@@ -16,11 +16,12 @@ use crate::args::ConfigArguments;
/// Resolve the relevant settings strategy and defaults for the current
/// invocation.
pub fn resolve(
isolated: bool,
config_arguments: &ConfigArguments,
stdin_filename: Option<&Path>,
) -> Result<PyprojectConfig> {
// First priority: if we're running in isolated mode, use the default settings.
if config_arguments.isolated {
if isolated {
let config = config_arguments.transform(Configuration::default());
let settings = config.into_settings(&path_dedot::CWD)?;
debug!("Isolated mode, not reading any pyproject.toml");
@@ -34,8 +35,13 @@ pub fn resolve(
// Second priority: the user specified a `pyproject.toml` file. Use that
// `pyproject.toml` for _all_ configuration, and resolve paths relative to the
// current working directory. (This matches ESLint's behavior.)
if let Some(pyproject) = config_arguments.config_file() {
let settings = resolve_root_settings(pyproject, Relativity::Cwd, config_arguments)?;
if let Some(pyproject) = config_arguments
.config_file()
.map(|config| config.display().to_string())
.map(|config| shellexpand::full(&config).map(|config| PathBuf::from(config.as_ref())))
.transpose()?
{
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, config_arguments)?;
debug!(
"Using user-specified configuration file at: {}",
pyproject.display()
@@ -43,7 +49,7 @@ pub fn resolve(
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
settings,
Some(pyproject.to_path_buf()),
Some(pyproject),
));
}

@@ -70,7 +70,7 @@ pub(crate) fn version() -> VersionInfo {

#[cfg(test)]
mod tests {
use insta::{assert_json_snapshot, assert_snapshot};
use insta::{assert_display_snapshot, assert_json_snapshot};

use super::{CommitInfo, VersionInfo};

@@ -80,7 +80,7 @@ mod tests {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}

#[test]
@@ -95,7 +95,7 @@ mod tests {
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}

#[test]
@@ -110,7 +110,7 @@ mod tests {
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}

#[test]

@@ -12,10 +12,9 @@ const STDIN: &str = "l = 1";
fn ruff_check(show_source: Option<bool>, output_format: Option<String>) -> Command {
let mut cmd = Command::new(get_cargo_bin(BIN_NAME));
let output_format = output_format.unwrap_or(format!("{}", SerializationFormat::default(false)));
cmd.arg("check")
.arg("--output-format")
.arg(output_format)
.arg("--no-cache");
cmd.arg("--output-format");
cmd.arg(output_format);
cmd.arg("--no-cache");
match show_source {
Some(true) => {
cmd.arg("--show-source");

@@ -7,15 +7,10 @@ use std::str;

use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use regex::escape;
use tempfile::TempDir;

const BIN_NAME: &str = "ruff";

fn tempdir_filter(tempdir: &TempDir) -> String {
format!(r"{}\\?/?", escape(tempdir.path().to_str().unwrap()))
}
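// Illustration (hypothetical path): for a tempdir like `/tmp/.tmpAbC123`,
// `tempdir_filter` yields the regex `/tmp/\.tmpAbC123\\?/?`, which the insta
// filters use to replace the platform-specific temp path (plus an optional
// trailing separator) with a stable placeholder in snapshots.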
|
||||
#[test]
|
||||
fn default_options() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
@@ -23,7 +18,7 @@ fn default_options() {
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def foo(arg1, arg2,):
|
||||
print('Shouldn\'t change quotes')
|
||||
print('Should\'t change quotes')
|
||||
|
||||
|
||||
if condition:
|
||||
@@ -38,7 +33,7 @@ if condition:
|
||||
arg1,
|
||||
arg2,
|
||||
):
|
||||
print("Shouldn't change quotes")
|
||||
print("Should't change quotes")
|
||||
|
||||
|
||||
if condition:
|
||||
@@ -111,7 +106,7 @@ fn nonexistent_config_file() {
|
||||
option
|
||||
|
||||
It looks like you were trying to pass a path to a configuration file.
|
||||
The path `foo.toml` does not point to a configuration file
|
||||
The path `foo.toml` does not exist
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
@@ -152,29 +147,28 @@ fn too_many_config_files() -> Result<()> {
|
||||
let ruff2_dot_toml = tempdir.path().join("ruff2.toml");
|
||||
fs::File::create(&ruff_dot_toml)?;
|
||||
fs::File::create(&ruff2_dot_toml)?;
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
let expected_stderr = format!(
|
||||
"\
|
||||
ruff failed
|
||||
Cause: You cannot specify more than one configuration file on the command line.
|
||||
|
||||
tip: remove either `--config={}` or `--config={}`.
|
||||
For more information, try `--help`.
|
||||
|
||||
",
|
||||
ruff_dot_toml.display(),
|
||||
ruff2_dot_toml.display(),
|
||||
);
|
||||
let cmd = Command::new(get_cargo_bin(BIN_NAME))
|
||||
.arg("format")
|
||||
.arg("--config")
|
||||
.arg(&ruff_dot_toml)
|
||||
.arg("--config")
|
||||
.arg(&ruff2_dot_toml)
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: You cannot specify more than one configuration file on the command line.
|
||||
|
||||
tip: remove either `--config=[TMP]/ruff.toml` or `--config=[TMP]/ruff2.toml`.
|
||||
For more information, try `--help`.
|
||||
|
||||
"###);
|
||||
});
|
||||
.arg(".")
|
||||
.output()?;
|
||||
let stderr = std::str::from_utf8(&cmd.stderr)?;
|
||||
assert_eq!(stderr, expected_stderr);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -183,29 +177,27 @@ fn config_file_and_isolated() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_dot_toml = tempdir.path().join("ruff.toml");
|
||||
fs::File::create(&ruff_dot_toml)?;
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
let expected_stderr = format!(
|
||||
"\
|
||||
ruff failed
|
||||
Cause: The argument `--config={}` cannot be used with `--isolated`
|
||||
|
||||
tip: You cannot specify a configuration file and also specify `--isolated`,
|
||||
as `--isolated` causes ruff to ignore all configuration files.
|
||||
For more information, try `--help`.
|
||||
|
||||
",
|
||||
ruff_dot_toml.display(),
|
||||
);
|
||||
let cmd = Command::new(get_cargo_bin(BIN_NAME))
|
||||
.arg("format")
|
||||
.arg("--config")
|
||||
.arg(&ruff_dot_toml)
|
||||
.arg("--isolated")
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: The argument `--config=[TMP]/ruff.toml` cannot be used with `--isolated`
|
||||
|
||||
tip: You cannot specify a configuration file and also specify `--isolated`,
|
||||
as `--isolated` causes ruff to ignore all configuration files.
|
||||
For more information, try `--help`.
|
||||
|
||||
"###);
|
||||
});
|
||||
.arg(".")
|
||||
.output()?;
|
||||
let stderr = std::str::from_utf8(&cmd.stderr)?;
|
||||
assert_eq!(stderr, expected_stderr);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -358,52 +350,58 @@ def f(x):
|
||||
'''
|
||||
pass
|
||||
"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
def f(x):
|
||||
"""
|
||||
Something about `f`. And an example:
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
def f(x):
|
||||
"""
|
||||
Something about `f`. And an example:
|
||||
|
||||
.. code-block:: python
|
||||
.. code-block:: python
|
||||
|
||||
foo, bar, quux = (
|
||||
this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
)
|
||||
|
||||
Another example:
|
||||
|
||||
```py
|
||||
foo, bar, quux = (
|
||||
this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
(
|
||||
foo,
|
||||
bar,
|
||||
quux,
|
||||
) = this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
```
|
||||
|
||||
And another:
|
||||
Another example:
|
||||
|
||||
>>> foo, bar, quux = (
|
||||
... this_is_a_long_line(
|
||||
... lion,
|
||||
... hippo,
|
||||
... lemur,
|
||||
... bear,
|
||||
... )
|
||||
... )
|
||||
"""
|
||||
pass
|
||||
```py
|
||||
(
|
||||
foo,
|
||||
bar,
|
||||
quux,
|
||||
) = this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
```
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
And another:
|
||||
|
||||
>>> (
|
||||
... foo,
|
||||
... bar,
|
||||
... quux,
|
||||
... ) = this_is_a_long_line(
|
||||
... lion,
|
||||
... hippo,
|
||||
... lemur,
|
||||
... bear,
|
||||
... )
|
||||
"""
|
||||
pass
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -523,7 +521,7 @@ from module import =
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Failed to parse main.py:2:20: Expected an import name
|
||||
error: Failed to parse main.py:2:20: Unexpected token '='
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -71,7 +71,6 @@ impl<'a> RuffCheck<'a> {
|
||||
/// Generate a [`Command`] for the `ruff check` command.
|
||||
fn build(self) -> Command {
|
||||
let mut cmd = ruff_cmd();
|
||||
cmd.arg("check");
|
||||
if let Some(output_format) = self.output_format {
|
||||
cmd.args(["--output-format", output_format]);
|
||||
}
|
||||
@@ -101,7 +100,6 @@ fn stdin_success() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -223,7 +221,6 @@ fn stdin_source_type_pyi() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -592,7 +589,6 @@ fn stdin_fix_when_no_issues_should_still_print_contents() {
|
||||
print(sys.version)
|
||||
|
||||
----- stderr -----
|
||||
All checks passed!
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -731,11 +727,11 @@ fn stdin_parse_error() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:17: E999 SyntaxError: Expected an import name
|
||||
-:1:17: E999 SyntaxError: Unexpected token '='
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
error: Failed to parse at 1:17: Expected an import name
|
||||
error: Failed to parse at 1:17: Unexpected token '='
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -809,13 +805,13 @@ fn full_output_format() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_f401() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401"]));
|
||||
fn explain_status_codes_f401() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["--explain", "F401"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404"]), @r###"
|
||||
fn explain_status_codes_ruf404() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["--explain", "RUF404"]), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1026,7 +1022,6 @@ fn preview_disabled_direct() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Selection `RUF911` has no effect because preview is not enabled.
|
||||
@@ -1043,7 +1038,6 @@ fn preview_disabled_prefix_empty() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Selection `RUF91` has no effect because preview is not enabled.
|
||||
@@ -1060,7 +1054,6 @@ fn preview_disabled_does_not_warn_for_empty_ignore_selections() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1076,7 +1069,6 @@ fn preview_disabled_does_not_warn_for_empty_fixable_selections() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1182,7 +1174,6 @@ fn removed_indirect() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1213,7 +1204,6 @@ fn redirect_indirect() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1316,7 +1306,6 @@ fn deprecated_indirect_preview_enabled() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1353,14 +1342,13 @@ fn unreadable_pyproject_toml() -> Result<()> {
|
||||
// Create an empty file with 000 permissions
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o000)
|
||||
.open(pyproject_toml)?;
|
||||
|
||||
// Don't `--isolated` since the configuration discovery is where the error happens
|
||||
let args = Args::parse_from(["", "check", "--no-cache", tempdir.path().to_str().unwrap()]);
|
||||
let err = run(args, None).err().context("Unexpected success")?;
|
||||
let err = run(args).err().context("Unexpected success")?;
|
||||
assert_eq!(
|
||||
err.chain()
|
||||
.map(std::string::ToString::to_string)
|
||||
@@ -1394,7 +1382,6 @@ fn unreadable_dir() -> Result<()> {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Encountered error: Permission denied (os error 13)
|
||||
@@ -1909,7 +1896,6 @@ def log(x, base) -> float:
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
|
||||
@@ -12,7 +12,7 @@ use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
|
||||
use tempfile::TempDir;
|
||||
|
||||
const BIN_NAME: &str = "ruff";
|
||||
const STDIN_BASE_OPTIONS: &[&str] = &["check", "--no-cache", "--output-format", "concise"];
|
||||
const STDIN_BASE_OPTIONS: &[&str] = &["--no-cache", "--output-format", "concise"];
|
||||
|
||||
fn tempdir_filter(tempdir: &TempDir) -> String {
|
||||
format!(r"{}\\?/?", escape(tempdir.path().to_str().unwrap()))
|
||||
@@ -246,6 +246,7 @@ OTHER = "OTHER"
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
// Explicitly pass test.py, should be linted regardless of it being excluded by lint.exclude
|
||||
@@ -292,6 +293,7 @@ inline-quotes = "single"
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
@@ -384,6 +386,7 @@ inline-quotes = "single"
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
@@ -432,6 +435,7 @@ inline-quotes = "single"
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
@@ -491,12 +495,12 @@ ignore = ["D203", "D212"]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(sub_dir)
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
, @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: No Python files found under the given path(s)
|
||||
@@ -523,7 +527,7 @@ fn nonexistent_config_file() {
|
||||
option
|
||||
|
||||
It looks like you were trying to pass a path to a configuration file.
|
||||
The path `foo.toml` does not point to a configuration file
|
||||
The path `foo.toml` does not exist
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
@@ -591,24 +595,6 @@ fn too_many_config_files() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extend_passed_via_config_argument() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", "extend = 'foo.toml'", "."]), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'extend = 'foo.toml'' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: Cannot include `extend` in a --config flag value
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn config_file_and_isolated() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
@@ -834,7 +820,6 @@ fn complex_config_setting_overridden_via_cli() -> Result<()> {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -851,7 +836,7 @@ fn deprecated_config_option_overridden_via_cli() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:7: N801 Class name `lowercase` should use CapWords convention
|
||||
-:1:7: N801 Class name `lowercase` should use CapWords convention
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -918,6 +903,7 @@ include = ["*.ipy"]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--extension", "ipy:ipynb"])
|
||||
@@ -935,398 +921,3 @@ include = ["*.ipy"]
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn file_noqa_external() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint]
|
||||
external = ["AAA"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
# flake8: noqa: AAA101, BBB102
|
||||
import os
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:3:8: F401 [*] `os` imported but unused
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: Invalid rule code provided to `# ruff: noqa` at -:2: BBB102
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn required_version_exact_mismatch() -> Result<()> {
|
||||
let version = env!("CARGO_PKG_VERSION");
|
||||
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
required-version = "0.1.0"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/"), (version, "[VERSION]")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
import os
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Required version `==0.1.0` does not match the running version `[VERSION]`
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}

#[test]
fn required_version_exact_match() -> Result<()> {
    let version = env!("CARGO_PKG_VERSION");

    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        format!(
            r#"
required-version = "{version}"
"#
        ),
    )?;

    insta::with_settings!({
        filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/"), (version, "[VERSION]")]
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .arg("--config")
            .arg(&ruff_toml)
            .arg("-")
            .pass_stdin(r#"
import os
"#), @r###"
success: false
exit_code: 1
----- stdout -----
-:2:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.

----- stderr -----
"###);
    });

    Ok(())
}

#[test]
fn required_version_bound_mismatch() -> Result<()> {
    let version = env!("CARGO_PKG_VERSION");

    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        format!(
            r#"
required-version = ">{version}"
"#
        ),
    )?;

    insta::with_settings!({
        filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/"), (version, "[VERSION]")]
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .arg("--config")
            .arg(&ruff_toml)
            .arg("-")
            .pass_stdin(r#"
import os
"#), @r###"
success: false
exit_code: 2
----- stdout -----

----- stderr -----
ruff failed
Cause: Required version `>[VERSION]` does not match the running version `[VERSION]`
"###);
    });

    Ok(())
}

#[test]
fn required_version_bound_match() -> Result<()> {
    let version = env!("CARGO_PKG_VERSION");

    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        r#"
required-version = ">=0.1.0"
"#,
    )?;

    insta::with_settings!({
        filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/"), (version, "[VERSION]")]
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .arg("--config")
            .arg(&ruff_toml)
            .arg("-")
            .pass_stdin(r#"
import os
"#), @r###"
success: false
exit_code: 1
----- stdout -----
-:2:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.

----- stderr -----
"###);
    });

    Ok(())
}

/// Expand environment variables in `--config` paths provided via the CLI.
#[test]
fn config_expand() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        ruff_toml,
        r#"
[lint]
select = ["F"]
ignore = ["F841"]
"#,
    )?;

    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
        .args(STDIN_BASE_OPTIONS)
        .arg("--config")
        .arg("${NAME}.toml")
        .env("NAME", "ruff")
        .arg("-")
        .current_dir(tempdir.path())
        .pass_stdin(r#"
import os

def func():
    x = 1
"#), @r###"
success: false
exit_code: 1
----- stdout -----
-:2:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.

----- stderr -----
"###);

    Ok(())
}
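A sketch of the expansion step the test above relies on, assuming the `shellexpand` crate (the diff does not show Ruff's actual expansion code, so treat the crate and the call as assumptions): with `NAME=ruff` in the environment, `${NAME}.toml` expands to `ruff.toml` before the path is resolved.

// Hypothetical helper: expand `${VAR}` placeholders in a --config path.
fn expand_config_path(raw: &str) -> String {
    shellexpand::env(raw)
        .map(std::borrow::Cow::into_owned)
        // On a lookup failure, fall back to the raw path unchanged.
        .unwrap_or_else(|_| raw.to_string())
}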

/// Per-file selects via ! negation in per-file-ignores
#[test]
fn negated_per_file_ignores() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        r#"
[lint.per-file-ignores]
"!selected.py" = ["RUF"]
"#,
    )?;
    let selected = tempdir.path().join("selected.py");
    fs::write(selected, "")?;
    let ignored = tempdir.path().join("ignored.py");
    fs::write(ignored, "")?;

    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
        .args(STDIN_BASE_OPTIONS)
        .arg("--config")
        .arg(&ruff_toml)
        .arg("--select")
        .arg("RUF901")
        .current_dir(&tempdir)
        , @r###"
success: false
exit_code: 1
----- stdout -----
selected.py:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
Found 1 error.
[*] 1 fixable with the `--fix` option.

----- stderr -----
"###);
    Ok(())
}

#[test]
fn negated_per_file_ignores_absolute() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        r#"
[lint.per-file-ignores]
"!src/**.py" = ["RUF"]
"#,
    )?;
    let src_dir = tempdir.path().join("src");
    fs::create_dir(&src_dir)?;
    let selected = src_dir.join("selected.py");
    fs::write(selected, "")?;
    let ignored = tempdir.path().join("ignored.py");
    fs::write(ignored, "")?;

    insta::with_settings!({filters => vec![
        // Replace windows paths
        (r"\\", "/"),
    ]}, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .arg("--config")
            .arg(&ruff_toml)
            .arg("--select")
            .arg("RUF901")
            .current_dir(&tempdir)
            , @r###"
success: false
exit_code: 1
----- stdout -----
src/selected.py:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
Found 1 error.
[*] 1 fixable with the `--fix` option.

----- stderr -----
"###);
    });
    Ok(())
}

/// patterns are additive, can't use negative patterns to "un-ignore"
#[test]
fn negated_per_file_ignores_overlap() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        r#"
[lint.per-file-ignores]
"*.py" = ["RUF"]
"!foo.py" = ["RUF"]
"#,
    )?;
    let foo_file = tempdir.path().join("foo.py");
    fs::write(foo_file, "")?;
    let bar_file = tempdir.path().join("bar.py");
    fs::write(bar_file, "")?;

    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
        .args(STDIN_BASE_OPTIONS)
        .arg("--config")
        .arg(&ruff_toml)
        .arg("--select")
        .arg("RUF901")
        .current_dir(&tempdir)
        , @r###"
success: true
exit_code: 0
----- stdout -----
All checks passed!

----- stderr -----
"###);
    Ok(())
}
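The semantics pinned down above, restated as data (editor-added sketch; the pattern names are illustrative): per-file-ignores entries are additive, so a negated pattern never "un-ignores" a code.

const PER_FILE_IGNORES: [(&str, &str); 2] = [
    ("*.py", "RUF"),    // ignores RUF-prefixed rules in every Python file
    ("!foo.py", "RUF"), // additive: also ignores RUF in foo.py rather than re-enabling it
];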

#[test]
fn unused_interaction() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_toml = tempdir.path().join("ruff.toml");
    fs::write(
        &ruff_toml,
        r#"
[lint]
select = ["F"]
"#,
    )?;

    insta::with_settings!({
        filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .arg("--config")
            .arg(&ruff_toml)
            .args(["--stdin-filename", "test.py"])
            .arg("--fix")
            .arg("-")
            .pass_stdin(r#"
import os # F401

def function():
    import os # F811
    print(os.name)
"#), @r###"
success: true
exit_code: 0
----- stdout -----

import os # F401

def function():
    print(os.name)

----- stderr -----
Found 1 error (1 fixed, 0 remaining).
"###);
    });

    Ok(())
}

@@ -3,7 +3,7 @@ source: crates/ruff/tests/integration_test.rs
info:
  program: ruff
  args:
    - rule
    - "--explain"
    - F401
---
success: true
@@ -34,11 +34,6 @@ marking it as unused, as in:
from module import member as member
```

## Fix safety

When `ignore_init_module_imports` is disabled, fixes can remove unused imports in `__init__` files.
These fixes are considered unsafe because they can change the public interface.

## Example
```python
import numpy as np # unused import
@@ -73,3 +68,4 @@ else:
- [Typing documentation: interface conventions](https://typing.readthedocs.io/en/latest/source/libraries.html#library-interface-public-and-private-symbols)

----- stderr -----

@@ -44,16 +44,15 @@ file_resolver.exclude = [
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "site-packages",
    "venv",
]
file_resolver.extend_exclude = [
    "crates/ruff/resources/",
    "crates/ruff_linter/resources/",
    "crates/ruff_python_formatter/resources/",
    "crates/ruff_python_parser/resources/",
]
file_resolver.force_exclude = false
file_resolver.include = [
@@ -69,128 +68,128 @@ file_resolver.project_root = "[BASEPATH]"
linter.exclude = []
linter.project_root = "[BASEPATH]"
linter.rules.enabled = [
    multiple-imports-on-one-line (E401),
    module-import-not-at-top-of-file (E402),
    multiple-statements-on-one-line-colon (E701),
    multiple-statements-on-one-line-semicolon (E702),
    useless-semicolon (E703),
    none-comparison (E711),
    true-false-comparison (E712),
    not-in-test (E713),
    not-is-test (E714),
    type-comparison (E721),
    bare-except (E722),
    lambda-assignment (E731),
    ambiguous-variable-name (E741),
    ambiguous-class-name (E742),
    ambiguous-function-name (E743),
    io-error (E902),
    syntax-error (E999),
    unused-import (F401),
    import-shadowed-by-loop-var (F402),
    undefined-local-with-import-star (F403),
    late-future-import (F404),
    undefined-local-with-import-star-usage (F405),
    undefined-local-with-nested-import-star-usage (F406),
    future-feature-not-defined (F407),
    percent-format-invalid-format (F501),
    percent-format-expected-mapping (F502),
    percent-format-expected-sequence (F503),
    percent-format-extra-named-arguments (F504),
    percent-format-missing-argument (F505),
    percent-format-mixed-positional-and-named (F506),
    percent-format-positional-count-mismatch (F507),
    percent-format-star-requires-sequence (F508),
    percent-format-unsupported-format-character (F509),
    string-dot-format-invalid-format (F521),
    string-dot-format-extra-named-arguments (F522),
    string-dot-format-extra-positional-arguments (F523),
    string-dot-format-missing-arguments (F524),
    string-dot-format-mixing-automatic (F525),
    f-string-missing-placeholders (F541),
    multi-value-repeated-key-literal (F601),
    multi-value-repeated-key-variable (F602),
    expressions-in-star-assignment (F621),
    multiple-starred-expressions (F622),
    assert-tuple (F631),
    is-literal (F632),
    invalid-print-syntax (F633),
    if-tuple (F634),
    break-outside-loop (F701),
    continue-outside-loop (F702),
    yield-outside-function (F704),
    return-outside-function (F706),
    default-except-not-last (F707),
    forward-annotation-syntax-error (F722),
    redefined-while-unused (F811),
    undefined-name (F821),
    undefined-export (F822),
    undefined-local (F823),
    unused-variable (F841),
    unused-annotation (F842),
    raise-not-implemented (F901),
    MultipleImportsOnOneLine,
    ModuleImportNotAtTopOfFile,
    MultipleStatementsOnOneLineColon,
    MultipleStatementsOnOneLineSemicolon,
    UselessSemicolon,
    NoneComparison,
    TrueFalseComparison,
    NotInTest,
    NotIsTest,
    TypeComparison,
    BareExcept,
    LambdaAssignment,
    AmbiguousVariableName,
    AmbiguousClassName,
    AmbiguousFunctionName,
    IOError,
    SyntaxError,
    UnusedImport,
    ImportShadowedByLoopVar,
    UndefinedLocalWithImportStar,
    LateFutureImport,
    UndefinedLocalWithImportStarUsage,
    UndefinedLocalWithNestedImportStarUsage,
    FutureFeatureNotDefined,
    PercentFormatInvalidFormat,
    PercentFormatExpectedMapping,
    PercentFormatExpectedSequence,
    PercentFormatExtraNamedArguments,
    PercentFormatMissingArgument,
    PercentFormatMixedPositionalAndNamed,
    PercentFormatPositionalCountMismatch,
    PercentFormatStarRequiresSequence,
    PercentFormatUnsupportedFormatCharacter,
    StringDotFormatInvalidFormat,
    StringDotFormatExtraNamedArguments,
    StringDotFormatExtraPositionalArguments,
    StringDotFormatMissingArguments,
    StringDotFormatMixingAutomatic,
    FStringMissingPlaceholders,
    MultiValueRepeatedKeyLiteral,
    MultiValueRepeatedKeyVariable,
    ExpressionsInStarAssignment,
    MultipleStarredExpressions,
    AssertTuple,
    IsLiteral,
    InvalidPrintSyntax,
    IfTuple,
    BreakOutsideLoop,
    ContinueOutsideLoop,
    YieldOutsideFunction,
    ReturnOutsideFunction,
    DefaultExceptNotLast,
    ForwardAnnotationSyntaxError,
    RedefinedWhileUnused,
    UndefinedName,
    UndefinedExport,
    UndefinedLocal,
    UnusedVariable,
    UnusedAnnotation,
    RaiseNotImplemented,
]
linter.rules.should_fix = [
    multiple-imports-on-one-line (E401),
    module-import-not-at-top-of-file (E402),
    multiple-statements-on-one-line-colon (E701),
    multiple-statements-on-one-line-semicolon (E702),
    useless-semicolon (E703),
    none-comparison (E711),
    true-false-comparison (E712),
    not-in-test (E713),
    not-is-test (E714),
    type-comparison (E721),
    bare-except (E722),
    lambda-assignment (E731),
    ambiguous-variable-name (E741),
    ambiguous-class-name (E742),
    ambiguous-function-name (E743),
    io-error (E902),
    syntax-error (E999),
    unused-import (F401),
    import-shadowed-by-loop-var (F402),
    undefined-local-with-import-star (F403),
    late-future-import (F404),
    undefined-local-with-import-star-usage (F405),
    undefined-local-with-nested-import-star-usage (F406),
    future-feature-not-defined (F407),
    percent-format-invalid-format (F501),
    percent-format-expected-mapping (F502),
    percent-format-expected-sequence (F503),
    percent-format-extra-named-arguments (F504),
    percent-format-missing-argument (F505),
    percent-format-mixed-positional-and-named (F506),
    percent-format-positional-count-mismatch (F507),
    percent-format-star-requires-sequence (F508),
    percent-format-unsupported-format-character (F509),
    string-dot-format-invalid-format (F521),
    string-dot-format-extra-named-arguments (F522),
    string-dot-format-extra-positional-arguments (F523),
    string-dot-format-missing-arguments (F524),
    string-dot-format-mixing-automatic (F525),
    f-string-missing-placeholders (F541),
    multi-value-repeated-key-literal (F601),
    multi-value-repeated-key-variable (F602),
    expressions-in-star-assignment (F621),
    multiple-starred-expressions (F622),
    assert-tuple (F631),
    is-literal (F632),
    invalid-print-syntax (F633),
    if-tuple (F634),
    break-outside-loop (F701),
    continue-outside-loop (F702),
    yield-outside-function (F704),
    return-outside-function (F706),
    default-except-not-last (F707),
    forward-annotation-syntax-error (F722),
    redefined-while-unused (F811),
    undefined-name (F821),
    undefined-export (F822),
    undefined-local (F823),
    unused-variable (F841),
    unused-annotation (F842),
    raise-not-implemented (F901),
    MultipleImportsOnOneLine,
    ModuleImportNotAtTopOfFile,
    MultipleStatementsOnOneLineColon,
    MultipleStatementsOnOneLineSemicolon,
    UselessSemicolon,
    NoneComparison,
    TrueFalseComparison,
    NotInTest,
    NotIsTest,
    TypeComparison,
    BareExcept,
    LambdaAssignment,
    AmbiguousVariableName,
    AmbiguousClassName,
    AmbiguousFunctionName,
    IOError,
    SyntaxError,
    UnusedImport,
    ImportShadowedByLoopVar,
    UndefinedLocalWithImportStar,
    LateFutureImport,
    UndefinedLocalWithImportStarUsage,
    UndefinedLocalWithNestedImportStarUsage,
    FutureFeatureNotDefined,
    PercentFormatInvalidFormat,
    PercentFormatExpectedMapping,
    PercentFormatExpectedSequence,
    PercentFormatExtraNamedArguments,
    PercentFormatMissingArgument,
    PercentFormatMixedPositionalAndNamed,
    PercentFormatPositionalCountMismatch,
    PercentFormatStarRequiresSequence,
    PercentFormatUnsupportedFormatCharacter,
    StringDotFormatInvalidFormat,
    StringDotFormatExtraNamedArguments,
    StringDotFormatExtraPositionalArguments,
    StringDotFormatMissingArguments,
    StringDotFormatMixingAutomatic,
    FStringMissingPlaceholders,
    MultiValueRepeatedKeyLiteral,
    MultiValueRepeatedKeyVariable,
    ExpressionsInStarAssignment,
    MultipleStarredExpressions,
    AssertTuple,
    IsLiteral,
    InvalidPrintSyntax,
    IfTuple,
    BreakOutsideLoop,
    ContinueOutsideLoop,
    YieldOutsideFunction,
    ReturnOutsideFunction,
    DefaultExceptNotLast,
    ForwardAnnotationSyntaxError,
    RedefinedWhileUnused,
    UndefinedName,
    UndefinedExport,
    UndefinedLocal,
    UnusedVariable,
    UnusedAnnotation,
    RaiseNotImplemented,
]
linter.per_file_ignores = {}
linter.safety_table.forced_safe = []
@@ -203,7 +202,7 @@ linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = true
linter.ignore_init_module_imports = false
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
@@ -233,7 +232,7 @@ linter.flake8_bandit.check_typed_exception = false
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.builtins_ignorelist = []
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|,\s)\d{4})*
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+(\(C\)\s+)?\d{4}(-\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
@@ -243,22 +242,7 @@ linter.flake8_gettext.functions_names = [
    ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
    altair = alt,
    holoviews = hv,
    matplotlib = mpl,
    matplotlib.pyplot = plt,
    networkx = nx,
    numpy = np,
    pandas = pd,
    panel = pn,
    plotly.express = px,
    polars = pl,
    pyarrow = pa,
    seaborn = sns,
    tensorflow = tf,
    tkinter = tk,
}
linter.flake8_import_conventions.aliases = {"matplotlib": "mpl", "matplotlib.pyplot": "plt", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "networkx": "nx", "plotly.express": "px", "polars": "pl", "numpy": "np", "panel": "pn", "pyarrow": "pa", "altair": "alt", "tkinter": "tk", "holoviews": "hv"}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = true
@@ -328,7 +312,6 @@ linter.isort.section_order = [
    known { type = first_party },
    known { type = local_folder },
]
linter.isort.default_section = known { type = third_party }
linter.isort.no_sections = false
linter.isort.from_first = false
linter.isort.length_sort = false
@@ -383,3 +366,4 @@ formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic

----- stderr -----

@@ -1,105 +0,0 @@
//! Tests for the --version command
use std::fs;
use std::process::Command;

use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;

const BIN_NAME: &str = "ruff";
const VERSION_FILTER: [(&str, &str); 1] = [(
    r"\d+\.\d+\.\d+(\+\d+)?( \(\w{9} \d\d\d\d-\d\d-\d\d\))?",
    "[VERSION]",
)];
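Strings the filter above is written to normalize (editor-added, checked against the regex but illustrative rather than exhaustive): a bare semver, a `+N` build suffix, and a trailing `(<9-char hash> <date>)` segment all collapse to "[VERSION]".

const FILTER_INPUT_EXAMPLES: [&str; 3] = [
    "0.2.1",                       // plain semver
    "0.2.1+5",                     // with build number
    "0.2.1 (abc123def 2024-01-15)", // with commit hash and date
];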

#[test]
fn version_basics() {
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME)).arg("version"), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]

----- stderr -----
"###
        );
    });
}

/// `--config` is a global option,
/// so it's allowed to pass --config to subcommands such as `version`
/// -- the flag is simply ignored
#[test]
fn config_option_allowed_but_ignored() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_dot_toml = tempdir.path().join("ruff.toml");
    fs::File::create(&ruff_dot_toml)?;
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME))
                .arg("version")
                .arg("--config")
                .arg(&ruff_dot_toml)
                .args(["--config", "lint.isort.extra-standard-library = ['foo', 'bar']"]), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]

----- stderr -----
"###
        );
    });
    Ok(())
}
#[test]
fn config_option_ignored_but_validated() {
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME))
                .arg("version")
                .args(["--config", "foo = bar"]), @r###"
success: false
exit_code: 2
----- stdout -----

----- stderr -----
error: invalid value 'foo = bar' for '--config <CONFIG_OPTION>'

tip: A `--config` flag must either be a path to a `.toml` configuration file
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
option

The supplied argument is not valid TOML:

TOML parse error at line 1, column 7
  |
1 | foo = bar
  |       ^
invalid string
expected `"`, `'`

For more information, try '--help'.
"###
        );
    });
}

/// `--isolated` is also a global option,
#[test]
fn isolated_option_allowed() {
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME)).arg("version").arg("--isolated"), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]

----- stderr -----
"###
        );
    });
}
@@ -11,9 +11,9 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
itertools = { workspace = true }
glob = { workspace = true }
globset = { workspace = true }
itertools = { workspace = true }
regex = { workspace = true }
filetime = { workspace = true }
seahash = { workspace = true }

@@ -65,7 +65,7 @@ use seahash::SeaHasher;
/// The main reason is that hashes and cache keys have different constraints:
///
/// * Cache keys are less performance sensitive: Hashes must be super fast to compute for performant hashed-collections. That's
/// why some standard types don't implement [`Hash`] where it would be safe to implement [`CacheKey`], e.g. `HashSet`
/// why some standard types don't implement [`Hash`] where it would be safe to to implement [`CacheKey`], e.g. `HashSet`
/// * Cache keys must be deterministic where hash keys do not have this constraint. That's why pointers don't implement [`CacheKey`] but they implement [`Hash`].
/// * Ideally, cache keys are portable
///
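A hypothetical sketch (editor-added, not the crate's actual API) of the distinction the doc comment draws: a cache key must be deterministic across runs, so only stable, content-derived inputs participate, never pointers or addresses.

use std::hash::{Hash, Hasher};

// Feed only run-independent data into the key so it stays deterministic.
fn stable_cache_key<H: Hasher>(path: &str, file_len: u64, hasher: &mut H) {
    path.hash(hasher);
    file_len.hash(hasher);
}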

@@ -22,7 +22,7 @@ ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"] }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}

anyhow = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
@@ -31,6 +31,7 @@ imara-diff = { workspace = true }
indicatif = { workspace = true }
itertools = { workspace = true }
libcst = { workspace = true }
once_cell = { workspace = true }
pretty_assertions = { workspace = true }
rayon = { workspace = true }
regex = { workspace = true }

@@ -27,7 +27,7 @@ use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::EnvFilter;

use ruff::args::{ConfigArguments, FormatArguments, FormatCommand, GlobalConfigArgs, LogLevelArgs};
use ruff::args::{ConfigArguments, FormatArguments, FormatCommand, LogLevelArgs};
use ruff::resolve::resolve;
use ruff_formatter::{FormatError, LineWidth, PrintError};
use ruff_linter::logging::LogLevel;
@@ -43,7 +43,7 @@ fn parse_cli(dirs: &[PathBuf]) -> anyhow::Result<(FormatArguments, ConfigArgumen
        .no_binary_name(true)
        .get_matches_from(dirs);
    let arguments: FormatCommand = FormatCommand::from_arg_matches(&args_matches)?;
    let (cli, config_arguments) = arguments.partition(GlobalConfigArgs::default())?;
    let (cli, config_arguments) = arguments.partition()?;
    Ok((cli, config_arguments))
}

@@ -52,7 +52,11 @@ fn find_pyproject_config(
    cli: &FormatArguments,
    config_arguments: &ConfigArguments,
) -> anyhow::Result<PyprojectConfig> {
    let mut pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
    let mut pyproject_config = resolve(
        cli.isolated,
        config_arguments,
        cli.stdin_filename.as_deref(),
    )?;
    // We don't want to format pyproject.toml
    pyproject_config.settings.file_resolver.include = FilePatternSet::try_from_iter([
        FilePattern::Builtin("*.py"),
@@ -134,7 +138,7 @@ impl Statistics {
        }
    }

    /// We currently prefer the similarity index, but i'd like to keep this around
    /// We currently prefer the the similarity index, but i'd like to keep this around
    #[allow(clippy::cast_precision_loss, unused)]
    pub(crate) fn jaccard_index(&self) -> f32 {
        self.intersection as f32 / (self.black_input + self.ruff_output + self.intersection) as f32

@@ -1,7 +1,6 @@
//! Generate Markdown documentation for applicable rules.
#![allow(clippy::print_stdout, clippy::print_stderr)]

use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;

@@ -98,13 +97,12 @@ pub(crate) fn main(args: &Args) -> Result<()> {
fn process_documentation(documentation: &str, out: &mut String, rule_name: &str) {
    let mut in_options = false;
    let mut after = String::new();
    let mut referenced_options = HashSet::new();

    // HACK: This is an ugly regex hack that's necessary because mkdocs uses
    // a non-CommonMark-compliant Markdown parser, which doesn't support code
    // tags in link definitions
    // (see https://github.com/Python-Markdown/markdown/issues/280).
    let documentation = Regex::new(r"\[`([^`]*?)`]($|[^\[(])").unwrap().replace_all(
    let documentation = Regex::new(r"\[`([^`]*?)`]($|[^\[])").unwrap().replace_all(
        documentation,
        |caps: &Captures| {
            format!(
@@ -137,7 +135,6 @@ fn process_documentation(documentation: &str, out: &mut String, rule_name: &str)
                let anchor = option.replace('.', "_");
                out.push_str(&format!("- [`{option}`][{option}]\n"));
                after.push_str(&format!("[{option}]: ../settings.md#{anchor}\n"));
                referenced_options.insert(option);

                continue;
            }
@@ -145,20 +142,6 @@ fn process_documentation(documentation: &str, out: &mut String, rule_name: &str)

        out.push_str(line);
    }

    let re = Regex::new(r"\[`([^`]*?)`]\[(.*?)]").unwrap();
    for (_, [option, _]) in re.captures_iter(&documentation).map(|c| c.extract()) {
        if let Some(OptionEntry::Field(field)) = Options::metadata().find(option) {
            if referenced_options.insert(option) {
                let anchor = option.replace('.', "_");
                after.push_str(&format!("[{option}]: ../settings.md#{anchor}\n"));
            }
            if field.deprecated.is_some() {
                eprintln!("Rule {rule_name} references deprecated option {option}.");
            }
        }
    }

    if !after.is_empty() {
        out.push('\n');
        out.push('\n');
@@ -176,7 +159,7 @@ mod tests {
        process_documentation(
            "
See also [`lint.mccabe.max-complexity`] and [`lint.task-tags`].
Something [`else`][other]. Some [link](https://example.com).
Something [`else`][other].

## Options

@@ -191,7 +174,7 @@ Something [`else`][other]. Some [link](https://example.com).
            output,
            "
See also [`lint.mccabe.max-complexity`][lint.mccabe.max-complexity] and [`lint.task-tags`][lint.task-tags].
Something [`else`][other]. Some [link](https://example.com).
Something [`else`][other].

## Options

@@ -180,22 +180,8 @@ pub(crate) fn generate() -> String {
        .map(|rule| (rule.upstream_category(&linter), rule))
        .into_group_map();

    let mut rules_by_upstream_category: Vec<_> = rules_by_upstream_category.iter().collect();

    // Sort the upstream categories alphabetically by prefix.
    rules_by_upstream_category.sort_by(|(a, _), (b, _)| {
        a.as_ref()
            .map(|category| category.prefix)
            .unwrap_or_default()
            .cmp(
                b.as_ref()
                    .map(|category| category.prefix)
                    .unwrap_or_default(),
            )
    });

    if rules_by_upstream_category.len() > 1 {
        for (opt, rules) in rules_by_upstream_category {
        for (opt, rules) in &rules_by_upstream_category {
            if opt.is_some() {
                let UpstreamCategoryAndPrefix { category, prefix } = opt.unwrap();
                table_out.push_str(&format!("#### {category} ({prefix})"));

@@ -4,8 +4,8 @@

use anyhow::Result;
use clap::{Parser, Subcommand};
use ruff::{args::GlobalConfigArgs, check};
use ruff_linter::logging::set_up_logging;
use ruff::check;
use ruff_linter::logging::{set_up_logging, LogLevel};
use std::process::ExitCode;

mod format_dev;
@@ -28,8 +28,6 @@ const ROOT_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../../");
struct Args {
    #[command(subcommand)]
    command: Command,
    #[clap(flatten)]
    global_options: GlobalConfigArgs,
}

#[derive(Subcommand)]
@@ -59,6 +57,8 @@ enum Command {
    Repeat {
        #[clap(flatten)]
        args: ruff::args::CheckCommand,
        #[clap(flatten)]
        log_level_args: ruff::args::LogLevelArgs,
        /// Run this many times
        #[clap(long)]
        repeat: usize,
@@ -75,12 +75,9 @@ enum Command {
}

fn main() -> Result<ExitCode> {
    let Args {
        command,
        global_options,
    } = Args::parse();
    let args = Args::parse();
    #[allow(clippy::print_stdout)]
    match command {
    match args.command {
        Command::GenerateAll(args) => generate_all::main(&args)?,
        Command::GenerateJSONSchema(args) => generate_json_schema::main(&args)?,
        Command::GenerateRulesTable => println!("{}", generate_rules_table::generate()),
@@ -92,12 +89,14 @@ fn main() -> Result<ExitCode> {
        Command::PrintTokens(args) => print_tokens::main(&args)?,
        Command::RoundTrip(args) => round_trip::main(&args)?,
        Command::Repeat {
            args: subcommand_args,
            args,
            repeat,
            log_level_args,
        } => {
            set_up_logging(global_options.log_level())?;
            let log_level = LogLevel::from(&log_level_args);
            set_up_logging(&log_level)?;
            for _ in 0..repeat {
                check(subcommand_args.clone(), global_options.clone())?;
                check(args.clone(), log_level)?;
            }
        }
        Command::FormatDev(args) => {

@@ -71,14 +71,6 @@ impl Diagnostic {
        }
    }

    /// Consumes `self` and returns a new `Diagnostic` with the given parent node.
    #[inline]
    #[must_use]
    pub fn with_parent(mut self, parent: TextSize) -> Self {
        self.set_parent(parent);
        self
    }

    /// Set the location of the diagnostic's parent node.
    #[inline]
    pub fn set_parent(&mut self, parent: TextSize) {

@@ -25,6 +25,11 @@ pub trait Violation: Debug + PartialEq + Eq {
    /// The message used to describe the violation.
    fn message(&self) -> String;

    /// The explanation used in documentation and elsewhere.
    fn explanation() -> Option<&'static str> {
        None
    }

    // TODO(micha): Move `fix_title` to `Fix`, add new `advice` method that is shown as an advice.
    // Change the `Diagnostic` renderer to show the advice, and render the fix message after the `Suggested fix: <here>`

@@ -45,6 +50,11 @@ pub trait AlwaysFixableViolation: Debug + PartialEq + Eq {
    /// The message used to describe the violation.
    fn message(&self) -> String;

    /// The explanation used in documentation and elsewhere.
    fn explanation() -> Option<&'static str> {
        None
    }

    /// The title displayed for the available fix.
    fn fix_title(&self) -> String;

@@ -61,6 +71,10 @@ impl<V: AlwaysFixableViolation> Violation for V {
        <Self as AlwaysFixableViolation>::message(self)
    }

    fn explanation() -> Option<&'static str> {
        <Self as AlwaysFixableViolation>::explanation()
    }

    fn fix_title(&self) -> Option<String> {
        Some(<Self as AlwaysFixableViolation>::fix_title(self))
    }
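A minimal sketch (editor-added; the rule type and its message text are hypothetical) of how the extended trait surface above can be implemented. Implementing `AlwaysFixableViolation` yields a `Violation` via the blanket impl shown in the diff:

#[derive(Debug, PartialEq, Eq)]
struct ExampleRule;

impl AlwaysFixableViolation for ExampleRule {
    fn message(&self) -> String {
        "Example violation message".to_string()
    }
    // Longer documentation text; the `rule --explain` snapshot earlier in this
    // diff suggests this is where such explanations surface.
    fn explanation() -> Option<&'static str> {
        Some("A longer explanation of why this pattern is flagged.")
    }
    fn fix_title(&self) -> String {
        "Apply the example fix".to_string()
    }
}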

@@ -24,6 +24,7 @@ tracing = { workspace = true }
unicode-width = { workspace = true }

[dev-dependencies]
insta = { workspace = true }

[features]
serde = ["dep:serde", "ruff_text_size/serde"]

@@ -37,7 +37,7 @@ pub trait Buffer {
    #[doc(hidden)]
    fn elements(&self) -> &[FormatElement];

    /// Glue for usage of the [`write!`] macro with implementers of this trait.
    /// Glue for usage of the [`write!`] macro with implementors of this trait.
    ///
    /// This method should generally not be invoked manually, but rather through the [`write!`] macro itself.
    ///
@@ -344,7 +344,7 @@ pub struct RemoveSoftLinesBuffer<'a, Context> {

    /// Caches the interned elements after the soft line breaks have been removed.
    ///
    /// The `key` is the [Interned] element as it has been passed to [`Self::write_element`] or the child of another
    /// The `key` is the [Interned] element as it has been passed to [Self::write_element] or the child of another
    /// [Interned] element. The `value` is the matching document of the key where all soft line breaks have been removed.
    ///
    /// It's fine to not snapshot the cache. The worst that can happen is that it holds on interned elements

@@ -138,7 +138,7 @@ pub const fn empty_line() -> Line {
///
/// # Examples
///
/// The line breaks are emitted as spaces if the enclosing `Group` fits on a single line:
/// The line breaks are emitted as spaces if the enclosing `Group` fits on a a single line:
/// ```
/// use ruff_formatter::{format, format_args};
/// use ruff_formatter::prelude::*;

@@ -18,10 +18,10 @@ pub enum FormatError {
    InvalidDocument(InvalidDocumentError),

    /// Formatting failed because some content encountered a situation where a layout
    /// choice by an enclosing [`crate::Format`] resulted in a poor layout for a child [`crate::Format`].
    /// choice by an enclosing [crate::Format] resulted in a poor layout for a child [crate::Format].
    ///
    /// It's up to an enclosing [`crate::Format`] to handle the error and pick another layout.
    /// This error should not be raised if there's no outer [`crate::Format`] handling the poor layout error,
    /// It's up to an enclosing [crate::Format] to handle the error and pick another layout.
    /// This error should not be raised if there's no outer [crate::Format] handling the poor layout error,
    /// avoiding that formatting of the whole document fails.
    PoorLayout,
}

@@ -19,10 +19,10 @@ use ruff_text_size::TextSize;
/// Use the helper functions like [`crate::builders::space`], [`crate::builders::soft_line_break`] etc. defined in this file to create elements.
#[derive(Clone, Eq, PartialEq)]
pub enum FormatElement {
    /// A space token, see [`crate::builders::space`] for documentation.
    /// A space token, see [crate::builders::space] for documentation.
    Space,

    /// A new line, see [`crate::builders::soft_line_break`], [`crate::builders::hard_line_break`], and [`crate::builders::soft_line_break_or_space`] for documentation.
    /// A new line, see [crate::builders::soft_line_break], [crate::builders::hard_line_break], and [crate::builders::soft_line_break_or_space] for documentation.
    Line(LineMode),

    /// Forces the parent group to print in expanded mode.
@@ -108,13 +108,13 @@ impl std::fmt::Debug for FormatElement {

#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum LineMode {
    /// See [`crate::builders::soft_line_break_or_space`] for documentation.
    /// See [crate::builders::soft_line_break_or_space] for documentation.
    SoftOrSpace,
    /// See [`crate::builders::soft_line_break`] for documentation.
    /// See [crate::builders::soft_line_break] for documentation.
    Soft,
    /// See [`crate::builders::hard_line_break`] for documentation.
    /// See [crate::builders::hard_line_break] for documentation.
    Hard,
    /// See [`crate::builders::empty_line`] for documentation.
    /// See [crate::builders::empty_line] for documentation.
    Empty,
}

@@ -1,22 +1,23 @@
use crate::format_element::PrintMode;
use crate::{GroupId, TextSize};
use std::cell::Cell;
use std::num::NonZeroU8;

use crate::format_element::PrintMode;
use crate::{GroupId, TextSize};

/// A Tag marking the start and end of some content to which some special formatting should be applied.
///
/// Tags always come in pairs of a start and an end tag and the styling defined by this tag
/// will be applied to all elements in between the start/end tags.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Tag {
    /// Indents the content one level deeper, see [`crate::builders::indent`] for documentation and examples.
    /// Indents the content one level deeper, see [crate::builders::indent] for documentation and examples.
    StartIndent,
    EndIndent,

    /// Variant of [`TagKind::Indent`] that indents content by a number of spaces. For example, `Align(2)`
    /// Variant of [TagKind::Indent] that indents content by a number of spaces. For example, `Align(2)`
    /// indents any content following a line break by an additional two spaces.
    ///
    /// Nesting (Aligns)[`TagKind::Align`] has the effect that all except the most inner align are handled as (Indent)[`TagKind::Indent`].
    /// Nesting (Aligns)[TagKind::Align] has the effect that all except the most inner align are handled as (Indent)[TagKind::Indent].
    StartAlign(Align),
    EndAlign,

@@ -29,7 +30,7 @@ pub enum Tag {
    /// - on a single line: Omitting `LineMode::Soft` line breaks and printing spaces for `LineMode::SoftOrSpace`
    /// - on multiple lines: Printing all line breaks
    ///
    /// See [`crate::builders::group`] for documentation and examples.
    /// See [crate::builders::group] for documentation and examples.
    StartGroup(Group),
    EndGroup,

@@ -44,22 +45,22 @@ pub enum Tag {
    EndConditionalGroup,

    /// Allows to specify content that gets printed depending on whatever the enclosing group
    /// is printed on a single line or multiple lines. See [`crate::builders::if_group_breaks`] for examples.
    /// is printed on a single line or multiple lines. See [crate::builders::if_group_breaks] for examples.
    StartConditionalContent(Condition),
    EndConditionalContent,

    /// Optimized version of [`Tag::StartConditionalContent`] for the case where some content
    /// Optimized version of [Tag::StartConditionalContent] for the case where some content
    /// should be indented if the specified group breaks.
    StartIndentIfGroupBreaks(GroupId),
    EndIndentIfGroupBreaks,

    /// Concatenates multiple elements together with a given separator printed in either
    /// flat or expanded mode to fill the print width. Expect that the content is a list of alternating
    /// [element, separator] See [`crate::Formatter::fill`].
    /// [element, separator] See [crate::Formatter::fill].
    StartFill,
    EndFill,

    /// Entry inside of a [`Tag::StartFill`]
    /// Entry inside of a [Tag::StartFill]
    StartEntry,
    EndEntry,

@@ -77,7 +78,7 @@ pub enum Tag {
    /// Special semantic element marking the content with a label.
    /// This does not directly influence how the content will be printed.
    ///
    /// See [`crate::builders::labelled`] for documentation.
    /// See [crate::builders::labelled] for documentation.
    StartLabelled(LabelId),
    EndLabelled,

@@ -99,6 +100,10 @@ pub enum Tag {
}

impl Tag {
    pub const fn align(count: NonZeroU8) -> Tag {
        Tag::StartAlign(Align(count))
    }

    /// Returns `true` if `self` is any start tag.
    pub const fn is_start(&self) -> bool {
        matches!(

@@ -545,10 +545,6 @@ impl PrintedRange {
        &self.code
    }

    pub fn into_code(self) -> String {
        self.code
    }

    /// The range the formatted code corresponds to in the source document.
    pub fn source_range(&self) -> TextRange {
        self.source_range

@@ -78,28 +78,27 @@ impl<'a> PrintQueue<'a> {
impl<'a> Queue<'a> for PrintQueue<'a> {
    fn pop(&mut self) -> Option<&'a FormatElement> {
        let elements = self.element_slices.last_mut()?;
        elements.next().or_else(
            #[cold]
            || {
                self.element_slices.pop();
                let elements = self.element_slices.last_mut()?;
                elements.next()
            },
        )
        elements.next().or_else(|| {
            self.element_slices.pop();
            let elements = self.element_slices.last_mut()?;
            elements.next()
        })
    }

    fn top_with_interned(&self) -> Option<&'a FormatElement> {
        let mut slices = self.element_slices.iter().rev();
        let slice = slices.next()?;

        slice.as_slice().first().or_else(
            #[cold]
            || {
                slices
                    .next()
                    .and_then(|next_elements| next_elements.as_slice().first())
            },
        )
        match slice.as_slice().first() {
            Some(element) => Some(element),
            None => {
                if let Some(next_elements) = slices.next() {
                    next_elements.as_slice().first()
                } else {
                    None
                }
            }
        }
    }

    fn extend_back(&mut self, elements: &'a [FormatElement]) {
@@ -147,30 +146,24 @@ impl<'a, 'print> FitsQueue<'a, 'print> {

impl<'a, 'print> Queue<'a> for FitsQueue<'a, 'print> {
    fn pop(&mut self) -> Option<&'a FormatElement> {
        self.queue.pop().or_else(
            #[cold]
            || {
                if let Some(next_slice) = self.rest_elements.next_back() {
                    self.queue.extend_back(next_slice.as_slice());
                    self.queue.pop()
                } else {
                    None
                }
            },
        )
        self.queue.pop().or_else(|| {
            if let Some(next_slice) = self.rest_elements.next_back() {
                self.queue.extend_back(next_slice.as_slice());
                self.queue.pop()
            } else {
                None
            }
        })
    }

    fn top_with_interned(&self) -> Option<&'a FormatElement> {
        self.queue.top_with_interned().or_else(
            #[cold]
            || {
                if let Some(next_elements) = self.rest_elements.as_slice().last() {
                    next_elements.as_slice().first()
                } else {
                    None
                }
            },
        )
        self.queue.top_with_interned().or_else(|| {
            if let Some(next_elements) = self.rest_elements.as_slice().last() {
                next_elements.as_slice().first()
            } else {
                None
            }
        })
    }

    fn extend_back(&mut self, elements: &'a [FormatElement]) {
@@ -189,6 +182,27 @@ impl<'a, 'print> Queue<'a> for FitsQueue<'a, 'print> {
    }
}

/// Iterator that calls [`Queue::pop`] until it reaches the end of the document.
///
/// The iterator traverses into the content of any [`FormatElement::Interned`].
pub(super) struct QueueIterator<'a, 'q, Q: Queue<'a>> {
    queue: &'q mut Q,
    lifetime: PhantomData<&'a ()>,
}

impl<'a, Q> Iterator for QueueIterator<'a, '_, Q>
where
    Q: Queue<'a>,
{
    type Item = &'a FormatElement;

    fn next(&mut self) -> Option<Self::Item> {
        self.queue.pop()
    }
}

impl<'a, Q> FusedIterator for QueueIterator<'a, '_, Q> where Q: Queue<'a> {}

pub(super) struct QueueContentIterator<'a, 'q, Q: Queue<'a>> {
    queue: &'q mut Q,
    kind: TagKind,

@@ -8,6 +8,9 @@ pub(super) trait Stack<T> {

    /// Returns the last element if any
    fn top(&self) -> Option<&T>;

    /// Returns `true` if the stack is empty
    fn is_empty(&self) -> bool;
}

impl<T> Stack<T> for Vec<T> {
@@ -22,6 +25,10 @@ impl<T> Stack<T> for Vec<T> {
    fn top(&self) -> Option<&T> {
        self.last()
    }

    fn is_empty(&self) -> bool {
        self.is_empty()
    }
}

/// A Stack that is stacked on top of another stack. Guarantees that the underlying stack remains unchanged.
@@ -73,6 +80,10 @@ where
        .last()
        .or_else(|| self.original.as_slice().last())
    }

    fn is_empty(&self) -> bool {
        self.stack.is_empty() && self.original.len() == 0
    }
}
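Illustrative use of the `is_empty` method added to the trait above (editor-added sketch, assuming the `Stack` trait is in scope; the call is fully qualified to avoid `Vec`'s inherent `is_empty`):

fn demo_stack_is_empty() {
    let mut stack: Vec<u32> = Vec::new();
    assert!(Stack::is_empty(&stack));
    stack.push(1);
    assert!(!Stack::is_empty(&stack));
}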

#[cfg(test)]

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.4.3"
version = "0.2.1"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,6 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics", features = ["serde"] }
ruff_index = { path = "../ruff_index" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast", features = ["serde"] }
@@ -59,6 +60,7 @@ regex = { workspace = true }
result-like = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
semver = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
similar = { workspace = true }
@@ -74,9 +76,11 @@ url = { workspace = true }

[dev-dependencies]
insta = { workspace = true }
pretty_assertions = { workspace = true }
test-case = { workspace = true }
# Disable colored output in tests
colored = { workspace = true, features = ["no-color"] }
tempfile = { workspace = true }

[features]
default = []

@@ -28,40 +28,3 @@ dictionary = {
}

#import os # noqa

# case 1:
# try:
# try: # with comment
# try: print()
# except:
# except Foo:
# except Exception as e: print(e)


# Script tag without an opening tag (Error)

# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]
# ///

# Script tag (OK)

# /// script
# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]
# ///

# Script tag without a closing tag (OK)

# /// script
# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]

@@ -18,7 +18,3 @@ func("0.0.0.0")
def my_func():
    x = "0.0.0.0"
    print(x)


# Implicit string concatenation
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0"

@@ -18,13 +18,6 @@ with open("/dev/shm/unit/test", "w") as f:
with open("/foo/bar", "w") as f:
    f.write("def")

# Implicit string concatenation
with open("/tmp/" "abc", "w") as f:
    f.write("def")

with open("/tmp/abc" f"/tmp/abc", "w") as f:
    f.write("def")

# Using `tempfile` module should be ok
import tempfile
from tempfile import TemporaryDirectory

@@ -17,9 +17,3 @@ urllib.request.URLopener().open(fullurl='http://www.google.com')
urllib.request.URLopener().open('http://www.google.com')
urllib.request.URLopener().open('file:///foo/bar/baz')
urllib.request.URLopener().open(url)

urllib.request.urlopen(url=urllib.request.Request('http://www.google.com'))
urllib.request.urlopen(url=urllib.request.Request('http://www.google.com'), **kwargs)
urllib.request.urlopen(urllib.request.Request('http://www.google.com'))
urllib.request.urlopen(urllib.request.Request('file:///foo/bar/baz'))
urllib.request.urlopen(urllib.request.Request(url))

@@ -1,22 +0,0 @@
import os
import random

import a_lib

# OK
random.SystemRandom()

# Errors
random.Random()
random.random()
random.randrange()
random.randint()
random.choice()
random.choices()
random.uniform()
random.triangular()
random.randbytes()

# Unrelated
os.urandom()
a_lib.random()
@@ -1,47 +1,52 @@
import crypt
import hashlib
from hashlib import new as hashlib_new
from hashlib import sha1 as hashlib_sha1

# Errors
# Invalid

hashlib.new('md5')

hashlib.new('md4', b'test')

hashlib.new(name='md5', data=b'test')

hashlib.new('MD4', data=b'test')

hashlib.new('sha1')

hashlib.new('sha1', data=b'test')

hashlib.new('sha', data=b'test')

hashlib.new(name='SHA', data=b'test')

hashlib.sha(data=b'test')

hashlib.md5()

hashlib_new('sha1')

hashlib_sha1('sha1')

# usedforsecurity arg only available in Python 3.9+
hashlib.new('sha1', usedforsecurity=True)

crypt.crypt("test", salt=crypt.METHOD_CRYPT)
crypt.crypt("test", salt=crypt.METHOD_MD5)
crypt.crypt("test", salt=crypt.METHOD_BLOWFISH)
crypt.crypt("test", crypt.METHOD_BLOWFISH)
# Valid

crypt.mksalt(crypt.METHOD_CRYPT)
crypt.mksalt(crypt.METHOD_MD5)
crypt.mksalt(crypt.METHOD_BLOWFISH)

# OK
hashlib.new('sha256')

hashlib.new('SHA512')

hashlib.sha256(data=b'test')

# usedforsecurity arg only available in Python 3.9+
hashlib_new(name='sha1', usedforsecurity=False)

# usedforsecurity arg only available in Python 3.9+
hashlib_sha1(name='sha1', usedforsecurity=False)

# usedforsecurity arg only available in Python 3.9+
hashlib.md4(usedforsecurity=False)

# usedforsecurity arg only available in Python 3.9+
hashlib.new(name='sha256', usedforsecurity=False)

crypt.crypt("test")
crypt.crypt("test", salt=crypt.METHOD_SHA256)
crypt.crypt("test", salt=crypt.METHOD_SHA512)

crypt.mksalt()
crypt.mksalt(crypt.METHOD_SHA256)
crypt.mksalt(crypt.METHOD_SHA512)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import commands
|
||||
import popen2
|
||||
@@ -17,8 +16,6 @@ popen2.Popen3("true")
|
||||
popen2.Popen4("true")
|
||||
commands.getoutput("true")
|
||||
commands.getstatusoutput("true")
|
||||
subprocess.getoutput("true")
|
||||
subprocess.getstatusoutput("true")
|
||||
|
||||
|
||||
# Check command argument looks unsafe.
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
# Errors
|
||||
User.objects.filter(username='admin').extra(dict(could_be='insecure'))
|
||||
User.objects.filter(username='admin').extra(select=dict(could_be='insecure'))
|
||||
User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
User.objects.filter(username='admin').extra(where=['%secure' % 'nos'])
|
||||
User.objects.filter(username='admin').extra(where=['{}secure'.format('no')])
|
||||
|
||||
query = '"username") AS "username", * FROM "auth_user" WHERE 1=1 OR "username"=? --'
|
||||
User.objects.filter(username='admin').extra(select={'test': query})
|
||||
|
||||
where_var = ['1=1) OR 1=1 AND (1=1']
|
||||
User.objects.filter(username='admin').extra(where=where_var)
|
||||
|
||||
where_str = '1=1) OR 1=1 AND (1=1'
|
||||
User.objects.filter(username='admin').extra(where=[where_str])
|
||||
|
||||
tables_var = ['django_content_type" WHERE "auth_user"."username"="admin']
|
||||
User.objects.all().extra(tables=tables_var).distinct()
|
||||
|
||||
tables_str = 'django_content_type" WHERE "auth_user"."username"="admin'
|
||||
User.objects.all().extra(tables=[tables_str]).distinct()
|
||||
|
||||
# OK
|
||||
User.objects.filter(username='admin').extra(
|
||||
select={'test': 'secure'},
|
||||
where=['secure'],
|
||||
tables=['secure']
|
||||
)
|
||||
User.objects.filter(username='admin').extra({'test': 'secure'})
|
||||
User.objects.filter(username='admin').extra(select={'test': 'secure'})
|
||||
User.objects.filter(username='admin').extra(where=['secure'])
|
||||
@@ -124,8 +124,3 @@ try:
|
||||
pass
|
||||
except Exception:
|
||||
error("...", exc_info=True)
|
||||
|
||||
try:
|
||||
...
|
||||
except Exception as e:
|
||||
raise ValueError from e
|
||||
|
||||
@@ -31,7 +31,8 @@ def function(
|
||||
kwonly_nonboolvalued_boolhint: bool = 1,
|
||||
kwonly_nonboolvalued_boolstrhint: "bool" = 1,
|
||||
**kw,
|
||||
): ...
|
||||
):
|
||||
...
|
||||
|
||||
|
||||
def used(do):
|
||||
@@ -118,39 +119,3 @@ def func(x: bool):
|
||||
|
||||
|
||||
settings(True)
|
||||
|
||||
|
||||
from dataclasses import dataclass, InitVar
|
||||
|
||||
|
||||
@dataclass
|
||||
class Fit:
|
||||
force: InitVar[bool] = False
|
||||
|
||||
def __post_init__(self, force: bool) -> None:
|
||||
print(force)
|
||||
|
||||
|
||||
Fit(force=True)
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/10356
|
||||
from django.db.models import Case, Q, Value, When
|
||||
|
||||
|
||||
qs.annotate(
|
||||
is_foo_or_bar=Case(
|
||||
When(Q(is_foo=True) | Q(is_bar=True)),
|
||||
then=Value(True),
|
||||
),
|
||||
default=Value(False),
|
||||
)
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/10485
|
||||
from pydantic import Field
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
foo: bool = Field(True, exclude=True)
|
||||
|
||||
@@ -6,25 +6,6 @@ def this_is_a_bug():
|
||||
print("Ooh, callable! Or is it?")
|
||||
|
||||
|
||||
def still_a_bug():
|
||||
import builtins
|
||||
o = object()
|
||||
if builtins.hasattr(o, "__call__"):
|
||||
print("B U G")
|
||||
if builtins.getattr(o, "__call__", False):
|
||||
print("B U G")
|
||||
|
||||
|
||||
def trickier_fix_for_this_one():
|
||||
o = object()
|
||||
|
||||
def callable(x):
|
||||
return True
|
||||
|
||||
if hasattr(o, "__call__"):
|
||||
print("STILL a bug!")
|
||||
|
||||
|
||||
def this_is_fine():
|
||||
o = object()
|
||||
if callable(o):
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
def foo(a: list = []):
|
||||
raise NotImplementedError("")
|
||||
|
||||
|
||||
def bar(a: dict = {}):
|
||||
""" This one also has a docstring"""
|
||||
raise NotImplementedError("and has some text in here")
|
||||
|
||||
|
||||
def baz(a: list = []):
|
||||
"""This one raises a different exception"""
|
||||
raise IndexError()
|
||||
|
||||
|
||||
def qux(a: list = []):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def quux(a: list = []):
|
||||
raise NotImplemented
|
||||
@@ -23,15 +23,3 @@ def okay(data: custom.ImmutableTypeA = foo()):
|
||||
|
||||
def error_due_to_missing_import(data: List[str] = Depends(None)):
|
||||
...
|
||||
|
||||
|
||||
class Class:
|
||||
pass
|
||||
|
||||
|
||||
def okay(obj=Class()):
|
||||
...
|
||||
|
||||
|
||||
def error(obj=OtherClass()):
|
||||
...
|
||||
|
||||
@@ -64,6 +64,3 @@ setattr(*foo, "bar", None)
# Regression test for: https://github.com/astral-sh/ruff/issues/7455#issuecomment-1739800901
getattr(self.
        registration.registry, '__name__')
-
-import builtins
-builtins.getattr(foo, "bar")

@@ -115,25 +115,25 @@ class non_keyword_abcmeta_2(abc.ABCMeta):  # safe


# very invalid code, but that's up to mypy et al to check
-class keyword_abc_1(metaclass=ABC):  # incorrect but outside scope of this check
+class keyword_abc_1(metaclass=ABC):  # safe
    def method(self):
        foo()


-class keyword_abc_2(metaclass=abc.ABC):  # incorrect but outside scope of this check
+class keyword_abc_2(metaclass=abc.ABC):  # safe
    def method(self):
        foo()


-class abc_set_class_variable_1(ABC):  # safe (abstract attribute)
+class abc_set_class_variable_1(ABC):  # safe
    foo: int


-class abc_set_class_variable_2(ABC):  # error (not an abstract attribute)
+class abc_set_class_variable_2(ABC):  # safe
    foo = 2


-class abc_set_class_variable_3(ABC):  # error (not an abstract attribute)
+class abc_set_class_variable_3(ABC):  # safe
    foo: int = 2

@@ -9,69 +9,62 @@ B030:

try:
    pass
-except 1:  # Error
+except 1:  # error
    pass

try:
    pass
-except (1, ValueError):  # Error
+except (1, ValueError):  # error
    pass

try:
    pass
-except (ValueError, (RuntimeError, (KeyError, TypeError))):  # Error
+except (ValueError, (RuntimeError, (KeyError, TypeError))):  # error
    pass

try:
    pass
-except (ValueError, *(RuntimeError, (KeyError, TypeError))):  # Error
+except (ValueError, *(RuntimeError, (KeyError, TypeError))):  # error
    pass


try:
    pass
-except (*a, *(RuntimeError, (KeyError, TypeError))):  # Error
+except (*a, *(RuntimeError, (KeyError, TypeError))):  # error
    pass

try:
    pass
-except* a + (RuntimeError, (KeyError, TypeError)):  # Error
+except (ValueError, *(RuntimeError, TypeError)):  # ok
+    pass
+
+try:
+    pass
+except (ValueError, *[RuntimeError, *(TypeError,)]):  # ok
+    pass
+
+
+try:
+    pass
+except (*a, *b):  # ok
    pass


try:
    pass
-except (ValueError, *(RuntimeError, TypeError)):  # OK
-    pass
-
-try:
-    pass
-except (ValueError, *[RuntimeError, *(TypeError,)]):  # OK
+except (*a, *(RuntimeError, TypeError)):  # ok
    pass


try:
    pass
-except (*a, *b):  # OK
+except (*a, *(b, c)):  # ok
    pass


try:
    pass
-except (*a, *(RuntimeError, TypeError)):  # OK
-    pass
-
-
-try:
-    pass
-except (*a, *(b, c)):  # OK
-    pass
-
-
-try:
-    pass
-except (*a, *(*b, *c)):  # OK
+except (*a, *(*b, *c)):  # ok
    pass

@@ -81,52 +74,5 @@ def what_to_catch():

try:
    pass
-except what_to_catch():  # OK
-    pass
-
-
-try:
-    pass
-except (a, b) + (c, d):  # OK
-    pass
-
-
-try:
-    pass
-except* (a, b) + (c, d):  # OK
-    pass
-
-
-try:
-    pass
-except* (a, (b) + (c)):  # OK
-    pass
-
-
-try:
-    pass
-except (a, b) + (c, d) + (e, f):  # OK
-    pass
-
-
-try:
-    pass
-except a + (b, c):  # OK
-    pass
-
-
-try:
-    pass
-except (ValueError, *(RuntimeError, TypeError), *((ArithmeticError,) + (EOFError,))):
-    pass
-
-
-try:
-    pass
-except ((a, b) + (c, d)) + ((e, f) + (g)):  # OK
-    pass
-
-try:
-    pass
-except (a, b) * (c, d):  # B030
+except what_to_catch():  # ok
    pass

Some files were not shown because too many files have changed in this diff