Compare commits


13 Commits

Author SHA1 Message Date
Charlie Marsh  4a7167520a  Match pylint logic  2023-03-17 17:31:34 -04:00
Charlie Marsh  41ab37e792  Rebase  2023-03-17 17:07:51 -04:00
Charlie Marsh  0b1806fdfa  Merge branch 'main' into useless_return  2023-03-17 17:04:37 -04:00
tomecki  d20474da87  fix for autofix  2023-02-25 13:12:27 +01:00
tomecki  b7df7ee370  Merge branch 'useless_return' of github.com:tomecki/ruff into useless_return  2023-02-25 13:08:38 +01:00
tomecki  d6f0d1fab2  Merge branch 'charliermarsh:main' into useless_return  2023-02-25 13:08:09 +01:00
tomecki  c9bf979e61  Merge branch 'main' into useless_return  2023-02-25 13:06:51 +01:00
tomecki  e556fb4c77  Update crates/ruff/src/rules/pylint/rules/useless_return.rs (Co-authored-by: Jeong YunWon <69878+youknowone@users.noreply.github.com>)  2023-02-25 12:59:13 +01:00
tomecki  02ae14037b  Merge branch 'main' into useless_return  2023-02-22 09:38:18 +01:00
tomecki  385ccb2a50  rename for clarity  2023-02-21 20:54:22 +01:00
tomecki  ecacccbd5e  cleanup  2023-02-21 20:39:33 +01:00
tomecki  bd7b472902  Merge branch 'main' into useless_return  2023-02-21 20:28:48 +01:00
tomecki  89b1b06dc7  wip  2023-02-21 20:28:33 +01:00
9247 changed files with 191635 additions and 829779 deletions


@@ -1,10 +1,29 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
benchmark = "bench -p ruff_benchmark --"
# statically link the C runtime so the executable does not depend on
# that shared/dynamic library.
#
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
[target.'cfg(all())']
rustflags = [
# CLIPPY LINT SETTINGS
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
# See: `https://github.com/rust-lang/cargo/issues/5034`
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
"-Dunsafe_code",
"-Wclippy::pedantic",
# Allowed pedantic lints
"-Wclippy::char_lit_as_u8",
"-Aclippy::collapsible_else_if",
"-Aclippy::collapsible_if",
"-Aclippy::implicit_hasher",
"-Aclippy::match_same_arms",
"-Aclippy::missing_errors_doc",
"-Aclippy::missing_panics_doc",
"-Aclippy::module_name_repetitions",
"-Aclippy::must_use_candidate",
"-Aclippy::similar_names",
"-Aclippy::too_many_lines",
# Disallowed restriction lints
"-Wclippy::print_stdout",
"-Wclippy::print_stderr",
"-Wclippy::dbg_macro",
]
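
Aside (not part of the diff above): the comment block notes that passing lints through rustflags is a workaround pending TOML-based workspace lint configuration (rust-lang/cargo#5034). Since Cargo 1.74, roughly the same intent can usually be expressed with a workspace lints table in the workspace Cargo.toml; the sketch below is illustrative only and mirrors a few of the flags above.

# Sketch, assuming Cargo 1.74+ workspace lint tables; not part of this diff.
# In the workspace Cargo.toml:
[workspace.lints.rust]
unsafe_code = "deny"

[workspace.lints.clippy]
# priority = -1 lets the specific "allow"s below override the pedantic group
pedantic = { level = "warn", priority = -1 }
collapsible_else_if = "allow"
collapsible_if = "allow"
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"

# Each member crate then opts in with:
# [lints]
# workspace = true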


@@ -1,8 +0,0 @@
[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
failure-output = "immediate-final"
# Do not cancel the test run on the first failure.
fail-fast = false
status-level = "skip"


@@ -1,46 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/rust
{
"name": "Ruff",
"image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye",
"mounts": [
{
"source": "devcontainer-cargo-cache-${devcontainerId}",
"target": "/usr/local/cargo",
"type": "volume"
}
],
"customizations": {
"codespaces": {
"openFiles": [
"CONTRIBUTING.md"
]
},
"vscode": {
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"
],
"settings": {
"rust-analyzer.updates.askBeforeDownload": false
}
}
},
// Features to add to the dev container. More info: https://containers.dev/features.
"features": {
"ghcr.io/devcontainers/features/python": {
"installTools": false
}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
"postCreateCommand": ".devcontainer/post-create.sh"
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}


@@ -1,8 +0,0 @@
#!/usr/bin/env bash
rustup default < rust-toolchain
rustup component add clippy rustfmt
cargo install cargo-insta
cargo fetch
pip install maturin pre-commit


@@ -10,11 +10,5 @@ indent_style = space
insert_final_newline = true
indent_size = 2
[*.{rs,py,pyi}]
[*.{rs,py}]
indent_size = 4
[*.snap]
trim_trailing_whitespace = false
[*.md]
max_line_length = 100

.gitattributes vendored (16 changed lines)

@@ -1,18 +1,6 @@
* text=auto eol=lf
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
crates/ruff_python_parser/resources/inline linguist-generated=true
crates/ruff/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown

.github/CODEOWNERS vendored (deleted, 21 lines)

@@ -1,21 +0,0 @@
# GitHub code owners file. For more info: https://help.github.com/articles/about-codeowners/
#
# - Comment lines begin with `#` character.
# - Each line is a file pattern followed by one or more owners.
# - The '*' pattern is global owners.
# - Order is important. The last matching pattern has the most precedence.
/crates/ruff_notebook/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood
# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood


@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve
If you're filing a bug report, please consider including the following information:
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).


@@ -1,15 +0,0 @@
<!--
Thank you for contributing to Ruff! To help us out with reviewing, please consider the following:
- Does this pull request include a summary of the change? (See below.)
- Does this pull request include a descriptive title?
- Does this pull request include references to any relevant issues?
-->
## Summary
<!-- What's the purpose of the change? What does it do, and why? -->
## Test Plan
<!-- How was it tested? -->

.github/dependabot.yml vendored (new file, 11 lines)

@@ -0,0 +1,11 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "12:00"
timezone: "America/New_York"
commit-message:
prefix: "ci(deps)"

.github/release.yml vendored (new file, 19 lines)

@@ -0,0 +1,19 @@
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
changelog:
categories:
- title: Breaking Changes
labels:
- breaking
- title: Rules
labels:
- rule
- autofix
- title: Settings
labels:
- configuration
- title: Bug Fixes
labels:
- bug
- title: Other Changes
labels:
- "*"

.github/renovate.json5 vendored (deleted, 111 lines)

@@ -1,111 +0,0 @@
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
dependencyDashboard: true,
suppressNotifications: ["prEditedNotification"],
extends: ["config:recommended"],
labels: ["internal"],
schedule: ["before 4am on Monday"],
semanticCommits: "disabled",
separateMajorMinor: false,
prHourlyLimit: 10,
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
cargo: {
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
rangeStrategy: "update-lockfile",
},
pep621: {
// The default for this package manager is to only search for `pyproject.toml` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
},
pip_requirements: {
// The default for this package manager is to run on all requirements.txt files:
// https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
// `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
// outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
// which takes a regex string, `ignorePaths` takes a glob string, so we have to use
// a "negative glob pattern".
// See:
// - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
// - https://docs.renovatebot.com/configuration-options/#ignorepaths
// - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
ignorePaths: ["!docs/requirements*.txt"]
},
npm: {
// The default for this package manager is to only search for `package.json` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
fileMatch: ["^playground/.*package\\.json$"],
},
"pre-commit": {
enabled: true,
},
packageRules: [
{
// Group upload/download artifact updates, the versions are dependent
groupName: "Artifact GitHub Actions dependencies",
matchManagers: ["github-actions"],
matchDatasources: ["gitea-tags", "github-tags"],
matchPackagePatterns: ["actions/.*-artifact"],
description: "Weekly update of artifact-related GitHub Actions dependencies",
},
{
// This package rule disables updates for GitHub runners:
// we'd only pin them to a specific version
// if there was a deliberate reason to do so
groupName: "GitHub runners",
matchManagers: ["github-actions"],
matchDatasources: ["github-runners"],
description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
enabled: false,
},
{
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
// See: https://github.com/astral-sh/uv/issues/3642
matchPackagePatterns: ["zip"],
matchManagers: ["cargo"],
enabled: false,
},
{
// `mkdocs-material` requires a manual update to keep the version in sync
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackagePatterns: ["mkdocs-material"],
enabled: false,
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],
description: "Weekly update of pre-commit dependencies",
},
{
groupName: "NPM Development dependencies",
matchManagers: ["npm"],
matchDepTypes: ["devDependencies"],
description: "Weekly update of NPM development dependencies",
},
{
groupName: "Monaco",
matchManagers: ["npm"],
matchPackagePatterns: ["monaco"],
description: "Weekly update of the Monaco editor",
},
{
groupName: "strum",
matchManagers: ["cargo"],
matchPackagePatterns: ["strum"],
description: "Weekly update of strum dependencies",
},
{
groupName: "ESLint",
matchManagers: ["npm"],
matchPackageNames: ["eslint"],
allowedVersions: "<9",
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
},
],
vulnerabilityAlerts: {
commitMessageSuffix: "",
labels: ["internal", "security"],
},
}

.github/workflows/benchmark.yaml vendored (new file, 133 lines)

@@ -0,0 +1,133 @@
name: Benchmark
on:
pull_request:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
jobs:
run-benchmark:
if: github.event_name == 'pull_request'
name: "Run | ${{ matrix.os }}"
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- name: "PR - Checkout Branch"
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: "PR - Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v1
- name: "PR - Build benchmarks"
uses: actions-rs/cargo@v1
with:
command: bench
args: -p ruff_benchmark --no-run
- name: "PR - Run benchmarks"
run: cargo benchmark --save-baseline=pr
- name: "Main - Checkout Branch"
uses: actions/checkout@v3
with:
clean: false
ref: main
- name: "Main - Install Rust toolchain"
run: rustup show
- name: "Main - Build benchmarks"
uses: actions-rs/cargo@v1
with:
command: bench
args: -p ruff_benchmark --no-run
- name: "Main - Run benchmarks"
run: cargo benchmark --save-baseline=main
- name: "Upload benchmark results"
uses: actions/upload-artifact@v3
with:
name: benchmark-results-${{ matrix.os }}
path: ./target/criterion
# Cleanup
- name: Remove Criterion Artifact
uses: JesseTG/rm@v1.0.3
with:
path: ./target/criterion
benchmark-compare:
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
name: Compare
needs:
- run-benchmark
steps:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install critcmp"
# Use debug build: Building takes much longer than the "slowness" of using the debug build.
run: cargo install --debug critcmp
- name: "Linux | Download PR benchmark results"
uses: actions/download-artifact@v3
with:
name: benchmark-results-ubuntu-latest
path: ./target/criterion
- name: "Linux | Compare benchmark results"
shell: bash
run: |
echo "### Benchmark" >> summary.md
echo "#### Linux" >> summary.md
echo "\`\`\`" >> summary.md
critcmp main pr >> summary.md
echo "\`\`\`" >> summary.md
echo "" >> summary.md
- name: "Linux | Cleanup benchmark results"
run: rm -rf ./target/criterion
- name: "Windows | Download PR benchmark results"
uses: actions/download-artifact@v3
with:
name: benchmark-results-windows-latest
path: ./target/criterion
- name: "Windows | Compare benchmark results"
shell: bash
run: |
echo "#### Windows" >> summary.md
echo "\`\`\`" >> summary.md
critcmp main pr >> summary.md
echo "\`\`\`" >> summary.md
echo "" >> summary.md
echo ${{ github.event.pull_request.number }} > pr-number
cat summary.md > $GITHUB_STEP_SUMMARY
- uses: actions/upload-artifact@v3
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@v3
name: Upload Summary
with:
name: summary
path: summary.md


@@ -1,470 +0,0 @@
# Build ruff on all platforms.
#
# Generates both wheels (for PyPI) and archived binaries (for GitHub releases).
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
name: "Build binaries"
on:
workflow_call:
inputs:
plan:
required: true
type: string
pull_request:
paths:
# When we change pyproject.toml, we want to ensure that the maturin builds still work.
- pyproject.toml
# And when we change this workflow itself...
- .github/workflows/build-binaries.yml
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
sdist:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build sdist"
uses: PyO3/maturin-action@v1
with:
command: sdist
args: --out dist
- name: "Test sdist"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload sdist"
uses: actions/upload-artifact@v4
with:
name: wheels-sdist
path: dist
macos-x86_64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-12
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-macos-x86_64
path: dist
- name: "Archive binary"
run: |
TARGET=x86_64-apple-darwin
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-macos-x86_64
path: |
*.tar.gz
*.sha256
macos-aarch64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: arm64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@v1
with:
target: aarch64
args: --release --locked --out dist
- name: "Test wheel - aarch64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-aarch64-apple-darwin
path: dist
- name: "Archive binary"
run: |
TARGET=aarch64-apple-darwin
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-aarch64-apple-darwin
path: |
*.tar.gz
*.sha256
windows:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: windows-latest
strategy:
matrix:
platform:
- target: x86_64-pc-windows-msvc
arch: x64
- target: i686-pc-windows-msvc
arch: x86
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
env:
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
XWIN_VERSION: 16
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
*.zip
*.sha256
linux:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.target }}
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.target }}
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.target }}
path: |
*.tar.gz
*.sha256
linux-cross:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-gnu
arch: aarch64
# see https://github.com/astral-sh/ruff/issues/3791
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: armv7-unknown-linux-gnueabihf
arch: armv7
- target: s390x-unknown-linux-gnu
arch: s390x
- target: powerpc64le-unknown-linux-gnu
arch: ppc64le
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: powerpc64-unknown-linux-gnu
arch: ppc64
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: arm-unknown-linux-musleabihf
arch: arm
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
with:
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }}
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.platform.target }}
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
*.tar.gz
*.sha256
musllinux:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.target }}
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.target }}
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.target }}
path: |
*.tar.gz
*.sha256
musllinux-cross:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add python3
run: |
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.platform.target }}
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
*.tar.gz
*.sha256


@@ -1,68 +0,0 @@
# Build and publish a Docker image.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
#
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
# sharing the built image as an artifact between jobs is challenging.
name: "[ruff] Build Docker image"
on:
workflow_call:
inputs:
plan:
required: true
type: string
pull_request:
paths:
- .github/workflows/build-docker.yml
jobs:
docker-publish:
name: Build Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: |
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}


@@ -15,82 +15,13 @@ env:
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.11"
jobs:
determine_changes:
name: "Determine changes"
runs-on: ubuntu-latest
outputs:
# Flag that is raised when any code that affects parser is changed
parser: ${{ steps.changed.outputs.parser_any_changed }}
# Flag that is raised when any code that affects linter is changed
linter: ${{ steps.changed.outputs.linter_any_changed }}
# Flag that is raised when any code that affects formatter is changed
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
# Flag that is raised when any code is changed
# This is superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v45
id: changed
with:
files_yaml: |
parser:
- Cargo.toml
- Cargo.lock
- crates/ruff_python_trivia/**
- crates/ruff_source_file/**
- crates/ruff_text_size/**
- crates/ruff_python_ast/**
- crates/ruff_python_parser/**
- scripts/fuzz-parser/**
- .github/workflows/ci.yaml
linter:
- Cargo.toml
- Cargo.lock
- crates/**
- "!crates/ruff_python_formatter/**"
- "!crates/ruff_formatter/**"
- "!crates/ruff_dev/**"
- scripts/*
- python/**
- .github/workflows/ci.yaml
formatter:
- Cargo.toml
- Cargo.lock
- crates/ruff_python_formatter/**
- crates/ruff_formatter/**
- crates/ruff_python_trivia/**
- crates/ruff_python_ast/**
- crates/ruff_source_file/**
- crates/ruff_python_index/**
- crates/ruff_text_size/**
- crates/ruff_python_parser/**
- crates/ruff_dev/**
- scripts/*
- python/**
- .github/workflows/ci.yaml
code:
- "**/*"
- "!**/*.md"
- "!docs/**"
- "!assets/**"
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- run: cargo fmt --all --check
@@ -98,539 +29,150 @@ jobs:
cargo-clippy:
name: "cargo clippy"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: |
rustup component add clippy
- uses: Swatinem/rust-cache@v2
- run: cargo clippy --workspace --all-targets --all-features -- -D warnings
cargo-clippy-wasm:
name: "cargo clippy (wasm)"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
- run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
cargo-test-linux:
name: "cargo test (linux)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
cargo-test:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}
name: "cargo test | ${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
- run: cargo install cargo-insta
- run: pip install black[d]==23.1.0
- name: "Run tests (Ubuntu)"
if: ${{ matrix.os == 'ubuntu-latest' }}
run: |
cargo insta test --all --all-features --delete-unreferenced-snapshots
git diff --exit-code
- name: "Run tests (Windows)"
if: ${{ matrix.os == 'windows-latest' }}
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
run: |
cargo insta test --all --all-features
git diff --exit-code
- run: cargo test --package ruff_cli --test black_compatibility_test -- --ignored
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
RUSTDOCFLAGS: "-D warnings"
# Use --document-private-items so that all our doc comments are kept in
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: ${{ matrix.os == 'ubuntu-latest' }}
with:
name: ruff
path: target/debug/ruff
cargo-test-windows:
name: "cargo test (windows)"
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
cargo nextest run --all-features --profile ci
cargo test --all-features --doc
cargo-test-wasm:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
- name: "Run wasm-pack"
run: |
cd crates/ruff_wasm
wasm-pack test --node
- name: "Test red_knot_wasm"
run: |
cd crates/red_knot_wasm
wasm-pack test --node
cargo-build-release:
name: "cargo build (release)"
runs-on: macos-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: "Build"
run: cargo build --release --locked
cargo-build-msrv:
name: "cargo build (msrv)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- uses: SebRollen/toml-action@v1.2.0
id: msrv
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- name: "Install Rust toolchain"
run: rustup default ${{ steps.msrv.outputs.value }}
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
cargo-fuzz:
name: "cargo fuzz"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@main
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
- run: cargo fuzz build -s none
fuzz-parser:
name: "Fuzz the parser"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- uses: actions/download-artifact@v4
name: Download Ruff binary to test
id: download-cached-binary
with:
name: ruff
path: ruff-to-test
- name: Fuzz
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
scripts:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup component add rustfmt
run: rustup show
- uses: Swatinem/rust-cache@v2
- run: ./scripts/add_rule.py --name DoTheThing --prefix PL --code C0999 --linter pylint
- run: ./scripts/add_rule.py --name DoTheThing --code PLC999 --linter pylint
- run: cargo check
- run: cargo fmt --all --check
- run: |
./scripts/add_plugin.py test --url https://pypi.org/project/-test/0.1.0/ --prefix TST
./scripts/add_rule.py --name FirstRule --prefix TST --code 001 --linter test
./scripts/add_rule.py --name FirstRule --code TST001 --linter test
- run: cargo check
- run: cargo fmt --all --check
typos:
name: "spell check"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: crate-ci/typos@master
with:
files: .
ecosystem:
name: "ecosystem"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
needs: cargo-test
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20
if: github.event_name == 'pull_request'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
python-version: "3.11"
- uses: actions/download-artifact@v4
name: Download comparison Ruff binary
- uses: actions/download-artifact@v3
name: Download Ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v6
name: Download baseline Ruff binary
- uses: dawidd6/action-download-artifact@v2
name: Download base results
with:
name: ruff
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- name: Install ruff-ecosystem
run: |
pip install ./python/ruff-ecosystem
- name: Run `ruff check` stable ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
- name: Run ecosystem check
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
chmod +x ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
scripts/check_ecosystem.py ruff ${{ steps.ruff-target.outputs.download-path }}/ruff | tee ecosystem-result
cat ecosystem-result > $GITHUB_STEP_SUMMARY
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
echo "### Linter (stable)" > ecosystem-result
cat ecosystem-result-check-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff check` preview ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
echo "### Linter (preview)" >> ecosystem-result
cat ecosystem-result-check-preview >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` stable ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
echo "### Formatter (stable)" >> ecosystem-result
cat ecosystem-result-format-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` preview ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
echo "### Formatter (preview)" >> ecosystem-result
cat ecosystem-result-format-preview >> ecosystem-result
echo "" >> ecosystem-result
- name: Export pull request number
run: |
echo ${{ github.event.number }} > pr-number
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
name: Upload Results
with:
name: ecosystem-result
path: ecosystem-result
cargo-shear:
name: "cargo shear"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- uses: cargo-bins/cargo-binstall@main
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
python-package:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- uses: Swatinem/rust-cache@v2
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
args: --out dist
- name: "Test wheel"
run: |
pip install --force-reinstall --find-links dist ${{ env.PACKAGE_NAME }}
ruff --help
python -m ruff --help
- name: "Remove wheels from cache"
run: rm -rf target/wheels
pre-commit:
name: "pre-commit"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
uses: actions/cache@v4
with:
path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: "Run pre-commit"
run: |
echo '```console' > $GITHUB_STEP_SUMMARY
# Enable color output for pre-commit and remove it for the summary
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> $GITHUB_STEP_SUMMARY) >&1
exit_code=${PIPESTATUS[0]}
echo '```' >> $GITHUB_STEP_SUMMARY
exit $exit_code
docs:
name: "mkdocs"
runs-on: ubuntu-latest
timeout-minutes: 10
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
- name: "Generate docs"
run: python scripts/generate_mkdocs.py
- name: "Check docs formatting"
run: python scripts/check_docs_formatted.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Formatter progress"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
- name: "Remove checkouts from cache"
run: rm -r target/progress_projects
check-ruff-lsp:
name: "test ruff-lsp"
runs-on: ubuntu-latest
timeout-minutes: 5
needs:
- cargo-test-linux
- determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v4
name: Download development ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- name: Install ruff-lsp dependencies
run: |
just install
- name: Run ruff-lsp tests
run: |
# Setup development binary
pip uninstall --yes ruff
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
ruff version
just test
benchmarks:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@v2
with:
tool: cargo-codspeed
- uses: Swatinem/rust-cache@v2
- name: "Build benchmarks"
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v3
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -1,72 +0,0 @@
name: Daily parser fuzz
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
pull_request:
paths:
- ".github/workflows/daily_fuzz.yaml"
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
FORCE_COLOR: 1
jobs:
fuzz:
name: Fuzz
runs-on: ubuntu-latest
timeout-minutes: 20
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build ruff
# A debug build means the script runs slower once it gets started,
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
runs-on: ubuntu-latest
needs: fuzz
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
permissions:
issues: write
steps:
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.create({
owner: "astral-sh",
repo: "ruff",
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
labels: ["bug", "parser", "fuzzer"],
})

.github/workflows/docs.yaml vendored (new file, 33 lines)

@@ -0,0 +1,33 @@
name: mkdocs
on:
release:
types: [published]
workflow_dispatch:
jobs:
mkdocs:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install dependencies"
run: |
pip install -r docs/requirements.txt
- name: "Copy README File"
run: |
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py
mkdocs build --strict
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@2.0.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages publish site --project-name=ruff-docs --branch ${GITHUB_HEAD_REF} --commit-hash ${GITHUB_SHA}

.github/workflows/flake8-to-ruff.yaml vendored (new file, 247 lines)

@@ -0,0 +1,247 @@
name: "[flake8-to-ruff] Release"
on: workflow_dispatch
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: flake8-to-ruff
CRATE_NAME: flake8_to_ruff
PYTHON_VERSION: "3.7" # to build abi3 wheels
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - x86_64"
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --universal2 --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
windows:
runs-on: windows-latest
strategy:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.target }}
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
shell: bash
run: |
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target: [x86_64, i686]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64'
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
target: [aarch64, armv7, s390x, ppc64le, ppc64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
if: matrix.target != 'ppc64'
name: Install built wheel
with:
arch: ${{ matrix.target }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
release:
name: Release
runs-on: ubuntu-latest
needs:
- macos-universal
- macos-x86_64
- windows
- linux
- linux-cross
- musllinux
- musllinux-cross
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v4
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
run: |
pip install --upgrade twine
twine upload --skip-existing *


@@ -1,29 +0,0 @@
# Notify downstream repositories of a new release.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[ruff] Notify dependents"
on:
workflow_call:
inputs:
plan:
required: true
type: string
jobs:
update-dependents:
name: Notify dependents
runs-on: ubuntu-latest
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@v7
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |
github.rest.actions.createWorkflowDispatch({
owner: 'astral-sh',
repo: 'ruff-pre-commit',
workflow_id: 'main.yml',
ref: 'main',
})

.github/workflows/playground.yaml

@@ -0,0 +1,47 @@
name: "[Playground] Release"
on:
workflow_dispatch:
push:
branches: [main]
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v3
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build JavaScript bundle"
run: npm run build
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@2.0.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages publish playground/dist --project-name=ruff --branch ${GITHUB_HEAD_REF} --commit-hash ${GITHUB_SHA}


@@ -1,8 +1,8 @@
name: Ecosystem check comment
name: PR Check Comment
on:
workflow_run:
workflows: [CI]
workflows: [CI, Benchmark]
types: [completed]
workflow_dispatch:
inputs:
@@ -17,15 +17,14 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v6
name: Download pull request number
- uses: dawidd6/action-download-artifact@v2
name: Download PR Number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
- name: Extract PR Number
id: pr-number
run: |
if [[ -f pr-number ]]
@@ -33,8 +32,8 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v6
name: "Download ecosystem results"
- uses: dawidd6/action-download-artifact@v2
name: "Download Ecosystem Result"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
with:
@@ -42,48 +41,43 @@ jobs:
workflow: ci.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
- uses: dawidd6/action-download-artifact@v2
name: "Download Benchmark Result"
id: download-benchmark-result
if: steps.pr-number.outputs.pr-number
with:
name: summary
workflow: benchmark.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/benchmark
if_no_artifact_found: ignore
- name: Generate Comment
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
if: steps.download-ecosystem-result.outputs.found_artifact == 'true' || steps.download-benchmark-result.outputs.found_artifact == 'true'
run: |
# Guard against malicious ecosystem results that symlink to a secret
# file on this runner
if [[ -L pr/ecosystem/ecosystem-result ]]
echo 'comment<<EOF' >> $GITHUB_OUTPUT
echo '## PR Check Results' >> $GITHUB_OUTPUT
if [[ -f pr/ecosystem/ecosystem-result ]]
then
echo "Error: ecosystem-result cannot be a symlink"
exit 1
echo "### Ecosystem" >> $GITHUB_OUTPUT
cat pr/ecosystem/ecosystem-result >> $GITHUB_OUTPUT
fi
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt
if [[ -f pr/benchmark/summary.md ]]
then
cat pr/benchmark/summary.md >> $GITHUB_OUTPUT
fi
echo '## `ruff-ecosystem` results' >> comment.txt
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt
echo 'comment<<EOF' >> $GITHUB_OUTPUT
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
- name: Find existing comment
uses: peter-evans/find-comment@v3
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment ecosystem -->"
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@v4
if: steps.generate-comment.outputs.comment
uses: thollander/actions-comment-pull-request@v2
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace
pr_number: ${{ steps.pr-number.outputs.pr-number }}
message: ${{ steps.generate-comment.outputs.comment }}
comment_tag: PR Check Results


@@ -1,152 +0,0 @@
# Publish the Ruff documentation.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: mkdocs
on:
workflow_dispatch:
inputs:
ref:
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
default: ""
type: string
workflow_call:
inputs:
plan:
required: true
type: string
jobs:
mkdocs:
runs-on: ubuntu-latest
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v5
with:
python-version: 3.12
- name: "Set docs version"
run: |
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
fi
# Use version as display name for now
display_name="$version"
echo "version=$version" >> $GITHUB_ENV
echo "display_name=$display_name" >> $GITHUB_ENV
- name: "Set branch name"
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
timestamp="$(date +%s)"
# create branch_display_name from display_name by replacing all
# characters disallowed in git branch names with hyphens
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
echo "timestamp=$timestamp" >> $GITHUB_ENV
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt
- name: "Copy README File"
run: |
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
- name: "Clone docs repo"
run: |
version="${{ env.version }}"
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
- name: "Copy docs"
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
- name: "Commit docs"
working-directory: astral-docs
run: |
branch_name="${{ env.branch_name }}"
git config user.name "astral-docs-bot"
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
git checkout -b $branch_name
git add site/ruff
git commit -m "Update ruff documentation for $version"
- name: "Create Pull Request"
working-directory: astral-docs
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
branch_name="${{ env.branch_name }}"
# set the PR title
pull_request_title="Update ruff documentation for $display_name"
# Delete any existing pull requests that are open for this version
# by checking against pull_request_title because the new PR will
# supersede the old one.
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
xargs -I {} gh pr close {}
# push the branch to GitHub
git push origin $branch_name
# create the PR
gh pr create --base main --head $branch_name \
--title "$pull_request_title" \
--body "Automated documentation update for $display_name" \
--label "documentation"
- name: "Merge Pull Request"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
working-directory: astral-docs
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10
gh pr merge --squash $branch_name


@@ -1,55 +0,0 @@
# Publish the Ruff playground.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[Playground] Release"
on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build JavaScript bundle"
run: npm run build
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.7.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}


@@ -1,34 +0,0 @@
# Publish a release to PyPI.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "[ruff] Publish to PyPI"
on:
workflow_call:
inputs:
plan:
required: true
type: string
jobs:
pypi-publish:
name: Upload to PyPI
runs-on: ubuntu-latest
environment:
name: release
permissions:
# For PyPI's trusted publishing.
id-token: write
steps:
- uses: actions/download-artifact@v4
with:
pattern: wheels-*
path: wheels
merge-multiple: true
- name: Publish to PyPi
uses: pypa/gh-action-pypi-publish@release/v1
with:
skip-existing: true
packages-dir: wheels
verbose: true


@@ -1,55 +0,0 @@
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Build and publish wasm"
on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
ruff_wasm:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
strategy:
matrix:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@v4
with:
node-version: 18
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --dry-run crates/ruff_wasm/pkg
- name: "Publish"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --provenance --access public crates/ruff_wasm/pkg
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}


@@ -1,300 +0,0 @@
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.
name: Release
permissions:
"contents": "write"
# This task will run whenever you workflow_dispatch with a tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (cargo-dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
on:
workflow_dispatch:
inputs:
tag:
description: Release Tag
required: true
default: dry-run
type: string
jobs:
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
plan:
runs-on: "ubuntu-20.04"
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cargo-dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
- name: Cache cargo-dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/cargo-dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "cargo dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@v4
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
custom-build-binaries:
needs:
- plan
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
uses: ./.github/workflows/build-binaries.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
custom-build-docker:
needs:
- plan
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
uses: ./.github/workflows/build-docker.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
permissions:
"contents": "read"
"packages": "write"
# Build and package all the platform-agnostic(ish) things
build-global-artifacts:
needs:
- plan
- custom-build-binaries
- custom-build-docker
runs-on: "ubuntu-20.04"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached cargo-dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/cargo-dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: cargo-dist
shell: bash
run: |
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "cargo dist ran successfully"
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@v4
with:
name: artifacts-build-global
path: |
${{ steps.cargo-dist.outputs.paths }}
${{ env.BUILD_MANIFEST_NAME }}
# Determines if we should publish/announce
host:
needs:
- plan
- custom-build-binaries
- custom-build-docker
- build-global-artifacts
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "ubuntu-20.04"
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached cargo-dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/cargo-dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
- id: host
shell: bash
run: |
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@v4
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
path: dist-manifest.json
custom-publish-pypi:
needs:
- plan
- host
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
uses: ./.github/workflows/publish-pypi.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
# publish jobs get escalated permissions
permissions:
"id-token": "write"
"packages": "write"
custom-publish-wasm:
needs:
- plan
- host
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
uses: ./.github/workflows/publish-wasm.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
# publish jobs get escalated permissions
permissions:
"contents": "read"
"id-token": "write"
"packages": "write"
# Create a GitHub Release while uploading all files to it
announce:
needs:
- plan
- host
- custom-publish-pypi
- custom-publish-wasm
# use "always() && ..." to allow us to wait for all publish jobs while
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
runs-on: "ubuntu-20.04"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: artifacts
merge-multiple: true
- name: Cleanup
run: |
# Remove the granular manifests
rm -f artifacts/*-dist-manifest.json
- name: Create GitHub Release
env:
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
RELEASE_COMMIT: "${{ github.sha }}"
run: |
# Write and read notes from a file to avoid quoting breaking things
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
custom-notify-dependents:
needs:
- plan
- announce
uses: ./.github/workflows/notify-dependents.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
custom-publish-docs:
needs:
- plan
- announce
uses: ./.github/workflows/publish-docs.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
custom-publish-playground:
needs:
- plan
- announce
uses: ./.github/workflows/publish-playground.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit

.github/workflows/ruff.yaml

@@ -0,0 +1,374 @@
name: "[ruff] Release"
on:
workflow_dispatch:
release:
types: [published]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.7" # to build abi3 wheels
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --out dist --sdist
- name: "Install built wheel - x86_64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.tar.gz
*.sha256
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --universal2 --out dist
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.tar.gz
*.sha256
windows:
runs-on: windows-latest
strategy:
matrix:
platform:
- target: x86_64-pc-windows-msvc
arch: x64
- target: i686-pc-windows-msvc
arch: x86
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --out dist
- name: "Install built wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.zip
*.sha256
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist
- name: "Install built wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.tar.gz
*.sha256
linux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-gnu
arch: aarch64
- target: armv7-unknown-linux-gnueabihf
arch: armv7
- target: s390x-unknown-linux-gnu
arch: s390x
- target: powerpc64le-unknown-linux-gnu
arch: ppc64le
- target: powerpc64-unknown-linux-gnu
arch: ppc64
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: auto
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.tar.gz
*.sha256
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist
- name: "Install built wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.tar.gz
*.sha256
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: binaries
path: |
*.tar.gz
*.sha256
release:
name: Release
runs-on: ubuntu-latest
needs:
- macos-universal
- macos-x86_64
- windows
- linux
- linux-cross
- musllinux
- musllinux-cross
if: "startsWith(github.ref, 'refs/tags/')"
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v4
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.RUFF_TOKEN }}
run: |
pip install --upgrade twine
twine upload --skip-existing *
- name: "Update pre-commit mirror"
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.RUFF_PRE_COMMIT_PAT }}" -H "X-GitHub-Api-Version: 2022-11-28" https://api.github.com/repos/charliermarsh/ruff-pre-commit/dispatches --data '{"event_type": "pypi_release"}'
- uses: actions/download-artifact@v3
with:
name: binaries
path: binaries
- name: Release
uses: softprops/action-gh-release@v1
with:
files: binaries/*


@@ -1,80 +0,0 @@
name: Sync typeshed
on:
workflow_dispatch:
schedule:
# Run on the 1st and the 15th of every month:
- cron: "0 0 1,15 * *"
env:
FORCE_COLOR: 1
GH_TOKEN: ${{ github.token }}
jobs:
sync:
name: Sync typeshed
runs-on: ubuntu-latest
timeout-minutes: 20
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
name: Checkout Ruff
with:
path: ruff
- uses: actions/checkout@v4
name: Checkout typeshed
with:
repository: python/typeshed
path: typeshed
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/ruff_vendored/vendor/typeshed
mkdir ruff/crates/ruff_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ruff_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ruff_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/ruff_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ruff_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ruff_vendored/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}
run: |
cd ruff
git checkout -b typeshedbot/sync-typeshed
git add .
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
- name: Create a PR
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
run: |
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
create-issue-on-failure:
name: Create an issue if the typeshed sync failed
runs-on: ubuntu-latest
needs: [sync]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
permissions:
issues: write
steps:
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.create({
owner: "astral-sh",
repo: "ruff",
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
})

.gitignore

@@ -1,33 +1,8 @@
# Benchmarking cpython (CONTRIBUTING.md)
crates/ruff_linter/resources/test/cpython
# generate_mkdocs.py
mkdocs.generated.yml
# check_ecosystem.py
ruff-old
github_search*.jsonl
# update_schemastore.py
schemastore
# `maturin develop` and ecosystem_all_check.sh
.venv*
# Formatter debugging (crates/ruff_python_formatter/README.md)
scratch.*
# Created by `perf` (CONTRIBUTING.md)
perf.data
perf.data.old
# Created by `flamegraph` (CONTRIBUTING.md)
flamegraph.svg
# Additional target directories that don't invalidate the main compile cache when changing linker settings,
# e.g. `CARGO_TARGET_DIR=target-maturin maturin build --release --strip` or
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*
# samply profiles
profile.json
# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg
# Local cache
.ruff_cache
crates/ruff/resources/test/cpython
mkdocs.yml
.overrides
###
# Rust.gitignore
@@ -100,7 +75,6 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
repos/
# Translations
*.mo
@@ -217,9 +191,3 @@ cython_debug/
# VIM
.*.sw?
.sw?
# Custom re-inclusions for the resolver test cases
!crates/ruff_python_resolver/resources/test/airflow/venv/
!crates/ruff_python_resolver/resources/test/airflow/venv/lib
!crates/ruff_python_resolver/resources/test/airflow/venv/lib/python3.11/site-packages/_watchdog_fsevents.cpython-311-darwin.so
!crates/ruff_python_resolver/resources/test/airflow/venv/lib/python3.11/site-packages/orjson/orjson.cpython-311-darwin.so


@@ -1,23 +0,0 @@
# default to true for all rules
default: true
# MD007/unordered-list-indent
MD007:
indent: 4
# MD033/no-inline-html
MD033: false
# MD041/first-line-h1
MD041: false
# MD013/line-length
MD013: false
# MD014/commands-show-output
MD014: false
# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
siblings_only: true


@@ -1,78 +1,68 @@
fail_fast: true
exclude: |
(?x)^(
crates/ruff_vendored/vendor/.*|
crates/red_knot_workspace/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff_notebook/resources/.*|
crates/ruff_server/resources/.*|
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
crates/ruff_python_resolver/resources/.*|
crates/ruff_python_resolver/tests/snapshots/.*
)$
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.19
rev: v0.12.1
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.17
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
- mdformat-mkdocs
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$
- mdformat-black
- black==23.1.0 # Must be the latest version of Black
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.41.0
rev: v0.33.0
hooks:
- id: markdownlint-fix
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$
- repo: https://github.com/crate-ci/typos
rev: v1.24.5
hooks:
- id: typos
args:
- --disable
- MD013 # line-length
- MD033 # no-inline-html
- --
- repo: local
hooks:
- id: cargo-fmt
name: cargo fmt
entry: cargo fmt --
language: system
language: rust
types: [rust]
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.5
hooks:
- id: ruff-format
- id: clippy
name: clippy
entry: cargo clippy --workspace --all-targets --all-features -- -D warnings
language: rust
pass_filenames: false
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
name: ruff
entry: cargo run -p ruff_cli -- check --no-cache --force-exclude --fix --exit-non-zero-on-fix
language: rust
types_or: [python, pyi]
require_serial: true
exclude: |
(?x)^(
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*
)$
- id: dev-generate-all
name: dev-generate-all
entry: cargo dev generate-all
language: rust
pass_filenames: false
exclude: target
# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
# Black
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: prettier
types: [yaml]
- id: black
exclude: |
(?x)^(
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*
)$
ci:
skip: [cargo-fmt, dev-generate-all]
skip: [cargo-fmt, clippy, dev-generate-all]


@@ -1,2 +0,0 @@
# Auto-generated by `cargo-dist`.
.github/workflows/release.yml


@@ -1,5 +0,0 @@
{
"recommendations": [
"rust-lang.rust-analyzer"
]
}


@@ -1,6 +0,0 @@
{
"rust-analyzer.check.extraArgs": [
"--all-features"
],
"rust-analyzer.check.command": "clippy",
}


@@ -1,331 +1,15 @@
# Breaking Changes
## 0.6.0
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
```toml
[tool.ruff.lint.per-file-ignores]
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
```
If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
Notebook files and not format them:
```toml
[tool.ruff.format]
exclude = ["*.ipynb"]
```
And, conversely, the following would only format Jupyter Notebook files and not lint them:
```toml
[tool.ruff.lint]
exclude = ["*.ipynb"]
```
You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```
## 0.5.0
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
- Selecting `ALL` now excludes deprecated rules
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
## 0.3.0
### Ruff 2024.2 style
The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `build` directory was included in this list. However, the `build` directory tends to be a not-unpopular directory
name, and excluding it by default caused confusion. Ruff now no longer excludes `build` except if it is excluded by a `.gitignore` file
or because it is listed in `extend-exclude`.
### `--format` is no longer a valid `rule` or `linter` command option
Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
## 0.1.9
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
Ruff maintains a list of default exclusions, which now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.
## 0.1.0
### The deprecated `format` setting has been removed
Ruff previously used the `format` setting, `--format` CLI option, and `RUFF_FORMAT` environment variable to
configure the output format of the CLI. This usage was deprecated in `v0.0.291` — the `format` setting is now used
to control Ruff's code formatting. As of this release:
- The `format` setting cannot be used to configure the output format, use `output-format` instead
- The `RUFF_FORMAT` environment variable is ignored, use `RUFF_OUTPUT_FORMAT` instead
- The `--format` option has been removed from `ruff check`, use `--output-format` instead
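For illustration (a sketch that is not part of the original changelog, assuming the top-level `output-format` setting described above), the replacement configuration looks roughly like:

```toml
[tool.ruff]
# Controls the CLI output format; replaces the deprecated `format` usage.
output-format = "grouped"
```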
### Unsafe fixes are not applied by default ([#7769](https://github.com/astral-sh/ruff/pull/7769))
Ruff labels fixes as "safe" and "unsafe". The meaning and intent of your code will be retained when applying safe
fixes, but the meaning could be changed when applying unsafe fixes. Previously, unsafe fixes were always displayed
and applied when fixing was enabled. Now, unsafe fixes are hidden by default and not applied. The `--unsafe-fixes`
flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
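As a minimal sketch (not from the original changelog), opting back into unsafe fixes via configuration might look like this, assuming the top-level `unsafe-fixes` setting mentioned above:

```toml
[tool.ruff]
# Also display and apply fixes that Ruff marks as unsafe.
unsafe-fixes = true
```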
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
the stable rule set no longer includes `line-too-long` (`E501`) and `mixed-spaces-and-tabs` (`E101`). Other
excluded Pycodestyle rules include whitespace enforcement in `E1` and `E2`; these rules are currently in preview, and are already omitted by default.
This change only affects those using Ruff under its default rule set. Users that include `E` in their `select` will experience no change in behavior.
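As an illustrative sketch (assuming the standard `extend-select` setting; this example is not part of the original changelog), users who still want line-length enforcement can opt back in:

```toml
[tool.ruff]
# Re-enable line-too-long, which is no longer in the default rule set.
extend-select = ["E501"]
```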
## 0.0.288
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))
GitLab uses fingerprints to identify new, existing, or fixed violations. Previously, Ruff included the violation's position in the fingerprint. Using the location has the downside that changing any code before the violation causes the fingerprint to change, resulting in GitLab reporting one fixed and one new violation even though it is a pre-existing violation.
Ruff now uses a more stable, location-agnostic fingerprint to minimize the chance that existing violations are incorrectly marked as fixed and re-reported as new violations.
Expect GitLab to report each pre-existing violation in your project as both fixed and newly introduced in your Ruff upgrade PR.
## 0.0.283 / 0.284
### The target Python version now defaults to 3.8 instead of 3.10 ([#6397](https://github.com/astral-sh/ruff/pull/6397))
Previously, when a target Python version was not specified, Ruff would use a default of Python 3.10. However, it is safer to default to an _older_ Python version to avoid assuming the availability of new features. We now default to the oldest supported Python version which is currently Python 3.8.
(We still support Python 3.7, but since [it has reached EOL](https://devguide.python.org/versions/#unsupported-versions), we've decided not to make it the default here.)
Note this change was announced in 0.0.283 but not active until 0.0.284.
## 0.0.277
### `.ipynb_checkpoints`, `.pyenv`, `.pytest_cache`, and `.vscode` are now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
Ruff maintains a list of default exclusions, which now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `venv`
Previously, the `.ipynb_checkpoints`, `.pyenv`, `.pytest_cache`, and `.vscode` directories were not
excluded by default. This change brings Ruff's default exclusions in line with other tools like
Black.
## 0.0.276
### The `keep-runtime-typing` setting has been reinstated ([#5470](https://github.com/astral-sh/ruff/pull/5470))
The `keep-runtime-typing` setting has been reinstated with revised semantics. This setting was
removed in [#4427](https://github.com/astral-sh/ruff/pull/4427), as it was equivalent to ignoring
the `UP006` and `UP007` rules via Ruff's standard `ignore` mechanism.
Taking `UP006` (rewrite `List[int]` to `list[int]`) as an example, the setting now behaves as
follows:
- On Python 3.7 and Python 3.8, setting `keep-runtime-typing = true` will cause Ruff to ignore
`UP006` violations, even if `from __future__ import annotations` is present in the file.
While such annotations are valid in Python 3.7 and Python 3.8 when combined with
`from __future__ import annotations`, they aren't supported by libraries like Pydantic and
FastAPI, which rely on runtime type checking.
- On Python 3.9 and above, the setting has no effect, as `list[int]` is a valid type annotation,
and libraries like Pydantic and FastAPI support it without issue.
In short: `keep-runtime-typing` can be used to ensure that Ruff doesn't introduce type annotations
that are not supported at runtime by the current Python version, which are unsupported by libraries
like Pydantic and FastAPI.
Note that this is not a breaking change, but is included here to complement the previous removal
of `keep-runtime-typing`.
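A minimal sketch of the reinstated option (assuming it still lives under the `pyupgrade` section, as in earlier releases; this example is not part of the original changelog):

```toml
[tool.ruff.pyupgrade]
# Skip `UP006`/`UP007` rewrites that the runtime Python version cannot support.
keep-runtime-typing = true
```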
## 0.0.268
### The `keep-runtime-typing` setting has been removed ([#4427](https://github.com/astral-sh/ruff/pull/4427))
Enabling the `keep-runtime-typing` option, located under the `pyupgrade` section, is equivalent
to ignoring the `UP006` and `UP007` rules via Ruff's standard `ignore` mechanism. As there's no
need for a dedicated setting to disable these rules, the `keep-runtime-typing` option has been
removed.
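For illustration (a sketch assuming the top-level `ignore` setting used elsewhere in this document; not part of the original changelog), the equivalent configuration is:

```toml
[tool.ruff]
# Equivalent to the removed `keep-runtime-typing` option.
ignore = ["UP006", "UP007"]
```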
## 0.0.267
### `update-check` is no longer a valid configuration option ([#4313](https://github.com/astral-sh/ruff/pull/4313))
The `update-check` functionality was deprecated in [#2530](https://github.com/astral-sh/ruff/pull/2530),
in that the behavior itself was removed, and Ruff was changed to warn when that option was enabled.
Now, Ruff will throw an error when `update-check` is provided via a configuration file (e.g.,
`update-check = false`) or through the command-line, since it has no effect. Users should remove
this option from their configuration.
## 0.0.265
### `--fix-only` now exits with a zero exit code, unless `--exit-non-zero-on-fix` is specified ([#4146](https://github.com/astral-sh/ruff/pull/4146))
Previously, `--fix-only` would exit with a non-zero exit code if any fixes were applied. This
behavior was inconsistent with `--fix`, and further, meant that `--exit-non-zero-on-fix` was
effectively ignored when `--fix-only` was specified.
Now, `--fix-only` will exit with a zero exit code, unless `--exit-non-zero-on-fix` is specified,
in which case it will exit with a non-zero exit code if any fixes were applied.
## 0.0.260
### Fixes are now represented as a list of edits ([#3709](https://github.com/astral-sh/ruff/pull/3709))
Previously, Ruff represented each fix as a single edit, which prohibited Ruff from automatically
fixing violations that required multiple edits across a file. As such, Ruff now represents each
fix as a list of edits.
This primarily affects the JSON API. Ruff's JSON representation used to represent the `fix` field as
a single edit, like so:
```json
{
"message": "Remove unused import: `sys`",
"content": "",
"location": {"row": 1, "column": 0},
"end_location": {"row": 2, "column": 0}
}
```
The updated representation instead includes a list of edits:
```json
{
"message": "Remove unused import: `sys`",
"edits": [
{
"content": "",
"location": {"row": 1, "column": 0},
"end_location": {"row": 2, "column": 0},
}
]
}
```
## 0.0.246
### `multiple-statements-on-one-line-def` (`E704`) was removed ([#2773](https://github.com/astral-sh/ruff/pull/2773))
### `multiple-statements-on-one-line-def` (`E704`) was removed ([#2773](https://github.com/charliermarsh/ruff/pull/2773))
This rule was introduced in v0.0.245. However, it turns out that pycodestyle and Flake8 ignore this
rule by default, as it is not part of PEP 8. As such, we've removed it from Ruff.
## 0.0.245
### Ruff's public `check` method was removed ([#2709](https://github.com/astral-sh/ruff/pull/2709))
### Ruff's public `check` method was removed ([#2709](https://github.com/charliermarsh/ruff/pull/2709))
Previously, Ruff exposed a `check` method as a public Rust API. This method was used by few,
if any, clients and was not well documented or supported. As such, it has been removed, with
@@ -333,11 +17,10 @@ the intention of adding a stable public API in the future.
## 0.0.238
### `select`, `extend-select`, `ignore`, and `extend-ignore` have new semantics ([#2312](https://github.com/astral-sh/ruff/pull/2312))
### `select`, `extend-select`, `ignore`, and `extend-ignore` have new semantics ([#2312](https://github.com/charliermarsh/ruff/pull/2312))
Previously, the interplay between `select` and its related options could lead to unexpected
behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore ALL`
behaved differently. (See [#2312](https://github.com/astral-sh/ruff/pull/2312) for more
behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore ALL` behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
examples.)
When Ruff determines the enabled rule set, it has to reconcile `select` and `ignore` from a variety
@@ -363,14 +46,14 @@ ignore = ["F401"]
Running `ruff --select F` would previously have enabled all `F` rules, apart from `F401`. Now, it
will enable all `F` rules, including `F401`, as the command line's `--select` resets the resolution.
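To make the new resolution order concrete, here is a hedged sketch that mirrors the truncated example above (the exact `select` value is assumed):

```toml
[tool.ruff]
select = ["E", "F"]
ignore = ["F401"]
# Under the new semantics, `ruff --select F` enables every `F` rule,
# including `F401`, because the command-line `--select` resets the resolution.
```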
### `remove-six-compat` (`UP016`) has been removed ([#2332](https://github.com/astral-sh/ruff/pull/2332))
### `remove-six-compat` (`UP016`) has been removed ([#2332](https://github.com/charliermarsh/ruff/pull/2332))
The `remove-six-compat` rule has been removed. This rule was only useful for one-time Python 2-to-3
upgrades.
## 0.0.237
### `--explain`, `--clean`, and `--generate-shell-completion` are now subcommands ([#2190](https://github.com/astral-sh/ruff/pull/2190))
### `--explain`, `--clean`, and `--generate-shell-completion` are now subcommands ([#2190](https://github.com/charliermarsh/ruff/pull/2190))
`--explain`, `--clean`, and `--generate-shell-completion` are now implemented as subcommands:
@@ -391,36 +74,36 @@ This change is largely backwards compatible -- most users should experience
no change in behavior. However, please note the following exceptions:
- Subcommands will now fail when invoked with unsupported arguments, instead
of silently ignoring them. For example, the following will now fail:
of silently ignoring them. For example, the following will now fail:
```console
ruff --clean --respect-gitignore
```
```console
ruff --clean --respect-gitignore
```
(the `clean` command doesn't support `--respect-gitignore`.)
(the `clean` command doesn't support `--respect-gitignore`.)
- The semantics of `ruff <arg>` have changed slightly when `<arg>` is a valid subcommand.
For example, prior to this release, running `ruff rule` would run `ruff` over a file or
directory called `rule`. Now, `ruff rule` would invoke the `rule` subcommand. This should
only impact projects with files or directories named `rule`, `check`, `explain`, `clean`,
or `generate-shell-completion`.
For example, prior to this release, running `ruff rule` would run `ruff` over a file or
directory called `rule`. Now, `ruff rule` would invoke the `rule` subcommand. This should
only impact projects with files or directories named `rule`, `check`, `explain`, `clean`,
or `generate-shell-completion`.
- Scripts that invoke ruff should supply `--` before any positional arguments.
(The semantics of `ruff -- <arg>` have not changed.)
(The semantics of `ruff -- <arg>` have not changed.)
- `--explain` previously treated `--format grouped` as a synonym for `--format text`.
This is no longer supported; instead, use `--format text`.
This is no longer supported; instead, use `--format text`.
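As a sketch of the new interface (the rule code and paths are illustrative):
```shell
ruff check path/to/code/             # lint (the `check` subcommand)
ruff rule E501                       # look up a rule (previously `ruff --explain E501`)
ruff clean                           # clear the cache (previously `ruff --clean`)
ruff generate-shell-completion bash  # previously `ruff --generate-shell-completion bash`
# To lint a file or directory literally named `rule`, pass `--` first:
ruff -- rule
```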
## 0.0.226
### `misplaced-comparison-constant` (`PLC2201`) was deprecated in favor of `SIM300` ([#1980](https://github.com/astral-sh/ruff/pull/1980))
### `misplaced-comparison-constant` (`PLC2201`) was deprecated in favor of `SIM300` ([#1980](https://github.com/charliermarsh/ruff/pull/1980))
These two rules contain (nearly) identical logic. To deduplicate the rule set, we've upgraded
`SIM300` to handle a few more cases, and deprecated `PLC2201` in favor of `SIM300`.
## 0.0.225
### `@functools.cache` rewrites have been moved to a standalone rule (`UP033`) ([#1938](https://github.com/astral-sh/ruff/pull/1938))
### `@functools.cache` rewrites have been moved to a standalone rule (`UP033`) ([#1938](https://github.com/charliermarsh/ruff/pull/1938))
Previously, `UP011` handled both `@functools.lru_cache()`-to-`@functools.lru_cache` conversions,
_and_ `@functools.lru_cache(maxsize=None)`-to-`@functools.cache` conversions. The latter has been
@@ -429,7 +112,7 @@ to reflect the change in rule code.
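For illustration, the two rewrites can now be enabled and fixed independently (the decorators in the comments sketch the `UP033` fix; the path is hypothetical):
```shell
# Before:  @functools.lru_cache(maxsize=None)
# After:   @functools.cache           (UP033, requires Python 3.9+)
#
# The plain `@functools.lru_cache()` -> `@functools.lru_cache` rewrite remains UP011.
ruff path/to/code/ --select UP011,UP033 --fix
```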
## 0.0.222
### `--max-complexity` has been removed from the CLI ([#1877](https://github.com/astral-sh/ruff/pull/1877))
### `--max-complexity` has been removed from the CLI ([#1877](https://github.com/charliermarsh/ruff/pull/1877))
The McCabe plugin's `--max-complexity` setting has been removed from the CLI, for consistency with
the treatment of other, similar settings.
@@ -444,7 +127,7 @@ max-complexity = 10
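For example (a sketch; the path is hypothetical):
```shell
# No longer supported:
ruff path/to/code/ --max-complexity 10
# Instead, configure the McCabe plugin in pyproject.toml:
#
#   [tool.ruff.mccabe]
#   max-complexity = 10
#
# and select its rule as usual:
ruff path/to/code/ --select C901
```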
## 0.0.181
### Files excluded by `.gitignore` are now ignored ([#1234](https://github.com/astral-sh/ruff/pull/1234))
### Files excluded by `.gitignore` are now ignored ([#1234](https://github.com/charliermarsh/ruff/pull/1234))
Ruff will now avoid checking files that are excluded by `.ignore`, `.gitignore`,
`.git/info/exclude`, and global `gitignore` files. This behavior is powered by the [`ignore`](https://docs.rs/ignore/latest/ignore/struct.WalkBuilder.html#ignore-rules)
@@ -457,9 +140,9 @@ default.
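For illustration (the file pattern is hypothetical, and `respect-gitignore` is the setting assumed to control this behavior):
```shell
echo "generated_*.py" >> .gitignore
# Files matching .gitignore patterns are now skipped by default:
ruff .
# To lint them anyway, disable the behavior in pyproject.toml:
#
#   [tool.ruff]
#   respect-gitignore = false
```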
## 0.0.178
### Configuration files are now resolved hierarchically ([#1190](https://github.com/astral-sh/ruff/pull/1190))
### Configuration files are now resolved hierarchically ([#1190](https://github.com/charliermarsh/ruff/pull/1190))
`pyproject.toml` files are now resolved hierarchically, such that for each Python file, we find
the first `pyproject.toml` file in its path, and use that to determine its lint settings.
See the [documentation](https://docs.astral.sh/ruff/configuration/#python-file-discovery) for more.
See the [README](https://github.com/charliermarsh/ruff#pyprojecttoml-discovery) for more.
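For illustration, consider a hypothetical monorepo layout:
```shell
# ./pyproject.toml                  ->  [tool.ruff] line-length = 88
# ./packages/api/pyproject.toml     ->  [tool.ruff] line-length = 120
#
# packages/api/app.py is checked against packages/api/pyproject.toml (the first
# pyproject.toml found in its path); scripts/build.py falls back to the root
# pyproject.toml.
ruff .
```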

CODE_OF_CONDUCT.md

@@ -6,10 +6,10 @@
- [Scope](#scope)
- [Enforcement](#enforcement)
- [Enforcement Guidelines](#enforcement-guidelines)
- [1. Correction](#1-correction)
- [2. Warning](#2-warning)
- [3. Temporary Ban](#3-temporary-ban)
- [4. Permanent Ban](#4-permanent-ban)
- [1. Correction](#1-correction)
- [2. Warning](#2-warning)
- [3. Temporary Ban](#3-temporary-ban)
- [4. Permanent Ban](#4-permanent-ban)
- [Attribution](#attribution)
## Our Pledge
@@ -33,20 +33,20 @@ community include:
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
overall community
overall community
Examples of unacceptable behavior include:
- The use of sexualized language or imagery, and sexual attention or
advances of any kind
advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
address, without their explicit permission
address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
professional setting
## Enforcement Responsibilities
@@ -72,7 +72,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
<charlie.r.marsh@gmail.com>.
charlie.r.marsh@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the

Cargo.lock (generated, 3,382 lines changed): diff suppressed because it is too large.

Cargo.toml

@@ -1,214 +1,57 @@
[workspace]
members = ["crates/*"]
resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
rust-version = "1.67"
homepage = "https://beta.ruff.rs/docs/"
documentation = "https://beta.ruff.rs/docs/"
repository = "https://github.com/charliermarsh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
ruff_notebook = { path = "crates/ruff_notebook" }
ruff_python_ast = { path = "crates/ruff_python_ast" }
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
ruff_python_index = { path = "crates/ruff_python_index" }
ruff_python_literal = { path = "crates/ruff_python_literal" }
ruff_python_parser = { path = "crates/ruff_python_parser" }
ruff_python_semantic = { path = "crates/ruff_python_semantic" }
ruff_python_stdlib = { path = "crates/ruff_python_stdlib" }
ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_vendored = { path = "crates/ruff_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
anyhow = { version = "1.0.69" }
bitflags = { version = "1.3.2" }
chrono = { version = "0.4.23", default-features = false, features = ["clock"] }
clap = { version = "4.1.8", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.13.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
globset = { version = "0.4.10" }
ignore = { version = "0.4.20" }
insta = { version = "1.28.0" }
is-macro = { version = "0.2.2" }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e5beb532fdd1e209ad2dbb470438" }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
"proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.5.0" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.0.14" }
proc-macro2 = { version = "1.0.51" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
"macros",
] }
regex = { version = "1.7.1" }
rustc-hash = { version = "1.1.0" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "c15f670f2c30cfae6b41a1874893590148c74bc4" }
rustpython-parser = { features = [
"lalrpop",
"serde",
], git = "https://github.com/RustPython/RustPython.git", rev = "c15f670f2c30cfae6b41a1874893590148c74bc4" }
schemars = { version = "0.8.12" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.93" }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
map_unwrap_or = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
too_many_lines = "allow"
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"
empty_drop = "warn"
empty_structs_with_brackets = "warn"
exit = "warn"
get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
similar = { version = "2.2.1" }
strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" }
syn = { version = "1.0.109" }
test-case = { version = "3.0.0" }
textwrap = { version = "0.16.0" }
toml = { version = "0.7.2" }
[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
panic = "abort"
lto = "thin"
codegen-units = 16
# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
[profile.release.package.ruff_python_parser]
codegen-units = 1
[profile.release.package.ruff_python_ast]
codegen-units = 1
opt-level = 3
[profile.dev.package.insta]
opt-level = 3
@@ -218,72 +61,11 @@ opt-level = 3
# Reduce complexity of a parser function that would trigger a locals limit in a wasm tool.
# https://github.com/bytecodealliance/wasm-tools/blob/b5c3d98e40590512a3b12470ef358d5c7b983b15/crates/wasmparser/src/limits.rs#L29
[profile.dev.package.ruff_python_parser]
[profile.dev.package.rustpython-parser]
opt-level = 1
# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
inherits = "release"
debug = 1
# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.22.1"
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
windows-archive = ".zip"
# The archive format to use for non-windows builds (defaults .tar.xz)
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Which actions to run on pull requests
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase cargo-dist should use to create the GitHub release
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = "CARGO_HOME"

Dockerfile

@@ -1,38 +0,0 @@
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME
RUN apt update && apt install -y build-essential curl python3-venv
# Setup zig as cross compiling linker
RUN python3 -m venv $HOME/.venv
RUN .venv/bin/pip install cargo-zigbuild
ENV PATH="$HOME/.venv/bin:$PATH"
# Install rust
ARG TARGETPLATFORM
RUN case "$TARGETPLATFORM" in \
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
*) exit 1 ;; \
esac
# Update rustup whenever we bump the rust version
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
RUN rustup target add $(cat rust_target.txt)
# Build
COPY crates crates
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
# TODO: Optimize binary size, with a version that also works when cross compiling
# RUN strip --strip-all /ruff
FROM scratch
COPY --from=build /ruff /ruff
WORKDIR /io
ENTRYPOINT ["/ruff"]

LICENSE (286 lines changed)

@@ -195,15 +195,6 @@ are:
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
- flake8-gettext, licensed as follows:
"""
BSD Zero Clause License
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
- flake8-implicit-str-concat, licensed as follows:
"""
The MIT License (MIT)
@@ -354,60 +345,6 @@ are:
SOFTWARE.
"""
- flake8-slots, licensed as follows:
"""
Copyright (c) 2021 Dominic Davis-Foster
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
"""
- flake8-todos, licensed as follows:
"""
Copyright (c) 2019 EclecticIQ. All rights reserved.
Copyright (c) 2020 Gram <gram@orsinium.dev>. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
- flake8-unused-arguments, licensed as follows:
"""
MIT License
@@ -456,6 +393,7 @@ are:
THE SOFTWARE.
"""
- autoflake, licensed as follows:
"""
Copyright (C) 2012-2018 Steven Myint
@@ -479,31 +417,6 @@ are:
SOFTWARE.
"""
- autotyping, licensed as follows:
"""
MIT License
Copyright (c) 2023 Jelle Zijlstra
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- Flake8, licensed as follows:
"""
== Flake8 License (MIT) ==
@@ -604,30 +517,6 @@ are:
THE SOFTWARE.
"""
- flynt, licensed as follows:
"""
MIT License
Copyright (c) 2019-2022 Ilya Kamenshchikov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- isort, licensed as follows:
"""
@@ -837,31 +726,6 @@ are:
SOFTWARE.
"""
- flake8-async, licensed as follows:
"""
MIT License
Copyright (c) 2022 Cooper Lees
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- flake8-type-checking, licensed as follows:
"""
Copyright (c) 2021, Sondre Lillebø Gundersen
@@ -1194,132 +1058,11 @@ are:
- flake8-self, licensed as follows:
"""
MIT License
Copyright (c) 2023 Korijn van Golen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Freely Distributable
"""
- flake8-django, licensed under the GPL license.
- perflint, licensed as follows:
"""
MIT License
Copyright (c) 2022 Anthony Shaw
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- flake8-logging, licensed as follows:
"""
MIT License
Copyright (c) 2023 Adam Johnson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- flake8-trio, licensed as follows:
"""
MIT License
Copyright (c) 2022 Zac Hatfield-Dodds
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- Pyright, licensed as follows:
"""
MIT License
Pyright - A static type checker for the Python language
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
"""
- rust-analyzer/text-size, licensed under the MIT license:
"""
Permission is hereby granted, free of charge, to any
@@ -1371,28 +1114,3 @@ are:
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- pydoclint, licensed as follows:
"""
MIT License
Copyright (c) 2023 jsh9
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

README.md (462 lines changed)

@@ -2,22 +2,21 @@
# Ruff
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff)
[![image](https://img.shields.io/pypi/v/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![image](https://img.shields.io/pypi/l/ruff.svg)](https://github.com/astral-sh/ruff/blob/main/LICENSE)
[![image](https://img.shields.io/pypi/l/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![image](https://img.shields.io/pypi/pyversions/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![Actions status](https://github.com/astral-sh/ruff/workflows/CI/badge.svg)](https://github.com/astral-sh/ruff/actions)
[![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?logo=discord&logoColor=white)](https://discord.com/invite/astral-sh)
[![Actions status](https://github.com/charliermarsh/ruff/workflows/CI/badge.svg)](https://github.com/charliermarsh/ruff/actions)
[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://beta.ruff.rs/docs/) | [**Playground**](https://play.ruff.rs/)
An extremely fast Python linter and code formatter, written in Rust.
An extremely fast Python linter, written in Rust.
<p align="center">
<picture align="center">
<source media="(prefers-color-scheme: dark)" srcset="https://user-images.githubusercontent.com/1309177/232603514-c95e9b0f-6b31-43de-9a80-9e844173fd6a.svg">
<source media="(prefers-color-scheme: light)" srcset="https://user-images.githubusercontent.com/1309177/232603516-4fb4892d-585c-4b20-b810-3db9161831e4.svg">
<img alt="Shows a bar chart with benchmark results." src="https://user-images.githubusercontent.com/1309177/232603516-4fb4892d-585c-4b20-b810-3db9161831e4.svg">
<source media="(prefers-color-scheme: dark)" srcset="https://user-images.githubusercontent.com/1309177/212613422-7faaf278-706b-4294-ad92-236ffcab3430.svg">
<source media="(prefers-color-scheme: light)" srcset="https://user-images.githubusercontent.com/1309177/212613257-5f4bca12-6d6b-4c79-9bac-51a4c6d08928.svg">
<img alt="Shows a bar chart with benchmark results." src="https://user-images.githubusercontent.com/1309177/212613257-5f4bca12-6d6b-4c79-9bac-51a4c6d08928.svg">
</picture>
</p>
@@ -25,41 +24,39 @@ An extremely fast Python linter and code formatter, written in Rust.
<i>Linting the CPython codebase from scratch.</i>
</p>
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.11 compatibility
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/)
- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
- ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
Ruff can be used to replace [Flake8](https://pypi.org/project/flake8/) (plus dozens of plugins),
[Black](https://github.com/psf/black), [isort](https://pypi.org/project/isort/),
[pydocstyle](https://pypi.org/project/pydocstyle/), [pyupgrade](https://pypi.org/project/pyupgrade/),
[autoflake](https://pypi.org/project/autoflake/), and more, all while executing tens or hundreds of
times faster than any individual tool.
[isort](https://pypi.org/project/isort/), [pydocstyle](https://pypi.org/project/pydocstyle/),
[yesqa](https://github.com/asottile/yesqa), [eradicate](https://pypi.org/project/eradicate/),
[pyupgrade](https://pypi.org/project/pyupgrade/), and [autoflake](https://pypi.org/project/autoflake/),
all while executing tens or hundreds of times faster than any individual tool.
Ruff is extremely actively developed and used in major open-source projects like:
- [Apache Airflow](https://github.com/apache/airflow)
- [Apache Superset](https://github.com/apache/superset)
- [pandas](https://github.com/pandas-dev/pandas)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Hugging Face](https://github.com/huggingface/transformers)
- [Pandas](https://github.com/pandas-dev/pandas)
- [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
- [Apache Airflow](https://github.com/apache/airflow)
- [SciPy](https://github.com/scipy/scipy)
...and [many more](#whos-using-ruff).
...and many more.
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster) or
the most recent [project update](https://notes.crmarsh.com/ruff-the-first-200-releases).
## Testimonials
@@ -87,10 +84,9 @@ of [Conda](https://docs.conda.io/en/latest/):
[**Timothy Crosley**](https://twitter.com/timothycrosley/status/1606420868514877440),
creator of [isort](https://github.com/PyCQA/isort):
> Just switched my first project to Ruff. Only one downside so far: it's so fast I couldn't believe
> it was working till I intentionally introduced some errors.
> Just switched my first project to Ruff. Only one downside so far: it's so fast I couldn't believe it was working till I intentionally introduced some errors.
[**Tim Abbott**](https://github.com/astral-sh/ruff/issues/465#issuecomment-1317400028), lead
[**Tim Abbott**](https://github.com/charliermarsh/ruff/issues/465#issuecomment-1317400028), lead
developer of [Zulip](https://github.com/zulip/zulip):
> This is just ridiculously fast... `ruff` is amazing.
@@ -99,7 +95,7 @@ developer of [Zulip](https://github.com/zulip/zulip):
## Table of Contents
For more, see the [documentation](https://docs.astral.sh/ruff/).
For more, see the [documentation](https://beta.ruff.rs/docs/).
1. [Getting Started](#getting-started)
1. [Configuration](#configuration)
@@ -110,197 +106,122 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)
## Getting Started<a id="getting-started"></a>
## Getting Started
For more, see the [documentation](https://docs.astral.sh/ruff/).
For more, see the [documentation](https://beta.ruff.rs/docs/).
### Installation
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
```shell
# With pip.
pip install ruff
# With pipx.
pipx install ruff
```
Starting with version `0.5.0`, Ruff can be installed with our standalone installers:
```shell
# On macOS and Linux.
curl -LsSf https://astral.sh/ruff/install.sh | sh
# On Windows.
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.6.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.7/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
and with [a variety of other package managers](https://docs.astral.sh/ruff/installation/).
and with [a variety of other package managers](https://beta.ruff.rs/docs/installation/).
### Usage
To run Ruff as a linter, try any of the following:
To run Ruff, try any of the following:
```shell
ruff check # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py # Lint `file.py`.
ruff check @arguments.txt # Lint using an input file, treating its contents as newline-delimited command-line arguments.
ruff check . # Lint all files in the current directory (and any subdirectories)
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories)
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`
ruff check path/to/code/to/file.py # Lint `file.py`
```
Or, to run Ruff as a formatter:
```shell
ruff format # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py # Format `file.py`.
ruff format @arguments.txt # Format using an input file, treating its contents as newline-delimited command-line arguments.
```
Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: v0.6.7
rev: 'v0.0.256'
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
```
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
Ruff can also be used as a [VS Code extension](https://github.com/charliermarsh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/charliermarsh/ruff-lsp).
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):
```yaml
name: Ruff
on: [ push, pull_request ]
jobs:
ruff:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: chartboost/ruff-action@v1
```
### Configuration<a id="configuration"></a>
### Configuration
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
[_Configuration_](https://beta.ruff.rs/docs/configuration/), or [_Settings_](https://beta.ruff.rs/docs/settings/)
for a complete list of all configuration options).
If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
If left unspecified, the default configuration is equivalent to:
```toml
[tool.ruff]
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
select = ["E", "F"]
ignore = []
# Allow autofix for all enabled rules (when `--fix`) is provided.
fixable = ["A", "B", "C", "D", "E", "F", "..."]
unfixable = []
# Exclude a variety of commonly ignored directories.
exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
# Same as Black.
line-length = 88
indent-width = 4
# Assume Python 3.8
target-version = "py38"
[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []
# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
unfixable = []
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
[format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Assume Python 3.10.
target-version = "py310"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
```
Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
Some configuration options can be provided via dedicated command-line arguments, such as those
related to rule enablement and disablement, file discovery, and logging level:
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, logging level, and more:
```shell
ruff check --select F401 --select F403 --quiet
ruff check path/to/code/ --select F401 --select F403 --quiet
```
The remaining configuration options can be provided through a catch-all `--config` argument:
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
linting command.
```shell
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
```
To opt in to the latest lint rules, formatter style changes, interface updates, and more, enable
[preview mode](https://docs.astral.sh/ruff/rules/) by setting `preview = true` in your configuration
file or passing `--preview` on the command line. Preview mode enables a collection of unstable
features that may change prior to stabilization.
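For example, to opt in on the command line (the path is illustrative):
```shell
ruff check path/to/code/ --preview
ruff format path/to/code/ --preview
```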
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
## Rules<a id="rules"></a>
## Rules
<!-- Begin section: Rules -->
**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
**Ruff supports over 500 lint rules**, many of which are inspired by popular tools like Flake8,
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.
By default, Ruff enables Flake8's `F` rules, along with a subset of the `E` rules, omitting any
stylistic rules that overlap with the use of a formatter, like `ruff format` or
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
and a [subset](https://beta.ruff.rs/docs/rules/#error-e) of the `E` category, omitting those
stylistic rules made obsolete by the use of an autoformatter, like
[Black](https://github.com/psf/black).
If you're just getting started with Ruff, **the default rule set is a great place to start**: it
@@ -308,80 +229,23 @@ catches a wide variety of common errors (like unused imports) with zero configur
<!-- End section: Rules -->
Beyond the defaults, Ruff re-implements some of the most popular Flake8 plugins and related code
quality tools, including:
For a complete enumeration of the supported rules, see [_Rules_](https://beta.ruff.rs/docs/rules/).
- [autoflake](https://pypi.org/project/autoflake/)
- [eradicate](https://pypi.org/project/eradicate/)
- [flake8-2020](https://pypi.org/project/flake8-2020/)
- [flake8-annotations](https://pypi.org/project/flake8-annotations/)
- [flake8-async](https://pypi.org/project/flake8-async)
- [flake8-bandit](https://pypi.org/project/flake8-bandit/) ([#1646](https://github.com/astral-sh/ruff/issues/1646))
- [flake8-blind-except](https://pypi.org/project/flake8-blind-except/)
- [flake8-boolean-trap](https://pypi.org/project/flake8-boolean-trap/)
- [flake8-bugbear](https://pypi.org/project/flake8-bugbear/)
- [flake8-builtins](https://pypi.org/project/flake8-builtins/)
- [flake8-commas](https://pypi.org/project/flake8-commas/)
- [flake8-comprehensions](https://pypi.org/project/flake8-comprehensions/)
- [flake8-copyright](https://pypi.org/project/flake8-copyright/)
- [flake8-datetimez](https://pypi.org/project/flake8-datetimez/)
- [flake8-debugger](https://pypi.org/project/flake8-debugger/)
- [flake8-django](https://pypi.org/project/flake8-django/)
- [flake8-docstrings](https://pypi.org/project/flake8-docstrings/)
- [flake8-eradicate](https://pypi.org/project/flake8-eradicate/)
- [flake8-errmsg](https://pypi.org/project/flake8-errmsg/)
- [flake8-executable](https://pypi.org/project/flake8-executable/)
- [flake8-future-annotations](https://pypi.org/project/flake8-future-annotations/)
- [flake8-gettext](https://pypi.org/project/flake8-gettext/)
- [flake8-implicit-str-concat](https://pypi.org/project/flake8-implicit-str-concat/)
- [flake8-import-conventions](https://github.com/joaopalmeiro/flake8-import-conventions)
- [flake8-logging](https://pypi.org/project/flake8-logging/)
- [flake8-logging-format](https://pypi.org/project/flake8-logging-format/)
- [flake8-no-pep420](https://pypi.org/project/flake8-no-pep420)
- [flake8-pie](https://pypi.org/project/flake8-pie/)
- [flake8-print](https://pypi.org/project/flake8-print/)
- [flake8-pyi](https://pypi.org/project/flake8-pyi/)
- [flake8-pytest-style](https://pypi.org/project/flake8-pytest-style/)
- [flake8-quotes](https://pypi.org/project/flake8-quotes/)
- [flake8-raise](https://pypi.org/project/flake8-raise/)
- [flake8-return](https://pypi.org/project/flake8-return/)
- [flake8-self](https://pypi.org/project/flake8-self/)
- [flake8-simplify](https://pypi.org/project/flake8-simplify/)
- [flake8-slots](https://pypi.org/project/flake8-slots/)
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
- [isort](https://pypi.org/project/isort/)
- [mccabe](https://pypi.org/project/mccabe/)
- [pandas-vet](https://pypi.org/project/pandas-vet/)
- [pep8-naming](https://pypi.org/project/pep8-naming/)
- [pydocstyle](https://pypi.org/project/pydocstyle/)
- [pygrep-hooks](https://github.com/pre-commit/pygrep-hooks)
- [pylint-airflow](https://pypi.org/project/pylint-airflow/)
- [pyupgrade](https://pypi.org/project/pyupgrade/)
- [tryceratops](https://pypi.org/project/tryceratops/)
- [yesqa](https://pypi.org/project/yesqa/)
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
## Contributing<a id="contributing"></a>
## Contributing
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
[**contributing guidelines**](https://beta.ruff.rs/docs/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
## Support<a id="support"></a>
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/charliermarsh/ruff/issues),
or feel free to [**open a new one**](https://github.com/charliermarsh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements
Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -392,144 +256,64 @@ In some cases, Ruff includes a "direct" Rust port of the corresponding tool.
We're grateful to the maintainers of these tools for their work, and for all
the value they've provided to the Python community.
Ruff's formatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
and again draws on both API and implementation details from [Rome](https://github.com/rome/tools),
Ruff's autoformatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
and again draws on both the APIs and implementation details of [Rome](https://github.com/rome/tools),
[Prettier](https://github.com/prettier/prettier), and [Black](https://github.com/psf/black).
Ruff's import resolver is based on the import resolution algorithm from [Pyright](https://github.com/microsoft/pyright).
Ruff is also influenced by a number of tools outside the Python ecosystem, like
[Clippy](https://github.com/rust-lang/rust-clippy) and [ESLint](https://github.com/eslint/eslint).
Ruff is the beneficiary of a large number of [contributors](https://github.com/astral-sh/ruff/graphs/contributors).
Ruff is the beneficiary of a large number of [contributors](https://github.com/charliermarsh/ruff/graphs/contributors).
Ruff is released under the MIT license.
## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?
Ruff is used by a number of major open-source projects and companies, including:
Ruff is used in a number of major open-source projects, including:
- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [pandas](https://github.com/pandas-dev/pandas)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
[Datasets](https://github.com/huggingface/datasets),
[Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
- [MegaLinter](https://github.com/oxsecurity/megalinter)
- Meltano ([Meltano CLI](https://github.com/meltano/meltano), [Singer SDK](https://github.com/meltano/sdk))
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
[LightGBM](https://github.com/microsoft/LightGBM))
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [Mypy](https://github.com/python/mypy)
- [Nautobot](https://github.com/nautobot/nautobot)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)
- [Pillow](https://github.com/python-pillow/Pillow)
- [Poetry](https://github.com/python-poetry/poetry)
- [Polars](https://github.com/pola-rs/polars)
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
- [PyMC](https://github.com/pymc-devs/pymc/)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [pytest](https://github.com/pytest-dev/pytest)
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)
- [Robyn](https://github.com/sansyrox/robyn)
- [Saleor](https://github.com/saleor/saleor)
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
- [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
- [Diffusers (Hugging Face)](https://github.com/huggingface/diffusers)
- [Apache Airflow](https://github.com/apache/airflow)
- [SciPy](https://github.com/scipy/scipy)
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
- [ZenML](https://github.com/zenml-io/zenml)
- [Zulip](https://github.com/zulip/zulip)
- [build (PyPA)](https://github.com/pypa/build)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Dagster](https://github.com/dagster-io/dagster)
- [Dagger](https://github.com/dagger/dagger)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Hatch](https://github.com/pypa/hatch)
- [PDM](https://github.com/pdm-project/pdm)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [ONNX](https://github.com/onnx/onnx)
- [Polars](https://github.com/pola-rs/polars)
- [Ibis](https://github.com/ibis-project/ibis)
- [Synapse (Matrix)](https://github.com/matrix-org/synapse)
- [SnowCLI (Snowflake)](https://github.com/Snowflake-Labs/snowcli)
- [Dispatch (Netflix)](https://github.com/Netflix/dispatch)
- [Saleor](https://github.com/saleor/saleor)
- [Pynecone](https://github.com/pynecone-io/pynecone)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Home Assistant](https://github.com/home-assistant/core)
- [Pylint](https://github.com/PyCQA/pylint)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- [cibuildwheel (PyPA)](https://github.com/pypa/cibuildwheel)
- [delta-rs](https://github.com/delta-io/delta-rs)
- [build (PyPA)](https://github.com/pypa/build)
- [Babel](https://github.com/python-babel/babel)
- [featuretools](https://github.com/alteryx/featuretools)
- [meson-python](https://github.com/mesonbuild/meson-python)
- [ZenML](https://github.com/zenml-io/zenml)
- [delta-rs](https://github.com/delta-io/delta-rs)
- [Starlite](https://github.com/starlite-api/starlite)
- [telemetry-airflow (Mozilla)](https://github.com/mozilla/telemetry-airflow)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [nox](https://github.com/wntrblm/nox)
- [pip](https://github.com/pypa/pip)
- [Neon](https://github.com/neondatabase/neon)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
### Show Your Support
## License
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
```md
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
```
...or `README.rst`:
```rst
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
:target: https://github.com/astral-sh/ruff
:alt: Ruff
```
...or, as HTML:
```html
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```
## License<a id="license"></a>
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
<div align="center">
<a target="_blank" href="https://astral.sh" style="background:none">
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
</a>
</div>
MIT

View File

@@ -1,21 +1,7 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["snapshots", "black"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
trivias = "trivias"
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
[default]
extend-ignore-re = [
# Line ignore with trailing "spellchecker:disable-line"
"(?Rm)^.*#\\s*spellchecker:disable-line$",
"LICENSEs",
]

View File

@@ -1,8 +0,0 @@
{
"label": "code style",
"message": "Ruff",
"logoSvg": "<svg width=\"510\" height=\"622\" viewBox=\"0 0 510 622\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\"><path fill-rule=\"evenodd\" clip-rule=\"evenodd\" d=\"M206.701 0C200.964 0 196.314 4.64131 196.314 10.3667V41.4667C196.314 47.192 191.663 51.8333 185.927 51.8333H156.843C151.107 51.8333 146.456 56.4746 146.456 62.2V145.133C146.456 150.859 141.806 155.5 136.069 155.5H106.986C101.249 155.5 96.5988 160.141 96.5988 165.867V222.883C96.5988 228.609 91.9484 233.25 86.2118 233.25H57.1283C51.3917 233.25 46.7413 237.891 46.7413 243.617V300.633C46.7413 306.359 42.0909 311 36.3544 311H10.387C4.6504 311 0 315.641 0 321.367V352.467C0 358.192 4.6504 362.833 10.387 362.833H145.418C151.154 362.833 155.804 367.475 155.804 373.2V430.217C155.804 435.942 151.154 440.583 145.418 440.583H116.334C110.597 440.583 105.947 445.225 105.947 450.95V507.967C105.947 513.692 101.297 518.333 95.5601 518.333H66.4766C60.74 518.333 56.0896 522.975 56.0896 528.7V611.633C56.0896 617.359 60.74 622 66.4766 622H149.572C155.309 622 159.959 617.359 159.959 611.633V570.167H201.507C207.244 570.167 211.894 565.525 211.894 559.8V528.7C211.894 522.975 216.544 518.333 222.281 518.333H251.365C257.101 518.333 261.752 513.692 261.752 507.967V476.867C261.752 471.141 266.402 466.5 272.138 466.5H301.222C306.959 466.5 311.609 461.859 311.609 456.133V425.033C311.609 419.308 316.259 414.667 321.996 414.667H351.079C356.816 414.667 361.466 410.025 361.466 404.3V373.2C361.466 367.475 366.117 362.833 371.853 362.833H400.937C406.673 362.833 411.324 358.192 411.324 352.467V321.367C411.324 315.641 415.974 311 421.711 311H450.794C456.531 311 461.181 306.359 461.181 300.633V217.7C461.181 211.975 456.531 207.333 450.794 207.333H420.672C414.936 207.333 410.285 202.692 410.285 196.967V165.867C410.285 160.141 414.936 155.5 420.672 155.5H449.756C455.492 155.5 460.143 150.859 460.143 145.133V114.033C460.143 108.308 464.793 103.667 470.53 103.667H499.613C505.35 103.667 510 99.0253 510 93.3V10.3667C510 4.64132 505.35 0 499.613 0H206.701ZM168.269 440.583C162.532 440.583 157.882 445.225 157.882 450.95V507.967C157.882 513.692 153.231 518.333 147.495 518.333H118.411C112.675 518.333 108.024 522.975 108.024 528.7V559.8C108.024 565.525 112.675 570.167 118.411 570.167H159.959V528.7C159.959 522.975 164.61 518.333 170.346 518.333H199.43C205.166 518.333 209.817 513.692 209.817 507.967V476.867C209.817 471.141 214.467 466.5 220.204 466.5H249.287C255.024 466.5 259.674 461.859 259.674 456.133V425.033C259.674 419.308 264.325 414.667 270.061 414.667H299.145C304.881 414.667 309.532 410.025 309.532 404.3V373.2C309.532 367.475 314.182 362.833 319.919 362.833H349.002C354.739 362.833 359.389 358.192 359.389 352.467V321.367C359.389 315.641 364.039 311 369.776 311H398.859C404.596 311 409.246 306.359 409.246 300.633V269.533C409.246 263.808 404.596 259.167 398.859 259.167H318.88C313.143 259.167 308.493 254.525 308.493 248.8V217.7C308.493 211.975 313.143 207.333 318.88 207.333H347.963C353.7 207.333 358.35 202.692 358.35 196.967V165.867C358.35 160.141 363.001 155.5 368.737 155.5H397.821C403.557 155.5 408.208 150.859 408.208 145.133V114.033C408.208 108.308 412.858 103.667 418.595 103.667H447.678C453.415 103.667 458.065 99.0253 458.065 93.3V62.2C458.065 56.4746 453.415 51.8333 447.678 51.8333H208.778C203.041 51.8333 198.391 56.4746 198.391 62.2V145.133C198.391 150.859 193.741 155.5 188.004 155.5H158.921C153.184 155.5 148.534 160.141 148.534 165.867V222.883C148.534 228.609 143.883 233.25 138.147 233.25H109.063C103.327 233.25 98.6762 237.891 98.6762 243.617V300.633C98.6762 
306.359 103.327 311 109.063 311H197.352C203.089 311 207.739 315.641 207.739 321.367V430.217C207.739 435.942 203.089 440.583 197.352 440.583H168.269Z\" fill=\"#D7FF64\"/></svg>",
"logoWidth": 10,
"labelColor": "grey",
"color": "#261230"
}

View File

@@ -1,8 +0,0 @@
{
"label": "",
"message": "Ruff",
"logoSvg": "<svg width=\"510\" height=\"622\" viewBox=\"0 0 510 622\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\"><path fill-rule=\"evenodd\" clip-rule=\"evenodd\" d=\"M206.701 0C200.964 0 196.314 4.64131 196.314 10.3667V41.4667C196.314 47.192 191.663 51.8333 185.927 51.8333H156.843C151.107 51.8333 146.456 56.4746 146.456 62.2V145.133C146.456 150.859 141.806 155.5 136.069 155.5H106.986C101.249 155.5 96.5988 160.141 96.5988 165.867V222.883C96.5988 228.609 91.9484 233.25 86.2118 233.25H57.1283C51.3917 233.25 46.7413 237.891 46.7413 243.617V300.633C46.7413 306.359 42.0909 311 36.3544 311H10.387C4.6504 311 0 315.641 0 321.367V352.467C0 358.192 4.6504 362.833 10.387 362.833H145.418C151.154 362.833 155.804 367.475 155.804 373.2V430.217C155.804 435.942 151.154 440.583 145.418 440.583H116.334C110.597 440.583 105.947 445.225 105.947 450.95V507.967C105.947 513.692 101.297 518.333 95.5601 518.333H66.4766C60.74 518.333 56.0896 522.975 56.0896 528.7V611.633C56.0896 617.359 60.74 622 66.4766 622H149.572C155.309 622 159.959 617.359 159.959 611.633V570.167H201.507C207.244 570.167 211.894 565.525 211.894 559.8V528.7C211.894 522.975 216.544 518.333 222.281 518.333H251.365C257.101 518.333 261.752 513.692 261.752 507.967V476.867C261.752 471.141 266.402 466.5 272.138 466.5H301.222C306.959 466.5 311.609 461.859 311.609 456.133V425.033C311.609 419.308 316.259 414.667 321.996 414.667H351.079C356.816 414.667 361.466 410.025 361.466 404.3V373.2C361.466 367.475 366.117 362.833 371.853 362.833H400.937C406.673 362.833 411.324 358.192 411.324 352.467V321.367C411.324 315.641 415.974 311 421.711 311H450.794C456.531 311 461.181 306.359 461.181 300.633V217.7C461.181 211.975 456.531 207.333 450.794 207.333H420.672C414.936 207.333 410.285 202.692 410.285 196.967V165.867C410.285 160.141 414.936 155.5 420.672 155.5H449.756C455.492 155.5 460.143 150.859 460.143 145.133V114.033C460.143 108.308 464.793 103.667 470.53 103.667H499.613C505.35 103.667 510 99.0253 510 93.3V10.3667C510 4.64132 505.35 0 499.613 0H206.701ZM168.269 440.583C162.532 440.583 157.882 445.225 157.882 450.95V507.967C157.882 513.692 153.231 518.333 147.495 518.333H118.411C112.675 518.333 108.024 522.975 108.024 528.7V559.8C108.024 565.525 112.675 570.167 118.411 570.167H159.959V528.7C159.959 522.975 164.61 518.333 170.346 518.333H199.43C205.166 518.333 209.817 513.692 209.817 507.967V476.867C209.817 471.141 214.467 466.5 220.204 466.5H249.287C255.024 466.5 259.674 461.859 259.674 456.133V425.033C259.674 419.308 264.325 414.667 270.061 414.667H299.145C304.881 414.667 309.532 410.025 309.532 404.3V373.2C309.532 367.475 314.182 362.833 319.919 362.833H349.002C354.739 362.833 359.389 358.192 359.389 352.467V321.367C359.389 315.641 364.039 311 369.776 311H398.859C404.596 311 409.246 306.359 409.246 300.633V269.533C409.246 263.808 404.596 259.167 398.859 259.167H318.88C313.143 259.167 308.493 254.525 308.493 248.8V217.7C308.493 211.975 313.143 207.333 318.88 207.333H347.963C353.7 207.333 358.35 202.692 358.35 196.967V165.867C358.35 160.141 363.001 155.5 368.737 155.5H397.821C403.557 155.5 408.208 150.859 408.208 145.133V114.033C408.208 108.308 412.858 103.667 418.595 103.667H447.678C453.415 103.667 458.065 99.0253 458.065 93.3V62.2C458.065 56.4746 453.415 51.8333 447.678 51.8333H208.778C203.041 51.8333 198.391 56.4746 198.391 62.2V145.133C198.391 150.859 193.741 155.5 188.004 155.5H158.921C153.184 155.5 148.534 160.141 148.534 165.867V222.883C148.534 228.609 143.883 233.25 138.147 233.25H109.063C103.327 233.25 98.6762 237.891 98.6762 243.617V300.633C98.6762 
306.359 103.327 311 109.063 311H197.352C203.089 311 207.739 315.641 207.739 321.367V430.217C207.739 435.942 203.089 440.583 197.352 440.583H168.269Z\" fill=\"#D7FF64\"/></svg>",
"logoWidth": 10,
"labelColor": "grey",
"color": "#261230"
}

Binary file not shown (3.8 KiB)

View File

@@ -1,24 +0,0 @@
<svg width="139" height="24" viewBox="0 0 139 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="138.764" height="24" rx="2.18182" fill="#261230"/>
<path
d="M8.72798 15.2726H9.91316V11.8697L9.6887 10.4062L9.8952 10.3343L12.1309 15.1649L14.3486 10.3343L14.5461 10.4062L14.3486 11.8607V15.2726H15.5248V8.72714H13.9535L12.2117 12.7137H12.0142L10.2723 8.72714H8.72798V15.2726Z"
fill="#D7FF64"/>
<path
d="M22.3432 15.2726H23.6631L21.3017 8.72714H19.7574L17.4589 15.2726H18.7069L19.1558 13.9797H21.9033L22.3432 15.2726ZM19.497 13.0279L19.901 11.8607L20.4308 10.0021H20.6463L21.176 11.8607L21.5711 13.0279H19.497Z"
fill="#D7FF64"/>
<path
d="M25.4209 15.2726H28.1234C30.1077 15.2726 30.9876 14.1413 30.9876 12.0044C30.9876 9.92131 30.1706 8.72714 28.1234 8.72714H25.4209V15.2726ZM26.624 14.2131V9.77765H28.0965C29.147 9.77765 29.7306 10.1907 29.7306 11.4477V12.5521C29.7306 13.6923 29.2817 14.2131 28.0965 14.2131H26.624Z"
fill="#D7FF64"/>
<path
d="M33.079 15.2726H37.6491V14.2131H34.2822V12.3815H37.2002V11.3938H34.2822V9.77765H37.6491V8.72714H33.079V15.2726Z"
fill="#D7FF64"/>
<path
d="M42.923 15.2726H46.2451C47.4572 15.2726 48.2025 14.5812 48.2025 13.5487C48.2025 12.7675 47.8343 12.175 47.0532 11.9954V11.7799C47.6637 11.5734 48.0319 11.0436 48.0319 10.3433C48.0319 9.38259 47.4572 8.72714 46.281 8.72714H42.923V15.2726ZM44.0992 11.4746V9.65195H45.9578C46.4875 9.65195 46.7928 9.92131 46.7928 10.3523V10.7653C46.7928 11.1873 46.4965 11.4746 45.9758 11.4746H44.0992ZM44.0992 14.3388V12.3904H46.0296C46.5863 12.3904 46.9365 12.6418 46.9365 13.1806V13.5666C46.9365 14.0425 46.5684 14.3388 45.9309 14.3388H44.0992Z"
fill="#D7FF64"/>
<path
d="M49.6959 8.72714L52.174 12.579V14.1952H50.1898V15.2726H53.3772V12.579L55.8553 8.72714H54.4456L53.5119 10.2535L52.8744 11.3759H52.6679L52.0483 10.2715L51.1056 8.72714H49.6959Z"
fill="#D7FF64"/>
<path fill-rule="evenodd" clip-rule="evenodd"
d="M74.1824 7.63626C74.1824 7.03377 74.6708 6.54535 75.2733 6.54535H84.0006C84.6031 6.54535 85.0915 7.03377 85.0915 7.63626V9.81808H80.0733V8.94535H79.2006V10.6908H84.0006C84.6031 10.6908 85.0915 11.1792 85.0915 11.7817V16.3635C85.0915 16.966 84.6031 17.4544 84.0006 17.4544H75.2733C74.6708 17.4544 74.1824 16.966 74.1824 16.3635V14.1817L79.2006 14.1817V15.0544H80.0733V13.309L75.2733 13.309C74.6708 13.309 74.1824 12.8206 74.1824 12.2181V7.63626ZM63.4912 6.54545C62.8887 6.54545 62.4003 7.03387 62.4003 7.63636V17.4545H67.4185V14.1818H68.2912V17.4545H73.3094V7.63636C73.3094 7.03387 72.821 6.54545 72.2185 6.54545H63.4912ZM69.164 10.6909V11.5636H66.5458V10.6909H69.164ZM110.619 6.54545C110.016 6.54545 109.528 7.03387 109.528 7.63636V17.4545H114.546V14.1818H115.419V17.4545H120.437V7.63636C120.437 7.03387 119.948 6.54545 119.346 6.54545H110.619ZM116.291 10.6909V11.5636H113.673V10.6909H116.291ZM91.8549 8.29091H96.8731V11.3455C96.8731 11.9479 96.3847 12.4364 95.7822 12.4364H91.8549V13.3091H96.8731V17.4545H87.9276C87.3251 17.4545 86.8367 16.9661 86.8367 16.3636V12.4364H85.964V8.29091H86.8367V6.54545H91.8549V8.29091ZM108.655 7.63636C108.655 7.03387 108.166 6.54545 107.564 6.54545H97.7458V17.4545H102.764V14.1818H103.637V17.4545H108.655V13.3091H106.473V12.4364H107.564C108.166 12.4364 108.655 11.9479 108.655 11.3455V7.63636ZM104.509 10.6909V11.5636H101.891V10.6909H104.509ZM132.218 13.3091L126.327 13.3091V6.54547L121.309 6.54547V17.4546H132.218V13.3091Z"
fill="#D7FF64"/>
</svg>


View File

@@ -1,21 +1,7 @@
doc-valid-idents = [
"..",
"CodeQL",
"FastAPI",
"IPython",
"LangChain",
"LibCST",
"McCabe",
"NumPy",
"SCREAMING_SNAKE_CASE",
"SQLAlchemy",
"StackOverflow",
"PyCharm",
]
ignore-interior-mutability = [
# Interned is read-only. The wrapped `Rc` never gets updated.
"ruff_formatter::format_element::Interned",
# The expression is read-only.
"ruff_python_ast::hashable::HashableExpr",
"CodeQL",
"IPython",
"NumPy",
"..",
]

View File

@@ -0,0 +1,21 @@
[package]
name = "flake8-to-ruff"
version = "0.0.256"
edition = { workspace = true }
rust-version = { workspace = true }
[dependencies]
ruff = { path = "../ruff", default-features = false }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.2" }
once_cell = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }

View File

@@ -0,0 +1,98 @@
# flake8-to-ruff
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/charliermarsh/ruff).
Generates a Ruff-compatible `pyproject.toml` section.
## Installation and Usage
### Installation
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
```shell
pip install flake8-to-ruff
```
### Usage
To run `flake8-to-ruff`:
```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
```toml
[tool.ruff]
exclude = [
'.svn',
'CVS',
'.bzr',
'.hg',
'.git',
'__pycache__',
'.tox',
'.idea',
'.mypy_cache',
'.venv',
'node_modules',
'_state_machine.py',
'test_fstring.py',
'bad_coding2.py',
'badsyntax_*.py',
]
select = [
'A',
'E',
'F',
'Q',
]
ignore = []
[tool.ruff.flake8-quotes]
inline-quotes = 'single'
[tool.ruff.pep8-naming]
ignore-names = [
'foo',
'bar',
]
```
### Plugins
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.
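As a rough sketch (the `.flake8` contents below are purely illustrative), a configuration like this should be enough for `flake8-to-ruff` to infer the `flake8-docstrings` plugin:
```shell
# Write a minimal .flake8 that only sets a docstring convention.
cat > .flake8 <<'EOF'
[flake8]
docstring-convention = numpy
EOF
# The generated [tool.ruff] section should then reflect the inferred plugin.
flake8-to-ruff .flake8
```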
Alternatively, you can manually specify plugins on the command-line:
```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```
## Limitations
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
configuration options that don't exist in Flake8.)
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
codes from unsupported plugins. (See the [Ruff README](https://github.com/charliermarsh/ruff#user-content-how-does-ruff-compare-to-flake8)
for the complete list of supported plugins.)
## License
MIT
## Contributing
Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md).

View File

@@ -0,0 +1,65 @@
[build-system]
requires = [
# The minimum setuptools version is specific to the PEP 517 backend,
# and may be stricter than the version required in `setup.cfg`
"setuptools>=40.6.0,!=60.9.0",
"wheel",
# Must be kept in sync with the `install_requirements` in `setup.cfg`
"cffi>=1.12; platform_python_implementation != 'PyPy'",
"setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 79
target-version = ["py36"]
[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
"skip_fips: this test is not executed in FIPS mode",
"supported: parametrized test requiring only_if and skip_message",
]
[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true
[[tool.mypy.overrides]]
module = [
"pretend"
]
ignore_missing_imports = true
[tool.coverage.run]
branch = true
relative_files = true
source = [
"cryptography",
"tests/",
]
[tool.coverage.paths]
source = [
"src/cryptography",
"*.tox/*/lib*/python*/site-packages/cryptography",
"*.tox\\*\\Lib\\site-packages\\cryptography",
"*.tox/pypy/site-packages/cryptography",
]
tests =[
"tests/",
"*tests\\",
]
[tool.coverage.report]
exclude_lines = [
"@abc.abstractmethod",
"@abc.abstractproperty",
"@typing.overload",
"if typing.TYPE_CHECKING",
]

View File

@@ -0,0 +1,91 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
Documentation=https://cryptography.io/
Source=https://github.com/pyca/cryptography/
Issues=https://github.com/pyca/cryptography/issues
Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: Apache Software License
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: MacOS :: MacOS X
Operating System :: POSIX
Operating System :: POSIX :: BSD
Operating System :: POSIX :: Linux
Operating System :: Microsoft :: Windows
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: Security :: Cryptography
[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
=src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
cffi >=1.12
[options.packages.find]
where = src
exclude =
_cffi_src
_cffi_src.*
[options.extras_require]
test =
pytest>=6.2.0
pytest-benchmark
pytest-cov
pytest-subtests
pytest-xdist
pretend
iso8601
pytz
hypothesis>=1.11.4,!=3.79.2
docs =
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
sphinx_rtd_theme
docstest =
pyenchant >= 1.6.11
twine >= 1.12.0
sphinxcontrib-spelling >= 4.0.1
sdist =
setuptools_rust >= 0.11.4
pep8test =
black
flake8
flake8-import-order
pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
bcrypt >= 3.1.5
[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests

View File

@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
.cache,
.github,
docs,
jupyterhub/alembic*,
onbuild,
scripts,
share,
tools,
setup.py

View File

@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404
# Misc
F401, F403, F405, F841, E501, E731, E402, F811, F821,
# Plug-in: flake8-builtins
A001, A002, A003,
# Plug-in: flake8-bugbear
B006, B007, B008, B009, B010, B903, B950,
# Plug-in: flake8-simplify
SIM105, SIM106, SIM119,
# Plug-in: flake8-comprehensions
C901
# Plug-in: flake8-pytest-style
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
# Plug-in: flake8-docstrings
D100, D101, D102, D103, D104, D105, D106, D107,
D200, D202, D204, D205, D209,
D301,
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
# Plug-in: flake8-rst-docstrings
RST201, RST203, RST210, RST212, RST213, RST215,
RST301, RST303,

View File

@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
E203,
# SIM106: Handle error-cases first
SIM106,
# ANN101: Missing type annotation for self in method
ANN101,
# ANN102: Missing type annotation for cls in classmethod
ANN102,
# PIE781: assign-and-return
PIE781,
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
PIE798,
per-file-ignores =
# TC002: Move third-party import '...' into a type-checking block
__init__.py:TC002,
# ANN201: Missing return type annotation for public function
tests/test_*:ANN201
tests/**/test_*:ANN201
extend-exclude =
# Frozen and not subject to change in this repo:
get-poetry.py,
install-poetry.py,
# External to the project's coding standards:
tests/fixtures/*,
tests/**/fixtures/*,

View File

@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
B311,W503,E226,S311,T000,E731
# Missing Docstrings
D100,D104,D105,D107,
# Docstring Whitespace
D203,D212,D214,D215,
# Docstring Quotes
D301,D302,
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN

View File

@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
requests/__init__.py:E402, F401
requests/compat.py:E402, F401
tests/compat.py:F401

View File

@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"
[project.urls]
repository = "https://github.com/charliermarsh/ruff#subdirectory=crates/flake8_to_ruff"
[build-system]
requires = ["maturin>=0.14,<0.15"]
build-backend = "maturin"
[tool.maturin]
bindings = "bin"
strip = true

View File

@@ -0,0 +1,68 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use ruff::flake8_to_ruff::{self, ExternalConfig};
use ruff::logging::{set_up_logging, LogLevel};
#[derive(Parser)]
#[command(
about = "Convert existing Flake8 configuration to Ruff.",
long_about = None
)]
struct Args {
/// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
/// `.flake8`).
#[arg(required = true)]
file: PathBuf,
/// Optional path to a `pyproject.toml` file, used to ensure compatibility
/// with Black.
#[arg(long)]
pyproject: Option<PathBuf>,
/// List of plugins to enable.
#[arg(long, value_delimiter = ',')]
plugin: Option<Vec<flake8_to_ruff::Plugin>>,
}
fn main() -> Result<()> {
set_up_logging(&LogLevel::Default)?;
let args = Args::parse();
// Read the INI file.
let mut ini = Ini::new_cs();
ini.set_multiline(true);
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
// Read the pyproject.toml file.
let pyproject = args.pyproject.map(flake8_to_ruff::parse).transpose()?;
let external_config = pyproject
.as_ref()
.and_then(|pyproject| pyproject.tool.as_ref())
.map(|tool| ExternalConfig {
black: tool.black.as_ref(),
isort: tool.isort.as_ref(),
..Default::default()
})
.unwrap_or_default();
let external_config = ExternalConfig {
project: pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
..external_config
};
// Create Ruff's pyproject.toml section.
let pyproject = flake8_to_ruff::convert(&config, &external_config, args.plugin)?;
#[allow(clippy::print_stdout)]
{
println!("{}", toml::to_string_pretty(&pyproject)?);
}
Ok(())
}

View File

@@ -1,39 +0,0 @@
[package]
name = "red_knot"
version = "0.0.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
rayon = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }
[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }
[lints]
workspace = true

Binary file not shown (40 KiB)

View File

@@ -1,128 +0,0 @@
# Tracing
Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing. They should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.
## Verbosity levels
The CLI supports different verbosity levels.
- default: Only show errors and warnings.
- `-v` activates `info!`: Shows generally useful information, such as the paths of configuration files and the detected platform. It isn't a lot of messages, and it's the kind of level you'd activate in CI by default, much like `cargo build` showing you which packages are fresh.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of it is wasted, it's really slow, and we dump e.g. the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating (see the example after this list).
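For illustration, a typical progression might look like the following sketch. The `--current-directory` path is just an example, and the exact way you invoke `red_knot` may differ in your setup:
```bash
# Generally useful information only (configuration paths, detected platform, ...)
red_knot --current-directory=../test -v
# Enough detail for most bug reports, with timestamps
red_knot --current-directory=../test -vv
# Everything, including tracing spans; filter with RED_KNOT_LOG to keep the output manageable
RED_KNOT_LOG="red_knot_python_semantic::types" red_knot --current-directory=../test -vvv
```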
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing log output where messages from different threads are interleaved.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
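For example, both variables can be combined. This is only a sketch, reusing the illustrative `--current-directory` path from above:
```bash
# Single-threaded checking with debug-level messages: slower, but the log reads top to bottom
RAYON_NUM_THREADS=1 RED_KNOT_LOG=debug red_knot --current-directory=../test -vv
```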
### Examples
#### Show all debug messages
Shows debug messages from all crates.
```bash
RED_KNOT_LOG=debug
```
#### Show salsa query execution messages
Show the salsa `execute: my_query` messages in addition to all red knot messages.
```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```
#### Show typing traces
Only show traces for the `red_knot_python_semantic::types` module.
```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```
Note: Ensure that you use `-vvv` to see tracing spans.
#### Show messages for a single file
Shows all messages that are inside of a span for a specific file.
```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```
**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.
**Note**: Salsa currently logs entire memoized values, which in our case includes the source text and parsed AST.
This very quickly leads to extremely long output.
## Tracing and Salsa
Be mindful about using `tracing` in Salsa queries, especially `warn` or `error`, because it isn't guaranteed
that the query will re-execute after restoring from a persistent cache, in which case the user won't see the message.
For example, don't use `tracing` to show the user a message when generating a lint violation fails
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator.
## Tracing in tests
You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.
```rust
use ruff_db::testing::setup_logging;
#[test]
fn test() {
let _logging = setup_logging();
tracing::info!("This message will be printed to stderr");
}
```
Note: Most test runners capture stderr and only show its output when a test fails.
Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.
## Release builds
`trace!` events are removed in release builds.
## Profiling
Red Knot writes a folded stack trace named `tracing.folded` to the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.
```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```
You can convert the textual representation into a visual one using `inferno`.
```shell
cargo install inferno
```
```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg
# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```
![Example flamegraph](./tracing-flamegraph.png)
See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.

View File

@@ -1,254 +0,0 @@
//! Sets up logging for Red Knot
use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;
/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
#[arg(
long,
short = 'v',
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
action = clap::ArgAction::Count,
global = true,
)]
verbose: u8,
}
impl Verbosity {
/// Returns the verbosity level based on the number of `-v` flags.
///
/// Returns `None` if the user did not specify any verbosity flags.
pub(crate) fn level(&self) -> VerbosityLevel {
match self.verbose {
0 => VerbosityLevel::Default,
1 => VerbosityLevel::Verbose,
2 => VerbosityLevel::ExtraVerbose,
_ => VerbosityLevel::Trace,
}
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
Default,
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
/// Corresponds to `-v`.
Verbose,
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to the [`DEBUG`](tracing::Level::DEBUG) level.
/// Corresponds to `-vv`.
ExtraVerbose,
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
Trace,
}
impl VerbosityLevel {
const fn level_filter(self) -> LevelFilter {
match self {
VerbosityLevel::Default => LevelFilter::WARN,
VerbosityLevel::Verbose => LevelFilter::INFO,
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
VerbosityLevel::Trace => LevelFilter::TRACE,
}
}
pub(crate) const fn is_trace(self) -> bool {
matches!(self, VerbosityLevel::Trace)
}
pub(crate) const fn is_extra_verbose(self) -> bool {
matches!(self, VerbosityLevel::ExtraVerbose)
}
}
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
use tracing_subscriber::prelude::*;
// The `RED_KNOT_LOG` environment variable overrides the default log level.
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
EnvFilter::builder()
.parse(log_env_variable)
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
} else {
match level {
VerbosityLevel::Default => {
// Show warning traces
EnvFilter::default().add_directive(LevelFilter::WARN.into())
}
level => {
let level_filter = level.level_filter();
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
let filter = EnvFilter::default().add_directive(
format!("red_knot={level_filter}")
.parse()
.expect("Hardcoded directive to be valid"),
);
filter.add_directive(
format!("ruff={level_filter}")
.parse()
.expect("Hardcoded directive to be valid"),
)
}
}
};
let (profiling_layer, guard) = setup_profile();
let registry = tracing_subscriber::registry()
.with(filter)
.with(profiling_layer);
if level.is_trace() {
let subscriber = registry.with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_thread_ids(true)
.with_targets(true)
.with_writer(std::io::stderr)
.with_timer(tracing_tree::time::Uptime::default()),
);
subscriber.init();
} else {
let subscriber = registry.with(
tracing_subscriber::fmt::layer()
.event_format(RedKnotFormat {
display_level: true,
display_timestamp: level.is_extra_verbose(),
show_spans: false,
})
.with_writer(std::io::stderr),
);
subscriber.init();
}
Ok(TracingGuard {
_flame_guard: guard,
})
}
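/// Builds the optional `tracing-flame` profiling layer.
///
/// When the `RED_KNOT_LOG_PROFILE` environment variable is set to `1` or `true`, this returns a
/// layer that writes a folded stack trace to `tracing.folded` in the current directory, together
/// with its flush guard; otherwise both are `None`.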
#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
S: Subscriber + for<'span> LookupSpan<'span>,
{
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
.expect("Flame layer to be created");
(Some(layer), Some(guard))
} else {
(None, None)
}
}
pub(crate) struct TracingGuard {
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}
struct RedKnotFormat {
display_timestamp: bool,
display_level: bool,
show_spans: bool,
}
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
S: Subscriber + for<'a> LookupSpan<'a>,
N: for<'a> FormatFields<'a> + 'static,
{
fn format_event(
&self,
ctx: &FmtContext<'_, S, N>,
mut writer: Writer<'_>,
event: &Event<'_>,
) -> fmt::Result {
let meta = event.metadata();
let ansi = writer.has_ansi_escapes();
if self.display_timestamp {
let timestamp = chrono::Local::now()
.format("%Y-%m-%d %H:%M:%S.%f")
.to_string();
if ansi {
write!(writer, "{} ", timestamp.dimmed())?;
} else {
write!(
writer,
"{} ",
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
)?;
}
}
if self.display_level {
let level = meta.level();
// Same colors as tracing
if ansi {
let formatted_level = level.to_string();
match *level {
tracing::Level::TRACE => {
write!(writer, "{} ", formatted_level.purple().bold())?;
}
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
}
} else {
write!(writer, "{level} ")?;
}
}
if self.show_spans {
let span = event.parent();
let mut seen = false;
let span = span
.and_then(|id| ctx.span(id))
.or_else(|| ctx.lookup_current());
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
for span in scope {
seen = true;
if ansi {
write!(writer, "{}:", span.metadata().name().bold())?;
} else {
write!(writer, "{}:", span.metadata().name())?;
}
}
if seen {
writer.write_char(' ')?;
}
}
ctx.field_format().format_fields(writer.by_ref(), event)?;
writeln!(writer)
}
}

View File

@@ -1,384 +0,0 @@
use std::process::{ExitCode, Termination};
use std::sync::Mutex;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::plumbing::ZalsaDatabase;
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;
use crate::logging::{setup_tracing, Verbosity};
mod logging;
mod target_version;
mod verbosity;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
#[command(subcommand)]
pub(crate) command: Option<Command>,
#[arg(
long,
help = "Changes the current working directory.",
long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
value_name = "PATH"
)]
current_directory: Option<SystemPathBuf>,
#[arg(
long,
help = "Path to the virtual environment the project uses",
long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
value_name = "PATH"
)]
venv_path: Option<SystemPathBuf>,
#[arg(
long,
value_name = "DIRECTORY",
help = "Custom directory to use for stdlib typeshed stubs"
)]
custom_typeshed_dir: Option<SystemPathBuf>,
#[arg(
long,
value_name = "PATH",
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
)]
extra_search_path: Option<Vec<SystemPathBuf>>,
#[arg(
long,
help = "Python version to assume when resolving types",
value_name = "VERSION"
)]
target_version: Option<TargetVersion>,
#[clap(flatten)]
verbosity: Verbosity,
#[arg(
long,
help = "Run in watch mode by re-running whenever files change",
short = 'W'
)]
watch: bool,
}
impl Args {
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
let mut configuration = Configuration::default();
if let Some(target_version) = self.target_version {
configuration.target_version = Some(target_version.into());
}
if let Some(venv_path) = &self.venv_path {
configuration.search_paths.site_packages = Some(SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
});
}
if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
configuration.search_paths.custom_typeshed =
Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
}
if let Some(extra_search_paths) = &self.extra_search_path {
configuration.search_paths.extra_paths = extra_search_paths
.iter()
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
.collect();
}
configuration
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
Server,
}
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
run().unwrap_or_else(|error| {
use std::io::Write;
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
let mut stderr = std::io::stderr().lock();
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
// some reason (e.g. failed to resolve the configuration)
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
// Currently we generally only see one error, but e.g. with io errors when resolving
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
// "failed to read file: subdir/pyproject.toml")
for cause in error.chain() {
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
}
ExitStatus::Error
})
}
fn run() -> anyhow::Result<ExitStatus> {
let args = Args::parse_from(std::env::args().collect::<Vec<_>>());
if matches!(args.command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
// The base path that all CLI arguments are relative to.
let cli_base_path = {
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
SystemPathBuf::from_path_buf(cwd)
.map_err(|path| {
anyhow!(
"The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
path.display()
)
})?
};
let cwd = args
.current_directory
.as_ref()
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided current-directory path '{cwd}' is not a directory."
))
}
})
.transpose()?
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd.clone());
let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = WorkspaceMetadata::from_path(
system.current_directory(),
&system,
Some(cli_configuration.clone()),
)?;
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
ctrlc::set_handler(move || {
let mut lock = main_loop_cancellation_token.lock().unwrap();
if let Some(token) = lock.take() {
token.stop();
}
})?;
let exit_status = if args.watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
};
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
std::mem::forget(db);
Ok(exit_status)
}
#[derive(Copy, Clone)]
pub enum ExitStatus {
/// Checking was successful and there were no errors.
Success = 0,
/// Checking was successful but there were errors.
Failure = 1,
/// Checking failed.
Error = 2,
}
impl Termination for ExitStatus {
fn report(self) -> ExitCode {
ExitCode::from(self as u8)
}
}
struct MainLoop {
/// Sender that can be used to send messages to the main loop.
sender: crossbeam_channel::Sender<MainLoopMessage>,
/// Receiver for the messages sent **to** the main loop.
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
/// The file system watcher, if running in watch mode.
watcher: Option<WorkspaceWatcher>,
cli_configuration: Configuration,
}
impl MainLoop {
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);
(
Self {
sender: sender.clone(),
receiver,
watcher: None,
cli_configuration,
},
MainLoopCancellationToken { sender },
)
}
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
tracing::debug!("Starting watch mode");
let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| {
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
})?;
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
self.run(db);
Ok(ExitStatus::Success)
}
fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
let result = self.main_loop(db);
tracing::debug!("Exiting main loop");
result
}
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
// Schedule the first check.
tracing::debug!("Starting main loop");
let mut revision = 0u64;
while let Ok(message) = self.receiver.recv() {
match message {
MainLoopMessage::CheckWorkspace => {
let db = db.snapshot();
let sender = self.sender.clone();
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
// to prevent blocking the main loop here.
rayon::spawn(move || {
if let Ok(result) = db.check() {
// Send the result back to the main loop for printing.
sender
.send(MainLoopMessage::CheckCompleted { result, revision })
.unwrap();
}
});
}
MainLoopMessage::CheckCompleted {
result,
revision: check_revision,
} => {
let has_diagnostics = !result.is_empty();
if check_revision == revision {
for diagnostic in result {
tracing::error!("{}", diagnostic);
}
} else {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
}
if self.watcher.is_none() {
return if has_diagnostics {
ExitStatus::Failure
} else {
ExitStatus::Success
};
}
tracing::trace!("Counts after last check:\n{}", countme::get_all());
}
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes, Some(&self.cli_configuration));
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
}
MainLoopMessage::Exit => {
// Cancel any pending queries and wait for them to complete.
// TODO: Don't use Salsa internal APIs
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
let _ = db.zalsa_mut();
return ExitStatus::Success;
}
}
tracing::debug!("Waiting for next main loop message.");
}
ExitStatus::Success
}
}
#[derive(Debug)]
struct MainLoopCancellationToken {
sender: crossbeam_channel::Sender<MainLoopMessage>,
}
impl MainLoopCancellationToken {
fn stop(self) {
self.sender.send(MainLoopMessage::Exit).unwrap();
}
}
/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
CheckWorkspace,
CheckCompleted { result: Vec<String>, revision: u64 },
ApplyChanges(Vec<watch::ChangeEvent>),
Exit,
}

View File

@@ -1,48 +0,0 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
Py37,
#[default]
Py38,
Py39,
Py310,
Py311,
Py312,
Py313,
}
impl TargetVersion {
const fn as_str(self) -> &'static str {
match self {
Self::Py37 => "py37",
Self::Py38 => "py38",
Self::Py39 => "py39",
Self::Py310 => "py310",
Self::Py311 => "py311",
Self::Py312 => "py312",
Self::Py313 => "py313",
}
}
}
impl std::fmt::Display for TargetVersion {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.as_str())
}
}
impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
fn from(value: TargetVersion) -> Self {
match value {
TargetVersion::Py37 => Self::PY37,
TargetVersion::Py38 => Self::PY38,
TargetVersion::Py39 => Self::PY39,
TargetVersion::Py310 => Self::PY310,
TargetVersion::Py311 => Self::PY311,
TargetVersion::Py312 => Self::PY312,
TargetVersion::Py313 => Self::PY313,
}
}
}

View File

@@ -1 +0,0 @@

File diff suppressed because it is too large

View File

@@ -1,48 +0,0 @@
[package]
name = "red_knot_python_semantic"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }
anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
ruff_vendored = { workspace = true }
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
[lints]
workspace = true


@@ -1,9 +0,0 @@
# Red Knot
Semantic analysis for the red-knot project.
## Vendored types for the stdlib
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).


@@ -1,163 +0,0 @@
use std::hash::Hash;
use std::ops::Deref;
use ruff_db::parsed::ParsedModule;
/// Ref-counted owned reference to an AST node.
///
/// The type holds an owned reference to the node's ref-counted [`ParsedModule`].
/// Holding on to the node's [`ParsedModule`] guarantees that the reference to the
/// node remains valid.
///
/// Holding on to any [`AstNodeRef`] prevents the [`ParsedModule`] from being released.
///
/// ## Equality
/// Two `AstNodeRef`s are considered equal if their wrapped nodes are equal.
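///
/// A minimal usage sketch, mirroring the unit tests at the bottom of this file
/// (the `"1 + 2"` source is an arbitrary example):
///
/// ```ignore
/// let parsed = ParsedModule::new(parse_unchecked_source("1 + 2", PySourceType::Python));
/// let stmt = &parsed.syntax().body[0];
/// // SAFETY: `stmt` belongs to `parsed`.
/// let a = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
/// let b = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
/// assert_eq!(a, b); // equality compares the wrapped nodes
/// ```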
#[derive(Clone)]
pub struct AstNodeRef<T> {
/// Owned reference to the node's [`ParsedModule`].
///
/// The node's reference is guaranteed to remain valid as long as its enclosing
/// [`ParsedModule`] is alive.
_parsed: ParsedModule,
/// Pointer to the referenced node.
node: std::ptr::NonNull<T>,
}
#[allow(unsafe_code)]
impl<T> AstNodeRef<T> {
/// Creates a new `AstNodeRef` that references `node`. `parsed` is the [`ParsedModule`] to
/// which the `AstNodeRef` belongs.
///
/// ## Safety
///
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
/// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
/// the invariant `node belongs to parsed` is upheld.
pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
Self {
_parsed: parsed,
node: std::ptr::NonNull::from(node),
}
}
/// Returns a reference to the wrapped node.
pub fn node(&self) -> &T {
// SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
// alive and not moved.
unsafe { self.node.as_ref() }
}
}
impl<T> Deref for AstNodeRef<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
self.node()
}
}
impl<T> std::fmt::Debug for AstNodeRef<T>
where
T: std::fmt::Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("AstNodeRef").field(&self.node()).finish()
}
}
impl<T> PartialEq for AstNodeRef<T>
where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.node().eq(other.node())
}
}
impl<T> Eq for AstNodeRef<T> where T: Eq {}
impl<T> Hash for AstNodeRef<T>
where
T: Hash,
{
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.node().hash(state);
}
}
#[allow(unsafe_code)]
unsafe impl<T> Send for AstNodeRef<T> where T: Send {}
#[allow(unsafe_code)]
unsafe impl<T> Sync for AstNodeRef<T> where T: Sync {}
#[cfg(test)]
mod tests {
use crate::ast_node_ref::AstNodeRef;
use ruff_db::parsed::ParsedModule;
use ruff_python_ast::PySourceType;
use ruff_python_parser::parse_unchecked_source;
#[test]
#[allow(unsafe_code)]
fn equality() {
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
let parsed = ParsedModule::new(parsed_raw.clone());
let stmt = &parsed.syntax().body[0];
let node1 = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
let node2 = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
assert_eq!(node1, node2);
// Compare from different trees
let cloned = ParsedModule::new(parsed_raw);
let stmt_cloned = &cloned.syntax().body[0];
let cloned_node = unsafe { AstNodeRef::new(cloned.clone(), stmt_cloned) };
assert_eq!(node1, cloned_node);
let other_raw = parse_unchecked_source("2 + 2", PySourceType::Python);
let other = ParsedModule::new(other_raw);
let other_stmt = &other.syntax().body[0];
let other_node = unsafe { AstNodeRef::new(other.clone(), other_stmt) };
assert_ne!(node1, other_node);
}
#[allow(unsafe_code)]
#[test]
fn inequality() {
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
let parsed = ParsedModule::new(parsed_raw.clone());
let stmt = &parsed.syntax().body[0];
let node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
let other_raw = parse_unchecked_source("2 + 2", PySourceType::Python);
let other = ParsedModule::new(other_raw);
let other_stmt = &other.syntax().body[0];
let other_node = unsafe { AstNodeRef::new(other.clone(), other_stmt) };
assert_ne!(node, other_node);
}
#[test]
#[allow(unsafe_code)]
fn debug() {
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
let parsed = ParsedModule::new(parsed_raw.clone());
let stmt = &parsed.syntax().body[0];
let stmt_node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
let debug = format!("{stmt_node:?}");
assert_eq!(debug, format!("AstNodeRef({stmt:?})"));
}
}


@@ -1,111 +0,0 @@
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};
/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
fn is_file_open(&self, file: File) -> bool;
}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::Arc;
use ruff_db::files::{File, Files};
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use super::Db;
#[salsa::db]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
files: Files,
system: TestSystem,
vendored: VendoredFileSystem,
events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
}
impl TestDb {
pub(crate) fn new() -> Self {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: ruff_vendored::file_system().clone(),
events: std::sync::Arc::default(),
files: Files::default(),
}
}
/// Takes the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
let events = inner.get_mut().unwrap();
std::mem::take(&mut *events)
}
/// Clears the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn clear_salsa_events(&mut self) {
self.take_salsa_events();
}
}
impl DbWithTestSystem for TestDb {
fn test_system(&self) -> &TestSystem {
&self.system
}
fn test_system_mut(&mut self) -> &mut TestSystem {
&mut self.system
}
}
#[salsa::db]
impl SourceDb for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
}
fn system(&self) -> &dyn System {
&self.system
}
fn files(&self) -> &Files {
&self.files
}
}
impl Upcast<dyn SourceDb> for TestDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl Db for TestDb {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}
}
#[salsa::db]
impl salsa::Database for TestDb {
fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
let event = event();
tracing::trace!("event: {event:?}");
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
}


@@ -1,25 +0,0 @@
use std::hash::BuildHasherDefault;
use rustc_hash::FxHasher;
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, Module};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};
pub mod ast_node_ref;
mod db;
mod module_name;
mod module_resolver;
mod node_key;
mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod stdlib;
pub mod types;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;


@@ -1,216 +0,0 @@
use std::fmt;
use std::ops::Deref;
use compact_str::{CompactString, ToCompactString};
use ruff_python_stdlib::identifiers::is_identifier;
/// A module name, e.g. `foo.bar`.
///
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ModuleName(compact_str::CompactString);
impl ModuleName {
/// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
/// module name and `None` otherwise.
///
/// The module name is invalid if:
///
/// * The name is empty
/// * The name is relative
/// * The name ends with a `.`
/// * The name contains a sequence of multiple dots
/// * A component of a name (the part between two dots) isn't a valid Python identifier.
#[inline]
#[must_use]
pub fn new(name: &str) -> Option<Self> {
Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
}
/// Creates a new module name for `name` where `name` is a static string.
/// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
///
/// The module name is invalid if:
///
/// * The name is empty
/// * The name is relative
/// * The name ends with a `.`
/// * The name contains a sequence of multiple dots
/// * A component of a name (the part between two dots) isn't a valid Python identifier.
///
/// ## Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
/// assert_eq!(ModuleName::new_static(""), None);
/// assert_eq!(ModuleName::new_static("..foo"), None);
/// assert_eq!(ModuleName::new_static(".foo"), None);
/// assert_eq!(ModuleName::new_static("foo."), None);
/// assert_eq!(ModuleName::new_static("foo..bar"), None);
/// assert_eq!(ModuleName::new_static("2000"), None);
/// ```
#[inline]
#[must_use]
pub fn new_static(name: &'static str) -> Option<Self> {
Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
}
#[must_use]
fn is_valid_name(name: &str) -> bool {
!name.is_empty() && name.split('.').all(is_identifier)
}
/// An iterator over the components of the module name:
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
/// ```
#[must_use]
pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
self.0.split('.')
}
/// The name of this module's immediate parent, if it has a parent.
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
/// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
/// ```
#[must_use]
pub fn parent(&self) -> Option<ModuleName> {
let (parent, _) = self.0.rsplit_once('.')?;
Some(Self(parent.to_compact_string()))
}
/// Returns `true` if the name starts with `other`.
///
/// This is equivalent to checking if `self` is a sub-module of `other`.
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
///
/// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
/// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
/// ```
#[must_use]
pub fn starts_with(&self, other: &ModuleName) -> bool {
let mut self_components = self.components();
let other_components = other.components();
for other_component in other_components {
if self_components.next() != Some(other_component) {
return false;
}
}
true
}
#[must_use]
#[inline]
pub fn as_str(&self) -> &str {
&self.0
}
/// Construct a [`ModuleName`] from a sequence of parts.
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
/// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
/// assert_eq!(&*ModuleName::from_components(["a", "b", "c"]).unwrap(), "a.b.c");
///
/// assert_eq!(ModuleName::from_components(["a-b"]), None);
/// assert_eq!(ModuleName::from_components(["a", "a-b"]), None);
/// assert_eq!(ModuleName::from_components(["a", "b", "a-b-c"]), None);
/// ```
#[must_use]
pub fn from_components<'a>(components: impl IntoIterator<Item = &'a str>) -> Option<Self> {
let mut components = components.into_iter();
let first_part = components.next()?;
if !is_identifier(first_part) {
return None;
}
let name = if let Some(second_part) = components.next() {
if !is_identifier(second_part) {
return None;
}
let mut name = format!("{first_part}.{second_part}");
for part in components {
if !is_identifier(part) {
return None;
}
name.push('.');
name.push_str(part);
}
CompactString::from(&name)
} else {
CompactString::from(first_part)
};
Some(Self(name))
}
/// Extend `self` with the components of `other`
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// let mut module_name = ModuleName::new_static("foo").unwrap();
/// module_name.extend(&ModuleName::new_static("bar").unwrap());
/// assert_eq!(&module_name, "foo.bar");
/// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
/// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
/// ```
pub fn extend(&mut self, other: &ModuleName) {
self.0.push('.');
self.0.push_str(other);
}
}
impl Deref for ModuleName {
type Target = str;
#[inline]
fn deref(&self) -> &Self::Target {
self.as_str()
}
}
impl PartialEq<str> for ModuleName {
fn eq(&self, other: &str) -> bool {
self.as_str() == other
}
}
impl PartialEq<ModuleName> for str {
fn eq(&self, other: &ModuleName) -> bool {
self == other.as_str()
}
}
impl std::fmt::Display for ModuleName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}


@@ -1,45 +0,0 @@
use std::iter::FusedIterator;
pub use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
use crate::module_resolver::resolver::search_paths;
use crate::Db;
use resolver::SearchPathIterator;
mod module;
mod path;
mod resolver;
mod typeshed;
#[cfg(test)]
mod testing;
/// Returns an iterator over all search paths pointing to a system path
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
SystemModuleSearchPathsIter {
inner: search_paths(db),
}
}
pub struct SystemModuleSearchPathsIter<'db> {
inner: SearchPathIterator<'db>,
}
impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
type Item = &'db SystemPath;
fn next(&mut self) -> Option<Self::Item> {
loop {
let next = self.inner.next()?;
if let Some(system_path) = next.as_system_path() {
return Some(system_path);
}
}
}
}
impl FusedIterator for SystemModuleSearchPathsIter<'_> {}


@@ -1,85 +0,0 @@
use std::fmt::Formatter;
use std::sync::Arc;
use ruff_db::files::File;
use super::path::SearchPath;
use crate::module_name::ModuleName;
/// Representation of a Python module.
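///
/// A brief usage sketch (assumes a `module: Module` was already obtained, e.g. via
/// `resolve_module`):
///
/// ```ignore
/// let name = module.name();                    // absolute name, e.g. `foo.bar`
/// let is_package = module.kind().is_package(); // `foo/__init__.py(i)` vs `foo.py(i)`
/// let file = module.file();                    // file defining the module
/// ```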
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
inner: Arc<ModuleInner>,
}
impl Module {
pub(crate) fn new(
name: ModuleName,
kind: ModuleKind,
search_path: SearchPath,
file: File,
) -> Self {
Self {
inner: Arc::new(ModuleInner {
name,
kind,
search_path,
file,
}),
}
}
/// The absolute name of the module (e.g. `foo.bar`)
pub fn name(&self) -> &ModuleName {
&self.inner.name
}
/// The file containing the source code that defines this module
pub fn file(&self) -> File {
self.inner.file
}
/// The search path from which the module was resolved.
pub(crate) fn search_path(&self) -> &SearchPath {
&self.inner.search_path
}
/// Determine whether this module is a single-file module or a package
pub fn kind(&self) -> ModuleKind {
self.inner.kind
}
}
impl std::fmt::Debug for Module {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Module")
.field("name", &self.name())
.field("kind", &self.kind())
.field("file", &self.file())
.field("search_path", &self.search_path())
.finish()
}
}
#[derive(PartialEq, Eq)]
struct ModuleInner {
name: ModuleName,
kind: ModuleKind,
search_path: SearchPath,
file: File,
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
/// A single-file module (e.g. `foo.py` or `foo.pyi`)
Module,
/// A Python package (`foo/__init__.py` or `foo/__init__.pyi`)
Package,
}
impl ModuleKind {
pub const fn is_package(self) -> bool {
matches!(self, ModuleKind::Package)
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,302 +0,0 @@
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, SitePackages};
/// A test case for the module resolver.
///
/// You generally shouldn't construct instances of this struct directly;
/// instead, use the [`TestCaseBuilder`].
pub(crate) struct TestCase<T> {
pub(crate) db: TestDb,
pub(crate) src: SystemPathBuf,
pub(crate) stdlib: T,
// Most test cases only ever need a single `site-packages` directory,
// so this is a single directory instead of a `Vec` of directories,
// like it is in `ruff_db::Program`.
pub(crate) site_packages: SystemPathBuf,
pub(crate) target_version: PythonVersion,
}
/// A `(file_name, file_contents)` tuple
pub(crate) type FileSpec = (&'static str, &'static str);
/// Specification for a typeshed mock to be created as part of a test
#[derive(Debug, Clone, Copy, Default)]
pub(crate) struct MockedTypeshed {
/// The stdlib files to be created in the typeshed mock
pub(crate) stdlib_files: &'static [FileSpec],
/// The contents of the `stdlib/VERSIONS` file
/// to be created in the typeshed mock
pub(crate) versions: &'static str,
}
#[derive(Debug)]
pub(crate) struct VendoredTypeshed;
#[derive(Debug)]
pub(crate) struct UnspecifiedTypeshed;
/// A builder for a module-resolver test case.
///
/// The builder takes care of creating a [`TestDb`]
/// instance, applying the module resolver settings,
/// and creating mock directories for the stdlib, `site-packages`,
/// first-party code, etc.
///
/// For simple tests that do not involve typeshed,
/// test cases can be created as follows:
///
/// ```rs
/// let test_case = TestCaseBuilder::new()
/// .with_src_files(...)
/// .build();
///
/// let test_case2 = TestCaseBuilder::new()
/// .with_site_packages_files(...)
/// .build();
/// ```
///
/// Any test can specify the target Python version that should be used
/// in the module resolver settings:
///
/// ```rs
/// let test_case = TestCaseBuilder::new()
/// .with_src_files(...)
/// .with_target_version(...)
/// .build();
/// ```
///
/// For tests checking that standard-library module resolution is working
/// correctly, you should usually create a [`MockedTypeshed`] instance
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
/// If you need to check something that involves the vendored typeshed stubs
/// we include as part of the binary, you can instead use the
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
/// For either of these, you should almost always try to be explicit
/// about the Python version you want specified in the module-resolver
/// settings for the test:
///
/// ```rs
/// const TYPESHED = MockedTypeshed { ... };
///
/// let test_case = resolver_test_case()
/// .with_custom_typeshed(TYPESHED)
/// .with_target_version(...)
/// .build();
///
/// let test_case2 = resolver_test_case()
/// .with_vendored_typeshed()
/// .with_target_version(...)
/// .build();
/// ```
///
/// If you have not called one of those methods, the `stdlib` field
/// on the [`TestCase`] instance created from `.build()` will be set
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
typeshed_option: T,
target_version: PythonVersion,
first_party_files: Vec<FileSpec>,
site_packages_files: Vec<FileSpec>,
}
impl<T> TestCaseBuilder<T> {
/// Specify files to be created in the `src` mock directory
pub(crate) fn with_src_files(mut self, files: &[FileSpec]) -> Self {
self.first_party_files.extend(files.iter().copied());
self
}
/// Specify files to be created in the `site-packages` mock directory
pub(crate) fn with_site_packages_files(mut self, files: &[FileSpec]) -> Self {
self.site_packages_files.extend(files.iter().copied());
self
}
/// Specify the target Python version the module resolver should assume
pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
self.target_version = target_version;
self
}
fn write_mock_directory(
db: &mut TestDb,
location: impl AsRef<SystemPath>,
files: impl IntoIterator<Item = FileSpec>,
) -> SystemPathBuf {
let root = location.as_ref().to_path_buf();
// Make sure to create the directory even if the list of files is empty:
db.memory_file_system().create_directory_all(&root).unwrap();
db.write_files(
files
.into_iter()
.map(|(relative_path, contents)| (root.join(relative_path), contents)),
)
.unwrap();
root
}
}
impl TestCaseBuilder<UnspecifiedTypeshed> {
pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
Self {
typeshed_option: UnspecifiedTypeshed,
target_version: PythonVersion::default(),
first_party_files: vec![],
site_packages_files: vec![],
}
}
/// Use the vendored stdlib stubs included in the Ruff binary for this test case
pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
let TestCaseBuilder {
typeshed_option: _,
target_version,
first_party_files,
site_packages_files,
} = self;
TestCaseBuilder {
typeshed_option: VendoredTypeshed,
target_version,
first_party_files,
site_packages_files,
}
}
/// Use a mock typeshed directory for this test case
pub(crate) fn with_custom_typeshed(
self,
typeshed: MockedTypeshed,
) -> TestCaseBuilder<MockedTypeshed> {
let TestCaseBuilder {
typeshed_option: _,
target_version,
first_party_files,
site_packages_files,
} = self;
TestCaseBuilder {
typeshed_option: typeshed,
target_version,
first_party_files,
site_packages_files,
}
}
pub(crate) fn build(self) -> TestCase<()> {
let TestCase {
db,
src,
stdlib: _,
site_packages,
target_version,
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
TestCase {
db,
src,
stdlib: (),
site_packages,
target_version,
}
}
}
impl TestCaseBuilder<MockedTypeshed> {
pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
let TestCaseBuilder {
typeshed_option,
target_version,
first_party_files,
site_packages_files,
} = self;
let mut db = TestDb::new();
let site_packages =
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);
Program::from_settings(
&db,
&ProgramSettings {
target_version,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: Some(typeshed.clone()),
site_packages: SitePackages::Known(vec![site_packages.clone()]),
},
},
)
.expect("Valid program settings");
TestCase {
db,
src,
stdlib: typeshed.join("stdlib"),
site_packages,
target_version,
}
}
fn build_typeshed_mock(db: &mut TestDb, typeshed_to_build: &MockedTypeshed) -> SystemPathBuf {
let typeshed = SystemPathBuf::from("/typeshed");
let MockedTypeshed {
stdlib_files,
versions,
} = typeshed_to_build;
Self::write_mock_directory(
db,
typeshed.join("stdlib"),
stdlib_files
.iter()
.copied()
.chain(std::iter::once(("VERSIONS", *versions))),
);
typeshed
}
}
impl TestCaseBuilder<VendoredTypeshed> {
pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
let TestCaseBuilder {
typeshed_option: VendoredTypeshed,
target_version,
first_party_files,
site_packages_files,
} = self;
let mut db = TestDb::new();
let site_packages =
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
Program::from_settings(
&db,
&ProgramSettings {
target_version,
search_paths: SearchPathSettings {
site_packages: SitePackages::Known(vec![site_packages.clone()]),
..SearchPathSettings::new(src.clone())
},
},
)
.expect("Valid search path settings");
TestCase {
db,
src,
stdlib: VendoredPathBuf::from("stdlib"),
site_packages,
target_version,
}
}
}


@@ -1,701 +0,0 @@
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;
use rustc_hash::FxHashMap;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};
pub(in crate::module_resolver) fn vendored_typeshed_versions(db: &dyn Db) -> TypeshedVersions {
TypeshedVersions::from_str(
&db.vendored()
.read_to_string("stdlib/VERSIONS")
.expect("The vendored typeshed stubs should contain a VERSIONS file"),
)
.expect("The VERSIONS file in the vendored typeshed stubs should be well-formed")
}
pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
Program::get(db).search_paths(db).typeshed_versions()
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
line_number: Option<NonZeroU16>,
reason: TypeshedVersionsParseErrorKind,
}
impl fmt::Display for TypeshedVersionsParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let TypeshedVersionsParseError {
line_number,
reason,
} = self;
if let Some(line_number) = line_number {
write!(
f,
"Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
)
} else {
write!(f, "Error while parsing typeshed's VERSIONS file: {reason}")
}
}
}
impl std::error::Error for TypeshedVersionsParseError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
if let TypeshedVersionsParseErrorKind::IntegerParsingFailure { err, .. } = &self.reason {
Some(err)
} else {
None
}
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum TypeshedVersionsParseErrorKind {
TooManyLines(NonZeroUsize),
UnexpectedNumberOfColons,
InvalidModuleName(String),
UnexpectedNumberOfHyphens,
UnexpectedNumberOfPeriods(String),
IntegerParsingFailure {
version: String,
err: std::num::ParseIntError,
},
}
impl fmt::Display for TypeshedVersionsParseErrorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::TooManyLines(num_lines) => write!(
f,
"File has too many lines ({num_lines}); maximum allowed is {}",
NonZeroU16::MAX
),
Self::UnexpectedNumberOfColons => {
f.write_str("Expected every non-comment line to have exactly one colon")
}
Self::InvalidModuleName(name) => write!(
f,
"Expected all components of '{name}' to be valid Python identifiers"
),
Self::UnexpectedNumberOfHyphens => {
f.write_str("Expected every non-comment line to have exactly one '-' character")
}
Self::UnexpectedNumberOfPeriods(format) => write!(
f,
"Expected all versions to be in the form {{MAJOR}}.{{MINOR}}; got '{format}'"
),
Self::IntegerParsingFailure { version, err } => write!(
f,
"Failed to convert '{version}' to a pair of integers due to {err}",
),
}
}
}
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);
impl TypeshedVersions {
#[must_use]
fn exact(&self, module_name: &ModuleName) -> Option<&PyVersionRange> {
self.0.get(module_name)
}
#[must_use]
pub(in crate::module_resolver) fn query_module(
&self,
module: &ModuleName,
target_version: PythonVersion,
) -> TypeshedVersionsQueryResult {
if let Some(range) = self.exact(module) {
if range.contains(target_version) {
TypeshedVersionsQueryResult::Exists
} else {
TypeshedVersionsQueryResult::DoesNotExist
}
} else {
let mut module = module.parent();
while let Some(module_to_try) = module {
if let Some(range) = self.exact(&module_to_try) {
return {
if range.contains(target_version) {
TypeshedVersionsQueryResult::MaybeExists
} else {
TypeshedVersionsQueryResult::DoesNotExist
}
};
}
module = module_to_try.parent();
}
TypeshedVersionsQueryResult::DoesNotExist
}
}
}
/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {
/// The module definitely exists in the stdlib at runtime on the user-specified target version.
///
/// For example:
/// - The target version is Python 3.8
/// - We're querying whether the `asyncio.tasks` module exists in the stdlib
/// - The VERSIONS file contains the line `asyncio.tasks: 3.8-`
Exists,
/// The module definitely does not exist in the stdlib on the user-specified target version.
///
/// For example:
/// - We're querying whether the `foo` module exists in the stdlib
/// - There is no top-level `foo` module in VERSIONS
///
/// OR:
/// - The target version is Python 3.8
/// - We're querying whether the module `importlib.abc` exists in the stdlib
/// - The VERSIONS file contains the line `importlib.abc: 3.10-`,
/// indicating that the module was added in 3.10
///
/// OR:
/// - The target version is Python 3.8
/// - We're querying whether the module `collections.abc` exists in the stdlib
/// - The VERSIONS file does not contain any information about the `collections.abc` submodule,
/// but *does* contain the line `collections: 3.10-`,
/// indicating that the entire `collections` package was added in Python 3.10.
DoesNotExist,
/// The module potentially exists in the stdlib and, if it does,
/// it definitely exists on the user-specified target version.
///
/// This variant is only relevant for submodules,
/// for which the typeshed VERSIONS file does not provide comprehensive information.
/// (The VERSIONS file is guaranteed to provide information about all top-level stdlib modules and packages,
/// but not necessarily about all submodules within each top-level package.)
///
/// For example:
/// - The target version is Python 3.8
/// - We're querying whether the `asyncio.staggered` module exists in the stdlib
/// - The typeshed VERSIONS file contains the line `asyncio: 3.8-`,
/// indicating that the `asyncio` package was added in Python 3.8,
/// but does not contain any explicit information about the `asyncio.staggered` submodule.
MaybeExists,
}
impl FromStr for TypeshedVersions {
type Err = TypeshedVersionsParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut map = FxHashMap::default();
for (line_index, line) in s.lines().enumerate() {
// humans expect line numbers to be 1-indexed
let line_number = NonZeroUsize::new(line_index.saturating_add(1)).unwrap();
let Ok(line_number) = NonZeroU16::try_from(line_number) else {
return Err(TypeshedVersionsParseError {
line_number: None,
reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
});
};
let Some(content) = line.split('#').map(str::trim).next() else {
continue;
};
if content.is_empty() {
continue;
}
let mut parts = content.split(':').map(str::trim);
let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
else {
return Err(TypeshedVersionsParseError {
line_number: Some(line_number),
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
});
};
let Some(module_name) = ModuleName::new(module_name) else {
return Err(TypeshedVersionsParseError {
line_number: Some(line_number),
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
module_name.to_string(),
),
});
};
match PyVersionRange::from_str(rest) {
Ok(version) => map.insert(module_name, version),
Err(reason) => {
return Err(TypeshedVersionsParseError {
line_number: Some(line_number),
reason,
})
}
};
}
Ok(Self(map))
}
}
impl fmt::Display for TypeshedVersions {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let sorted_items: BTreeMap<&ModuleName, &PyVersionRange> = self.0.iter().collect();
for (module_name, range) in sorted_items {
writeln!(f, "{module_name}: {range}")?;
}
Ok(())
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum PyVersionRange {
AvailableFrom(RangeFrom<PythonVersion>),
AvailableWithin(RangeInclusive<PythonVersion>),
}
impl PyVersionRange {
#[must_use]
fn contains(&self, version: PythonVersion) -> bool {
match self {
Self::AvailableFrom(inner) => inner.contains(&version),
Self::AvailableWithin(inner) => inner.contains(&version),
}
}
}
impl FromStr for PyVersionRange {
type Err = TypeshedVersionsParseErrorKind;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('-').map(str::trim);
match (parts.next(), parts.next(), parts.next()) {
(Some(lower), Some(""), None) => {
let lower = PythonVersion::from_versions_file_string(lower)?;
Ok(Self::AvailableFrom(lower..))
}
(Some(lower), Some(upper), None) => {
let lower = PythonVersion::from_versions_file_string(lower)?;
let upper = PythonVersion::from_versions_file_string(upper)?;
Ok(Self::AvailableWithin(lower..=upper))
}
_ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
}
}
}
impl fmt::Display for PyVersionRange {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::AvailableFrom(range_from) => write!(f, "{}-", range_from.start),
Self::AvailableWithin(range_inclusive) => {
write!(f, "{}-{}", range_inclusive.start(), range_inclusive.end())
}
}
}
}
impl PythonVersion {
fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
let mut parts = s.split('.').map(str::trim);
let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
s.to_string(),
));
};
PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err: int_parse_error,
}
})
}
}
#[cfg(test)]
mod tests {
use std::num::{IntErrorKind, NonZeroU16};
use std::path::Path;
use insta::assert_snapshot;
use crate::db::tests::TestDb;
use super::*;
const TYPESHED_STDLIB_DIR: &str = "stdlib";
#[allow(unsafe_code)]
const ONE: Option<NonZeroU16> = Some(unsafe { NonZeroU16::new_unchecked(1) });
impl TypeshedVersions {
#[must_use]
fn contains_exact(&self, module: &ModuleName) -> bool {
self.exact(module).is_some()
}
#[must_use]
fn len(&self) -> usize {
self.0.len()
}
}
#[test]
fn can_parse_vendored_versions_file() {
let db = TestDb::new();
let versions = vendored_typeshed_versions(&db);
assert!(versions.len() > 100);
assert!(versions.len() < 1000);
let asyncio = ModuleName::new_static("asyncio").unwrap();
let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
let audioop = ModuleName::new_static("audioop").unwrap();
assert!(versions.contains_exact(&asyncio));
assert_eq!(
versions.query_module(&asyncio, PythonVersion::PY310),
TypeshedVersionsQueryResult::Exists
);
assert!(versions.contains_exact(&asyncio_staggered));
assert_eq!(
versions.query_module(&asyncio_staggered, PythonVersion::PY38),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
versions.query_module(&asyncio_staggered, PythonVersion::PY37),
TypeshedVersionsQueryResult::DoesNotExist
);
assert!(versions.contains_exact(&audioop));
assert_eq!(
versions.query_module(&audioop, PythonVersion::PY312),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
versions.query_module(&audioop, PythonVersion::PY313),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn typeshed_versions_consistent_with_vendored_stubs() {
let db = TestDb::new();
let vendored_typeshed_versions = vendored_typeshed_versions(&db);
let vendored_typeshed_dir =
Path::new(env!("CARGO_MANIFEST_DIR")).join("../ruff_vendored/vendor/typeshed");
let mut empty_iterator = true;
let stdlib_stubs_path = vendored_typeshed_dir.join(TYPESHED_STDLIB_DIR);
for entry in std::fs::read_dir(&stdlib_stubs_path).unwrap() {
empty_iterator = false;
let entry = entry.unwrap();
let absolute_path = entry.path();
let relative_path = absolute_path
.strip_prefix(&stdlib_stubs_path)
.unwrap_or_else(|_| panic!("Expected path to be a child of {stdlib_stubs_path:?} but found {absolute_path:?}"));
let relative_path_str = relative_path.as_os_str().to_str().unwrap_or_else(|| {
panic!("Expected all typeshed paths to be valid UTF-8; got {relative_path:?}")
});
if relative_path_str == "VERSIONS" {
continue;
}
let top_level_module = if let Some(extension) = relative_path.extension() {
// It was a file; strip off the file extension to get the module name:
let extension = extension
.to_str()
.unwrap_or_else(||panic!("Expected all file extensions to be UTF-8; was not true for {relative_path:?}"));
relative_path_str
.strip_suffix(extension)
.and_then(|string| string.strip_suffix('.')).unwrap_or_else(|| {
panic!("Expected path {relative_path_str:?} to end with computed extension {extension:?}")
})
} else {
// It was a directory; no need to do anything to get the module name
relative_path_str
};
let top_level_module = ModuleName::new(top_level_module)
.unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));
assert!(vendored_typeshed_versions.contains_exact(&top_level_module));
}
assert!(
!empty_iterator,
"Expected there to be at least one file or directory in the vendored typeshed stubs"
);
}
#[test]
fn can_parse_mock_versions_file() {
const VERSIONS: &str = "\
# a comment
# some more comment
# yet more comment
# and some more comment
bar: 2.7-3.10
# more comment
bar.baz: 3.1-3.9
foo: 3.8- # trailing comment
";
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
assert_eq!(parsed_versions.len(), 3);
assert_snapshot!(parsed_versions.to_string(), @r###"
bar: 2.7-3.10
bar.baz: 3.1-3.9
foo: 3.8-
"###
);
}
#[test]
fn version_within_range_parsed_correctly() {
let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
let bar = ModuleName::new_static("bar").unwrap();
assert!(parsed_versions.contains_exact(&bar));
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY37),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY310),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY311),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn version_from_range_parsed_correctly() {
let parsed_versions = TypeshedVersions::from_str("foo: 3.8-").unwrap();
let foo = ModuleName::new_static("foo").unwrap();
assert!(parsed_versions.contains_exact(&foo));
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY37),
TypeshedVersionsQueryResult::DoesNotExist
);
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY38),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY311),
TypeshedVersionsQueryResult::Exists
);
}
#[test]
fn explicit_submodule_parsed_correctly() {
let parsed_versions = TypeshedVersions::from_str("bar.baz: 3.1-3.9").unwrap();
let bar_baz = ModuleName::new_static("bar.baz").unwrap();
assert!(parsed_versions.contains_exact(&bar_baz));
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn implicit_submodule_queried_correctly() {
let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();
assert!(!parsed_versions.contains_exact(&bar_eggs));
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
TypeshedVersionsQueryResult::MaybeExists
);
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
TypeshedVersionsQueryResult::MaybeExists
);
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn nonexistent_module_queried_correctly() {
let parsed_versions = TypeshedVersions::from_str("eggs: 3.8-").unwrap();
let spam = ModuleName::new_static("spam").unwrap();
assert!(!parsed_versions.contains_exact(&spam));
assert_eq!(
parsed_versions.query_module(&spam, PythonVersion::PY37),
TypeshedVersionsQueryResult::DoesNotExist
);
assert_eq!(
parsed_versions.query_module(&spam, PythonVersion::PY313),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn invalid_huge_versions_file() {
let offset = 100;
let too_many = u16::MAX as usize + offset;
let mut massive_versions_file = String::new();
for i in 0..too_many {
massive_versions_file.push_str(&format!("x{i}: 3.8-\n"));
}
assert_eq!(
TypeshedVersions::from_str(&massive_versions_file),
Err(TypeshedVersionsParseError {
line_number: None,
reason: TypeshedVersionsParseErrorKind::TooManyLines(
NonZeroUsize::new(too_many + 1 - offset).unwrap()
)
})
);
}
#[test]
fn invalid_typeshed_versions_bad_colon_number() {
assert_eq!(
TypeshedVersions::from_str("foo 3.7"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
})
);
assert_eq!(
TypeshedVersions::from_str("foo:: 3.7"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
})
);
}
#[test]
fn invalid_typeshed_versions_non_identifier_modules() {
assert_eq!(
TypeshedVersions::from_str("not!an!identifier!: 3.7"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
"not!an!identifier!".to_string()
)
})
);
assert_eq!(
TypeshedVersions::from_str("(also_not).(an_identifier): 3.7"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
"(also_not).(an_identifier)".to_string()
)
})
);
}
#[test]
fn invalid_typeshed_versions_bad_hyphen_number() {
assert_eq!(
TypeshedVersions::from_str("foo: 3.8"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
})
);
assert_eq!(
TypeshedVersions::from_str("foo: 3.8--"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
})
);
assert_eq!(
TypeshedVersions::from_str("foo: 3.8--3.9"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
})
);
}
#[test]
fn invalid_typeshed_versions_bad_period_number() {
assert_eq!(
TypeshedVersions::from_str("foo: 38-"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods("38".to_string())
})
);
assert_eq!(
TypeshedVersions::from_str("foo: 3..8-"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
"3..8".to_string()
)
})
);
assert_eq!(
TypeshedVersions::from_str("foo: 3.8-3..11"),
Err(TypeshedVersionsParseError {
line_number: ONE,
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
"3..11".to_string()
)
})
);
}
#[test]
fn invalid_typeshed_versions_non_digits() {
let err = TypeshedVersions::from_str("foo: 1.two-").unwrap_err();
assert_eq!(err.line_number, ONE);
let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
else {
panic!()
};
assert_eq!(version, "1.two".to_string());
assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
let err = TypeshedVersions::from_str("foo: 3.8-four.9").unwrap_err();
assert_eq!(err.line_number, ONE);
let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
else {
panic!()
};
assert_eq!(version, "four.9".to_string());
assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
}
}


@@ -1,24 +0,0 @@
use ruff_python_ast::{AnyNodeRef, NodeKind};
use ruff_text_size::{Ranged, TextRange};
/// Compact key for a node for use in a hash map.
///
/// Compares two nodes by their kind and text range.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub(super) struct NodeKey {
kind: NodeKind,
range: TextRange,
}
impl NodeKey {
pub(super) fn from_node<'a, N>(node: N) -> Self
where
N: Into<AnyNodeRef<'a>>,
{
let node = node.into();
NodeKey {
kind: node.kind(),
range: node.range(),
}
}
}


@@ -1,100 +0,0 @@
use crate::python_version::PythonVersion;
use anyhow::Context;
use salsa::Durability;
use salsa::Setter;
use ruff_db::system::{SystemPath, SystemPathBuf};
use crate::module_resolver::SearchPaths;
use crate::Db;
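/// Singleton salsa input storing the target Python version and the resolved
/// module-resolution search paths.
///
/// A construction sketch, mirroring the test helpers in `module_resolver::testing`
/// (the `/src` path is a placeholder):
///
/// ```ignore
/// let program = Program::from_settings(
///     &db,
///     &ProgramSettings {
///         target_version: PythonVersion::default(),
///         search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
///     },
/// )?;
/// ```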
#[salsa::input(singleton)]
pub struct Program {
pub target_version: PythonVersion,
#[return_ref]
pub(crate) search_paths: SearchPaths,
}
impl Program {
pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> {
let ProgramSettings {
target_version,
search_paths,
} = settings;
tracing::info!("Target version: Python {target_version}");
let search_paths = SearchPaths::from_settings(db, search_paths)
.with_context(|| "Invalid search path settings")?;
Ok(Program::builder(settings.target_version, search_paths)
.durability(Durability::HIGH)
.new(db))
}
pub fn update_search_paths(
self,
db: &mut dyn Db,
search_path_settings: &SearchPathSettings,
) -> anyhow::Result<()> {
let search_paths = SearchPaths::from_settings(db, search_path_settings)?;
if self.search_paths(db) != &search_paths {
tracing::debug!("Update search paths");
self.set_search_paths(db).to(search_paths);
}
Ok(())
}
pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
self.search_paths(db).custom_stdlib()
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProgramSettings {
pub target_version: PythonVersion,
pub search_paths: SearchPathSettings,
}
/// Configures the search paths for module resolution.
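///
/// A minimal construction sketch (the paths are placeholders; see the test helpers in
/// `module_resolver::testing` for real usage):
///
/// ```ignore
/// let settings = SearchPathSettings {
///     site_packages: SitePackages::Known(vec![SystemPathBuf::from("/site-packages")]),
///     ..SearchPathSettings::new(SystemPathBuf::from("/src"))
/// };
/// ```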
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct SearchPathSettings {
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
pub extra_paths: Vec<SystemPathBuf>,
/// The root of the workspace, used for finding first-party modules.
pub src_root: SystemPathBuf,
/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary
pub custom_typeshed: Option<SystemPathBuf>,
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: SitePackages,
}
impl SearchPathSettings {
pub fn new(src_root: SystemPathBuf) -> Self {
Self {
src_root,
extra_paths: vec![],
custom_typeshed: None,
site_packages: SitePackages::Known(vec![]),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SitePackages {
Derived {
venv_path: SystemPathBuf,
},
/// Resolved site packages paths
Known(Vec<SystemPathBuf>),
}


@@ -1,69 +0,0 @@
use std::fmt;
/// Representation of a Python version.
///
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
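///
/// A small sketch of the provided conversions and ordering:
///
/// ```ignore
/// let version = PythonVersion::from((3, 12));
/// assert_eq!(version.to_string(), "3.12");
/// assert!(version >= PythonVersion::PY310);
/// ```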
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PythonVersion {
pub major: u8,
pub minor: u8,
}
impl PythonVersion {
pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
pub const PY310: PythonVersion = PythonVersion {
major: 3,
minor: 10,
};
pub const PY311: PythonVersion = PythonVersion {
major: 3,
minor: 11,
};
pub const PY312: PythonVersion = PythonVersion {
major: 3,
minor: 12,
};
pub const PY313: PythonVersion = PythonVersion {
major: 3,
minor: 13,
};
pub fn free_threaded_build_available(self) -> bool {
self >= PythonVersion::PY313
}
}
impl Default for PythonVersion {
fn default() -> Self {
Self::PY38
}
}
impl TryFrom<(&str, &str)> for PythonVersion {
type Error = std::num::ParseIntError;
fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
let (major, minor) = value;
Ok(Self {
major: major.parse()?,
minor: minor.parse()?,
})
}
}
impl From<(u8, u8)> for PythonVersion {
fn from(value: (u8, u8)) -> Self {
let (major, minor) = value;
Self { major, minor }
}
}
impl fmt::Display for PythonVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let PythonVersion { major, minor } = self;
write!(f, "{major}.{minor}")
}
}

File diff suppressed because it is too large


@@ -1,210 +0,0 @@
use rustc_hash::FxHashMap;
use ruff_index::newtype_index;
use ruff_python_ast as ast;
use ruff_python_ast::ExpressionRef;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;
/// AST ids for a single scope.
///
/// The motivation for building the AST ids per scope isn't to reduce invalidation: the
/// struct changes whenever the parsed AST changes anyway. Instead, it's mainly so that we
/// can build the AST ids struct while building the symbol table, while keeping the property
/// that IDs in outer scopes are unaffected by changes in inner scopes.
///
/// For example, we don't want adding new statements to `foo` to change the statement id of `x = foo()` in:
///
/// ```python
/// def foo():
/// return 5
///
/// x = foo()
/// ```
#[derive(Debug)]
pub(crate) struct AstIds {
/// Maps expressions to their expression id.
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
/// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}
impl AstIds {
fn expression_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedExpressionId {
self.expressions_map[&key.into()]
}
fn use_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedUseId {
self.uses_map[&key.into()]
}
}
fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
}
pub trait HasScopedUseId {
/// The type of the ID uniquely identifying the use.
type Id: Copy;
/// Returns the ID that uniquely identifies the use in `scope`.
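///
/// A brief sketch (assumes `db`, `scope`, and a `name_expr: &ast::ExprName` are already
/// in hand, e.g. obtained via the semantic index):
///
/// ```ignore
/// let use_id: ScopedUseId = name_expr.scoped_use_id(db, scope);
/// ```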
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
}
/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
#[newtype_index]
pub struct ScopedUseId;
impl HasScopedUseId for ast::ExprName {
type Id = ScopedUseId;
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
let expression_ref = ExpressionRef::from(self);
expression_ref.scoped_use_id(db, scope)
}
}
impl HasScopedUseId for ast::ExpressionRef<'_> {
type Id = ScopedUseId;
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
let ast_ids = ast_ids(db, scope);
ast_ids.use_id(*self)
}
}
pub trait HasScopedAstId {
/// The type of the ID uniquely identifying the node.
type Id: Copy;
/// Returns the ID that uniquely identifies the node in `scope`.
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
}
/// Uniquely identifies an [`ast::Expr`] in a [`crate::semantic_index::symbol::FileScopeId`].
#[newtype_index]
pub struct ScopedExpressionId;
macro_rules! impl_has_scoped_expression_id {
($ty: ty) => {
impl HasScopedAstId for $ty {
type Id = ScopedExpressionId;
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
let expression_ref = ExpressionRef::from(self);
expression_ref.scoped_ast_id(db, scope)
}
}
};
}
impl_has_scoped_expression_id!(ast::ExprBoolOp);
impl_has_scoped_expression_id!(ast::ExprName);
impl_has_scoped_expression_id!(ast::ExprBinOp);
impl_has_scoped_expression_id!(ast::ExprUnaryOp);
impl_has_scoped_expression_id!(ast::ExprLambda);
impl_has_scoped_expression_id!(ast::ExprIf);
impl_has_scoped_expression_id!(ast::ExprDict);
impl_has_scoped_expression_id!(ast::ExprSet);
impl_has_scoped_expression_id!(ast::ExprListComp);
impl_has_scoped_expression_id!(ast::ExprSetComp);
impl_has_scoped_expression_id!(ast::ExprDictComp);
impl_has_scoped_expression_id!(ast::ExprGenerator);
impl_has_scoped_expression_id!(ast::ExprAwait);
impl_has_scoped_expression_id!(ast::ExprYield);
impl_has_scoped_expression_id!(ast::ExprYieldFrom);
impl_has_scoped_expression_id!(ast::ExprCompare);
impl_has_scoped_expression_id!(ast::ExprCall);
impl_has_scoped_expression_id!(ast::ExprFString);
impl_has_scoped_expression_id!(ast::ExprStringLiteral);
impl_has_scoped_expression_id!(ast::ExprBytesLiteral);
impl_has_scoped_expression_id!(ast::ExprNumberLiteral);
impl_has_scoped_expression_id!(ast::ExprBooleanLiteral);
impl_has_scoped_expression_id!(ast::ExprNoneLiteral);
impl_has_scoped_expression_id!(ast::ExprEllipsisLiteral);
impl_has_scoped_expression_id!(ast::ExprAttribute);
impl_has_scoped_expression_id!(ast::ExprSubscript);
impl_has_scoped_expression_id!(ast::ExprStarred);
impl_has_scoped_expression_id!(ast::ExprNamed);
impl_has_scoped_expression_id!(ast::ExprList);
impl_has_scoped_expression_id!(ast::ExprTuple);
impl_has_scoped_expression_id!(ast::ExprSlice);
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand);
impl_has_scoped_expression_id!(ast::Expr);
impl HasScopedAstId for ast::ExpressionRef<'_> {
type Id = ScopedExpressionId;
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
let ast_ids = ast_ids(db, scope);
ast_ids.expression_id(*self)
}
}
#[derive(Debug)]
pub(super) struct AstIdsBuilder {
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}
impl AstIdsBuilder {
pub(super) fn new() -> Self {
Self {
expressions_map: FxHashMap::default(),
uses_map: FxHashMap::default(),
}
}
/// Adds `expr` to the expression ids map and returns its id.
pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
let expression_id = self.expressions_map.len().into();
self.expressions_map.insert(expr.into(), expression_id);
expression_id
}
/// Adds `expr` to the use ids map and returns its id.
pub(super) fn record_use(&mut self, expr: &ast::Expr) -> ScopedUseId {
let use_id = self.uses_map.len().into();
self.uses_map.insert(expr.into(), use_id);
use_id
}
pub(super) fn finish(mut self) -> AstIds {
self.expressions_map.shrink_to_fit();
self.uses_map.shrink_to_fit();
AstIds {
expressions_map: self.expressions_map,
uses_map: self.uses_map,
}
}
}
/// Node key that can only be constructed for expressions.
pub(crate) mod node_key {
use ruff_python_ast as ast;
use crate::node_key::NodeKey;
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct ExpressionNodeKey(NodeKey);
impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
fn from(value: ast::ExpressionRef<'_>) -> Self {
Self(NodeKey::from_node(value))
}
}
impl From<&ast::Expr> for ExpressionNodeKey {
fn from(value: &ast::Expr) -> Self {
Self(NodeKey::from_node(value))
}
}
}

File diff suppressed because it is too large


@@ -1,39 +0,0 @@
use ruff_db::files::File;
use ruff_python_ast as ast;
use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum Constraint<'db> {
Expression(Expression<'db>),
Pattern(PatternConstraint<'db>),
}
#[salsa::tracked]
pub(crate) struct PatternConstraint<'db> {
#[id]
pub(crate) file: File,
#[id]
pub(crate) file_scope: FileScopeId,
#[no_eq]
#[return_ref]
pub(crate) subject: AstNodeRef<ast::Expr>,
#[no_eq]
#[return_ref]
pub(crate) pattern: AstNodeRef<ast::Pattern>,
#[no_eq]
count: countme::Count<PatternConstraint<'static>>,
}
impl<'db> PatternConstraint<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
}


@@ -1,622 +0,0 @@
use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
use ruff_python_ast as ast;
use crate::ast_node_ref::AstNodeRef;
use crate::node_key::NodeKey;
use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
use crate::Db;
#[salsa::tracked]
pub struct Definition<'db> {
/// The file in which the definition occurs.
#[id]
pub(crate) file: File,
/// The scope in which the definition occurs.
#[id]
pub(crate) file_scope: FileScopeId,
/// The symbol defined.
#[id]
pub(crate) symbol: ScopedSymbolId,
#[no_eq]
#[return_ref]
pub(crate) kind: DefinitionKind,
#[no_eq]
count: countme::Count<Definition<'static>>,
}
impl<'db> Definition<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
pub(crate) fn category(self, db: &'db dyn Db) -> DefinitionCategory {
self.kind(db).category()
}
pub(crate) fn is_declaration(self, db: &'db dyn Db) -> bool {
self.kind(db).category().is_declaration()
}
pub(crate) fn is_binding(self, db: &'db dyn Db) -> bool {
self.kind(db).category().is_binding()
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
Import(&'a ast::Alias),
ImportFrom(ImportFromDefinitionNodeRef<'a>),
For(ForStmtDefinitionNodeRef<'a>),
Function(&'a ast::StmtFunctionDef),
Class(&'a ast::StmtClassDef),
NamedExpression(&'a ast::ExprNamed),
Assignment(AssignmentDefinitionNodeRef<'a>),
AnnotatedAssignment(&'a ast::StmtAnnAssign),
AugmentedAssignment(&'a ast::StmtAugAssign),
Comprehension(ComprehensionDefinitionNodeRef<'a>),
Parameter(ast::AnyParameterRef<'a>),
WithItem(WithItemDefinitionNodeRef<'a>),
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
}
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtFunctionDef) -> Self {
Self::Function(node)
}
}
impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtClassDef) -> Self {
Self::Class(node)
}
}
impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::ExprNamed) -> Self {
Self::NamedExpression(node)
}
}
impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtAnnAssign) -> Self {
Self::AnnotatedAssignment(node)
}
}
impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtAugAssign) -> Self {
Self::AugmentedAssignment(node)
}
}
impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
fn from(node_ref: &'a ast::Alias) -> Self {
Self::Import(node_ref)
}
}
impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
Self::ImportFrom(node_ref)
}
}
impl<'a> From<ForStmtDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(value: ForStmtDefinitionNodeRef<'a>) -> Self {
Self::For(value)
}
}
impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
Self::Assignment(node_ref)
}
}
impl<'a> From<WithItemDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: WithItemDefinitionNodeRef<'a>) -> Self {
Self::WithItem(node_ref)
}
}
impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
Self::Comprehension(node)
}
}
impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ast::AnyParameterRef<'a>) -> Self {
Self::Parameter(node)
}
}
impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: MatchPatternDefinitionNodeRef<'a>) -> Self {
Self::MatchPattern(node)
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::StmtImportFrom,
pub(crate) alias_index: usize,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct AssignmentDefinitionNodeRef<'a> {
pub(crate) assignment: &'a ast::StmtAssign,
pub(crate) target: &'a ast::ExprName,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct WithItemDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::WithItem,
pub(crate) target: &'a ast::ExprName,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ForStmtDefinitionNodeRef<'a> {
pub(crate) iterable: &'a ast::Expr,
pub(crate) target: &'a ast::ExprName,
pub(crate) is_async: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ExceptHandlerDefinitionNodeRef<'a> {
pub(crate) handler: &'a ast::ExceptHandlerExceptHandler,
pub(crate) is_star: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
pub(crate) iterable: &'a ast::Expr,
pub(crate) target: &'a ast::ExprName,
pub(crate) first: bool,
pub(crate) is_async: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct MatchPatternDefinitionNodeRef<'a> {
/// The outermost pattern node in which the identifier being defined occurs.
pub(crate) pattern: &'a ast::Pattern,
/// The identifier being defined.
pub(crate) identifier: &'a ast::Identifier,
/// The index of the identifier in the pattern when visiting the `pattern` node in evaluation
/// order.
pub(crate) index: u32,
}
impl DefinitionNodeRef<'_> {
/// Converts the unowned reference to an owned [`DefinitionKind`].
///
/// # Safety
/// The node wrapped by `self` must be a child of `parsed`.
#[allow(unsafe_code)]
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
match self {
DefinitionNodeRef::Import(alias) => {
DefinitionKind::Import(AstNodeRef::new(parsed, alias))
}
DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
DefinitionKind::ImportFrom(ImportFromDefinitionKind {
node: AstNodeRef::new(parsed, node),
alias_index,
})
}
DefinitionNodeRef::Function(function) => {
DefinitionKind::Function(AstNodeRef::new(parsed, function))
}
DefinitionNodeRef::Class(class) => {
DefinitionKind::Class(AstNodeRef::new(parsed, class))
}
DefinitionNodeRef::NamedExpression(named) => {
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
}
DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
DefinitionKind::Assignment(AssignmentDefinitionKind {
assignment: AstNodeRef::new(parsed.clone(), assignment),
target: AstNodeRef::new(parsed, target),
})
}
DefinitionNodeRef::AnnotatedAssignment(assign) => {
DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
}
DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
}
DefinitionNodeRef::For(ForStmtDefinitionNodeRef {
iterable,
target,
is_async,
}) => DefinitionKind::For(ForStmtDefinitionKind {
iterable: AstNodeRef::new(parsed.clone(), iterable),
target: AstNodeRef::new(parsed, target),
is_async,
}),
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
iterable,
target,
first,
is_async,
}) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
iterable: AstNodeRef::new(parsed.clone(), iterable),
target: AstNodeRef::new(parsed, target),
first,
is_async,
}),
DefinitionNodeRef::Parameter(parameter) => match parameter {
ast::AnyParameterRef::Variadic(parameter) => {
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
}
ast::AnyParameterRef::NonVariadic(parameter) => {
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
}
},
DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef { node, target }) => {
DefinitionKind::WithItem(WithItemDefinitionKind {
node: AstNodeRef::new(parsed.clone(), node),
target: AstNodeRef::new(parsed, target),
})
}
DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
pattern,
identifier,
index,
}) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
pattern: AstNodeRef::new(parsed.clone(), pattern),
identifier: AstNodeRef::new(parsed, identifier),
index,
}),
DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
handler,
is_star,
}) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
handler: AstNodeRef::new(parsed.clone(), handler),
is_star,
}),
}
}
pub(super) fn key(self) -> DefinitionNodeKey {
match self {
Self::Import(node) => node.into(),
Self::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
(&node.names[alias_index]).into()
}
Self::Function(node) => node.into(),
Self::Class(node) => node.into(),
Self::NamedExpression(node) => node.into(),
Self::Assignment(AssignmentDefinitionNodeRef {
assignment: _,
target,
}) => target.into(),
Self::AnnotatedAssignment(node) => node.into(),
Self::AugmentedAssignment(node) => node.into(),
Self::For(ForStmtDefinitionNodeRef {
iterable: _,
target,
is_async: _,
}) => target.into(),
Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
Self::Parameter(node) => match node {
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
},
Self::WithItem(WithItemDefinitionNodeRef { node: _, target }) => target.into(),
Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
identifier.into()
}
Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
}
}
}
#[derive(Clone, Copy, Debug)]
pub(crate) enum DefinitionCategory {
/// A Definition which binds a value to a name (e.g. `x = 1`).
Binding,
/// A Definition which declares the upper-bound of acceptable types for this name (`x: int`).
Declaration,
/// A Definition which both declares a type and binds a value (e.g. `x: int = 1`).
DeclarationAndBinding,
}
impl DefinitionCategory {
/// True if this definition establishes a "declared type" for the symbol.
///
/// If so, any assignments reached by this definition are in error if they assign a value of a
/// type not assignable to the declared type.
///
/// Annotations establish a declared type. So do function and class definitions, and imports.
pub(crate) fn is_declaration(self) -> bool {
matches!(
self,
DefinitionCategory::Declaration | DefinitionCategory::DeclarationAndBinding
)
}
/// True if this definition assigns a value to the symbol.
///
/// False only for annotated assignments without an RHS.
pub(crate) fn is_binding(self) -> bool {
matches!(
self,
DefinitionCategory::Binding | DefinitionCategory::DeclarationAndBinding
)
}
}
#[derive(Clone, Debug)]
pub enum DefinitionKind {
Import(AstNodeRef<ast::Alias>),
ImportFrom(ImportFromDefinitionKind),
Function(AstNodeRef<ast::StmtFunctionDef>),
Class(AstNodeRef<ast::StmtClassDef>),
NamedExpression(AstNodeRef<ast::ExprNamed>),
Assignment(AssignmentDefinitionKind),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
For(ForStmtDefinitionKind),
Comprehension(ComprehensionDefinitionKind),
Parameter(AstNodeRef<ast::Parameter>),
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
WithItem(WithItemDefinitionKind),
MatchPattern(MatchPatternDefinitionKind),
ExceptHandler(ExceptHandlerDefinitionKind),
}
impl DefinitionKind {
pub(crate) fn category(&self) -> DefinitionCategory {
match self {
// functions, classes, and imports always bind, and we consider them declarations
DefinitionKind::Function(_)
| DefinitionKind::Class(_)
| DefinitionKind::Import(_)
| DefinitionKind::ImportFrom(_) => DefinitionCategory::DeclarationAndBinding,
// a parameter always binds a value, but is only a declaration if annotated
DefinitionKind::Parameter(parameter) => {
if parameter.annotation.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Binding
}
}
// presence of a default is irrelevant, same logic as for a no-default parameter
DefinitionKind::ParameterWithDefault(parameter_with_default) => {
if parameter_with_default.parameter.annotation.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Binding
}
}
// annotated assignment is always a declaration, only a binding if there is an RHS
DefinitionKind::AnnotatedAssignment(ann_assign) => {
if ann_assign.value.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Declaration
}
}
// all of these bind values without declaring a type
DefinitionKind::NamedExpression(_)
| DefinitionKind::Assignment(_)
| DefinitionKind::AugmentedAssignment(_)
| DefinitionKind::For(_)
| DefinitionKind::Comprehension(_)
| DefinitionKind::WithItem(_)
| DefinitionKind::MatchPattern(_)
| DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding,
}
}
}
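// Hypothetical illustration (not from the original source): how `category` is
// assumed to interact with the `DefinitionCategory` predicates above. The `kind`
// argument stands in for a value obtained from `Definition::kind`.
fn annotated_assignment_category_sketch(kind: &DefinitionKind) {
    if let DefinitionKind::AnnotatedAssignment(ann_assign) = kind {
        if ann_assign.value.is_none() {
            // `x: int` with no right-hand side declares a type but binds nothing.
            assert!(kind.category().is_declaration());
            assert!(!kind.category().is_binding());
        } else {
            // `x: int = 1` both declares and binds.
            assert!(kind.category().is_declaration());
            assert!(kind.category().is_binding());
        }
    }
}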
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct MatchPatternDefinitionKind {
pattern: AstNodeRef<ast::Pattern>,
identifier: AstNodeRef<ast::Identifier>,
index: u32,
}
impl MatchPatternDefinitionKind {
pub(crate) fn pattern(&self) -> &ast::Pattern {
self.pattern.node()
}
pub(crate) fn index(&self) -> u32 {
self.index
}
}
#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
iterable: AstNodeRef<ast::Expr>,
target: AstNodeRef<ast::ExprName>,
first: bool,
is_async: bool,
}
impl ComprehensionDefinitionKind {
pub(crate) fn iterable(&self) -> &ast::Expr {
self.iterable.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
pub(crate) fn is_first(&self) -> bool {
self.first
}
pub(crate) fn is_async(&self) -> bool {
self.is_async
}
}
#[derive(Clone, Debug)]
pub struct ImportFromDefinitionKind {
node: AstNodeRef<ast::StmtImportFrom>,
alias_index: usize,
}
impl ImportFromDefinitionKind {
pub(crate) fn import(&self) -> &ast::StmtImportFrom {
self.node.node()
}
pub(crate) fn alias(&self) -> &ast::Alias {
&self.node.node().names[self.alias_index]
}
}
#[derive(Clone, Debug)]
pub struct AssignmentDefinitionKind {
assignment: AstNodeRef<ast::StmtAssign>,
target: AstNodeRef<ast::ExprName>,
}
impl AssignmentDefinitionKind {
pub(crate) fn assignment(&self) -> &ast::StmtAssign {
self.assignment.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
}
#[derive(Clone, Debug)]
pub struct WithItemDefinitionKind {
node: AstNodeRef<ast::WithItem>,
target: AstNodeRef<ast::ExprName>,
}
impl WithItemDefinitionKind {
pub(crate) fn node(&self) -> &ast::WithItem {
self.node.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
}
#[derive(Clone, Debug)]
pub struct ForStmtDefinitionKind {
iterable: AstNodeRef<ast::Expr>,
target: AstNodeRef<ast::ExprName>,
is_async: bool,
}
impl ForStmtDefinitionKind {
pub(crate) fn iterable(&self) -> &ast::Expr {
self.iterable.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
pub(crate) fn is_async(&self) -> bool {
self.is_async
}
}
#[derive(Clone, Debug)]
pub struct ExceptHandlerDefinitionKind {
handler: AstNodeRef<ast::ExceptHandlerExceptHandler>,
is_star: bool,
}
impl ExceptHandlerDefinitionKind {
pub(crate) fn node(&self) -> &ast::ExceptHandlerExceptHandler {
self.handler.node()
}
pub(crate) fn handled_exceptions(&self) -> Option<&ast::Expr> {
self.node().type_.as_deref()
}
pub(crate) fn is_star(&self) -> bool {
self.is_star
}
}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct DefinitionNodeKey(NodeKey);
impl From<&ast::Alias> for DefinitionNodeKey {
fn from(node: &ast::Alias) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::StmtFunctionDef> for DefinitionNodeKey {
fn from(node: &ast::StmtFunctionDef) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::StmtClassDef> for DefinitionNodeKey {
fn from(node: &ast::StmtClassDef) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::ExprName> for DefinitionNodeKey {
fn from(node: &ast::ExprName) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::ExprNamed> for DefinitionNodeKey {
fn from(node: &ast::ExprNamed) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
fn from(node: &ast::StmtAnnAssign) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
fn from(node: &ast::StmtAugAssign) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::StmtFor> for DefinitionNodeKey {
fn from(value: &ast::StmtFor) -> Self {
Self(NodeKey::from_node(value))
}
}
impl From<&ast::Parameter> for DefinitionNodeKey {
fn from(node: &ast::Parameter) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
fn from(node: &ast::ParameterWithDefault) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::Identifier> for DefinitionNodeKey {
fn from(identifier: &ast::Identifier) -> Self {
Self(NodeKey::from_node(identifier))
}
}
impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
fn from(handler: &ast::ExceptHandlerExceptHandler) -> Self {
Self(NodeKey::from_node(handler))
}
}


@@ -1,34 +0,0 @@
use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
use ruff_db::files::File;
use ruff_python_ast as ast;
use salsa;
/// An independently type-inferable expression.
///
/// Includes constraint expressions (e.g. if tests) and the RHS of an unpacking assignment.
#[salsa::tracked]
pub(crate) struct Expression<'db> {
/// The file in which the expression occurs.
#[id]
pub(crate) file: File,
/// The scope in which the expression occurs.
#[id]
pub(crate) file_scope: FileScopeId,
/// The expression node.
#[no_eq]
#[return_ref]
pub(crate) node_ref: AstNodeRef<ast::Expr>,
#[no_eq]
count: countme::Count<Expression<'static>>,
}
impl<'db> Expression<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
}


@@ -1,439 +0,0 @@
use std::hash::{Hash, Hasher};
use std::ops::Range;
use bitflags::bitflags;
use hashbrown::hash_map::RawEntryMut;
use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast::name::Name;
use ruff_python_ast::{self as ast};
use rustc_hash::FxHasher;
use crate::ast_node_ref::AstNodeRef;
use crate::node_key::NodeKey;
use crate::semantic_index::{semantic_index, SymbolMap};
use crate::Db;
#[derive(Eq, PartialEq, Debug)]
pub struct Symbol {
name: Name,
flags: SymbolFlags,
}
impl Symbol {
fn new(name: Name) -> Self {
Self {
name,
flags: SymbolFlags::empty(),
}
}
fn insert_flags(&mut self, flags: SymbolFlags) {
self.flags.insert(flags);
}
/// The symbol's name.
pub fn name(&self) -> &Name {
&self.name
}
/// Is the symbol used in its containing scope?
pub fn is_used(&self) -> bool {
self.flags.contains(SymbolFlags::IS_USED)
}
/// Is the symbol defined in its containing scope?
pub fn is_bound(&self) -> bool {
self.flags.contains(SymbolFlags::IS_BOUND)
}
}
bitflags! {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct SymbolFlags: u8 {
const IS_USED = 1 << 0;
const IS_BOUND = 1 << 1;
/// TODO: This flag is not yet set by anything
const MARKED_GLOBAL = 1 << 2;
/// TODO: This flag is not yet set by anything
const MARKED_NONLOCAL = 1 << 3;
}
}
/// ID that uniquely identifies a symbol in a file.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct FileSymbolId {
scope: FileScopeId,
scoped_symbol_id: ScopedSymbolId,
}
impl FileSymbolId {
pub fn scope(self) -> FileScopeId {
self.scope
}
pub(crate) fn scoped_symbol_id(self) -> ScopedSymbolId {
self.scoped_symbol_id
}
}
impl From<FileSymbolId> for ScopedSymbolId {
fn from(val: FileSymbolId) -> Self {
val.scoped_symbol_id()
}
}
/// Symbol ID that uniquely identifies a symbol inside a [`Scope`].
#[newtype_index]
pub struct ScopedSymbolId;
/// A cross-module identifier of a scope that can be used as a salsa query parameter.
#[salsa::tracked]
pub struct ScopeId<'db> {
#[id]
pub file: File,
#[id]
pub file_scope_id: FileScopeId,
/// The node that introduces this scope.
#[no_eq]
#[return_ref]
pub node: NodeWithScopeKind,
#[no_eq]
count: countme::Count<ScopeId<'static>>,
}
impl<'db> ScopeId<'db> {
pub(crate) fn is_function_like(self, db: &'db dyn Db) -> bool {
// Type parameter scopes behave like function scopes in terms of name resolution; CPython
// symbol table also uses the term "function-like" for these scopes.
matches!(
self.node(db),
NodeWithScopeKind::ClassTypeParameters(_)
| NodeWithScopeKind::FunctionTypeParameters(_)
| NodeWithScopeKind::Function(_)
| NodeWithScopeKind::ListComprehension(_)
| NodeWithScopeKind::SetComprehension(_)
| NodeWithScopeKind::DictComprehension(_)
| NodeWithScopeKind::GeneratorExpression(_)
)
}
#[cfg(test)]
pub(crate) fn name(self, db: &'db dyn Db) -> &'db str {
match self.node(db) {
NodeWithScopeKind::Module => "<module>",
NodeWithScopeKind::Class(class) | NodeWithScopeKind::ClassTypeParameters(class) => {
class.name.as_str()
}
NodeWithScopeKind::Function(function)
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
NodeWithScopeKind::Lambda(_) => "<lambda>",
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
}
}
}
/// ID that uniquely identifies a scope inside of a module.
#[newtype_index]
pub struct FileScopeId;
impl FileScopeId {
/// Returns the scope id of the module-global scope.
pub fn global() -> Self {
FileScopeId::from_u32(0)
}
pub fn is_global(self) -> bool {
self == FileScopeId::global()
}
pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
let index = semantic_index(db, file);
index.scope_ids_by_scope[self]
}
}
#[derive(Debug, Eq, PartialEq)]
pub struct Scope {
pub(super) parent: Option<FileScopeId>,
pub(super) kind: ScopeKind,
pub(super) descendents: Range<FileScopeId>,
}
impl Scope {
pub fn parent(self) -> Option<FileScopeId> {
self.parent
}
pub fn kind(&self) -> ScopeKind {
self.kind
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ScopeKind {
Module,
Annotation,
Class,
Function,
Comprehension,
}
impl ScopeKind {
pub const fn is_comprehension(self) -> bool {
matches!(self, ScopeKind::Comprehension)
}
}
/// Symbol table for a specific [`Scope`].
#[derive(Debug)]
pub struct SymbolTable {
/// The symbols in this scope.
symbols: IndexVec<ScopedSymbolId, Symbol>,
/// The symbols indexed by name.
symbols_by_name: SymbolMap,
}
impl SymbolTable {
fn new() -> Self {
Self {
symbols: IndexVec::new(),
symbols_by_name: SymbolMap::default(),
}
}
fn shrink_to_fit(&mut self) {
self.symbols.shrink_to_fit();
}
pub(crate) fn symbol(&self, symbol_id: impl Into<ScopedSymbolId>) -> &Symbol {
&self.symbols[symbol_id.into()]
}
#[allow(unused)]
pub(crate) fn symbol_ids(&self) -> impl Iterator<Item = ScopedSymbolId> {
self.symbols.indices()
}
pub fn symbols(&self) -> impl Iterator<Item = &Symbol> {
self.symbols.iter()
}
/// Returns the symbol named `name`.
pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol> {
let id = self.symbol_id_by_name(name)?;
Some(self.symbol(id))
}
/// Returns the [`ScopedSymbolId`] of the symbol named `name`.
pub(crate) fn symbol_id_by_name(&self, name: &str) -> Option<ScopedSymbolId> {
let (id, ()) = self
.symbols_by_name
.raw_entry()
.from_hash(Self::hash_name(name), |id| {
self.symbol(*id).name().as_str() == name
})?;
Some(*id)
}
fn hash_name(name: &str) -> u64 {
let mut hasher = FxHasher::default();
name.hash(&mut hasher);
hasher.finish()
}
}
impl PartialEq for SymbolTable {
fn eq(&self, other: &Self) -> bool {
// We don't need to compare the symbols_by_name because the name is already captured in `Symbol`.
self.symbols == other.symbols
}
}
impl Eq for SymbolTable {}
#[derive(Debug)]
pub(super) struct SymbolTableBuilder {
table: SymbolTable,
}
impl SymbolTableBuilder {
pub(super) fn new() -> Self {
Self {
table: SymbolTable::new(),
}
}
pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
let hash = SymbolTable::hash_name(&name);
let entry = self
.table
.symbols_by_name
.raw_entry_mut()
.from_hash(hash, |id| self.table.symbols[*id].name() == &name);
match entry {
RawEntryMut::Occupied(entry) => (*entry.key(), false),
RawEntryMut::Vacant(entry) => {
let symbol = Symbol::new(name);
let id = self.table.symbols.push(symbol);
entry.insert_with_hasher(hash, id, (), |id| {
SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
});
(id, true)
}
}
}
pub(super) fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
self.table.symbols[id].insert_flags(SymbolFlags::IS_BOUND);
}
pub(super) fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
self.table.symbols[id].insert_flags(SymbolFlags::IS_USED);
}
pub(super) fn finish(mut self) -> SymbolTable {
self.table.shrink_to_fit();
self.table
}
}
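// Sketch (not in the original file) of the intended round-trip through the
// builder: re-adding an existing name is assumed to return the same id, and
// lookups in the finished table go through the hashed `symbols_by_name` map.
// Assumes `Name::new` accepts a `&str` via `Into<CompactString>`.
fn symbol_table_builder_sketch() {
    let mut builder = SymbolTableBuilder::new();
    let (id, newly_added) = builder.add_symbol(Name::new("x"));
    assert!(newly_added);
    // Re-adding the same name hits the occupied raw entry and reuses the id.
    let (same_id, newly_added) = builder.add_symbol(Name::new("x"));
    assert!(!newly_added);
    assert_eq!(id, same_id);
    builder.mark_symbol_bound(id);
    let table = builder.finish();
    assert_eq!(table.symbol_id_by_name("x"), Some(id));
    assert!(table.symbol(id).is_bound());
}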
/// Reference to a node that introduces a new scope.
#[derive(Copy, Clone, Debug)]
pub(crate) enum NodeWithScopeRef<'a> {
Module,
Class(&'a ast::StmtClassDef),
Function(&'a ast::StmtFunctionDef),
Lambda(&'a ast::ExprLambda),
FunctionTypeParameters(&'a ast::StmtFunctionDef),
ClassTypeParameters(&'a ast::StmtClassDef),
ListComprehension(&'a ast::ExprListComp),
SetComprehension(&'a ast::ExprSetComp),
DictComprehension(&'a ast::ExprDictComp),
GeneratorExpression(&'a ast::ExprGenerator),
}
impl NodeWithScopeRef<'_> {
/// Converts the unowned reference to an owned [`NodeWithScopeKind`].
///
/// # Safety
/// The node wrapped by `self` must be a child of `module`.
#[allow(unsafe_code)]
pub(super) unsafe fn to_kind(self, module: ParsedModule) -> NodeWithScopeKind {
match self {
NodeWithScopeRef::Module => NodeWithScopeKind::Module,
NodeWithScopeRef::Class(class) => {
NodeWithScopeKind::Class(AstNodeRef::new(module, class))
}
NodeWithScopeRef::Function(function) => {
NodeWithScopeKind::Function(AstNodeRef::new(module, function))
}
NodeWithScopeRef::Lambda(lambda) => {
NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
}
NodeWithScopeRef::FunctionTypeParameters(function) => {
NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
}
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
}
}
}
pub(super) fn scope_kind(self) -> ScopeKind {
match self {
NodeWithScopeRef::Module => ScopeKind::Module,
NodeWithScopeRef::Class(_) => ScopeKind::Class,
NodeWithScopeRef::Function(_) => ScopeKind::Function,
NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
NodeWithScopeRef::FunctionTypeParameters(_)
| NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
NodeWithScopeRef::ListComprehension(_)
| NodeWithScopeRef::SetComprehension(_)
| NodeWithScopeRef::DictComprehension(_)
| NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
}
}
pub(crate) fn node_key(self) -> NodeWithScopeKey {
match self {
NodeWithScopeRef::Module => NodeWithScopeKey::Module,
NodeWithScopeRef::Class(class) => NodeWithScopeKey::Class(NodeKey::from_node(class)),
NodeWithScopeRef::Function(function) => {
NodeWithScopeKey::Function(NodeKey::from_node(function))
}
NodeWithScopeRef::Lambda(lambda) => {
NodeWithScopeKey::Lambda(NodeKey::from_node(lambda))
}
NodeWithScopeRef::FunctionTypeParameters(function) => {
NodeWithScopeKey::FunctionTypeParameters(NodeKey::from_node(function))
}
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
}
}
}
}
/// Node that introduces a new scope.
#[derive(Clone, Debug)]
pub enum NodeWithScopeKind {
Module,
Class(AstNodeRef<ast::StmtClassDef>),
ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
Function(AstNodeRef<ast::StmtFunctionDef>),
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
Lambda(AstNodeRef<ast::ExprLambda>),
ListComprehension(AstNodeRef<ast::ExprListComp>),
SetComprehension(AstNodeRef<ast::ExprSetComp>),
DictComprehension(AstNodeRef<ast::ExprDictComp>),
GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub(crate) enum NodeWithScopeKey {
Module,
Class(NodeKey),
ClassTypeParameters(NodeKey),
Function(NodeKey),
FunctionTypeParameters(NodeKey),
Lambda(NodeKey),
ListComprehension(NodeKey),
SetComprehension(NodeKey),
DictComprehension(NodeKey),
GeneratorExpression(NodeKey),
}


@@ -1,575 +0,0 @@
//! First, some terminology:
//!
//! * A "binding" gives a new value to a variable. This includes many different Python statements
//! (assignment statements of course, but also imports, `def` and `class` statements, `as`
//! clauses in `with` and `except` statements, match patterns, and others) and even one
//! expression kind (named expressions). It notably does not include annotated assignment
//! statements without a right-hand side value; these do not assign any new value to the
//! variable. We consider function parameters to be bindings as well, since (from the perspective
//! of the function's internal scope), a function parameter begins the scope bound to a value.
//!
//! * A "declaration" establishes an upper bound type for the values that a variable may be
//! permitted to take on. Annotated assignment statements (with or without an RHS value) are
//! declarations; annotated function parameters are also declarations. We consider `def` and
//! `class` statements to also be declarations, so as to prohibit accidentally shadowing them.
//!
//! Annotated assignments with a right-hand side, and annotated function parameters, are both
//! bindings and declarations.
//!
//! We use [`Definition`] as the universal term (and Salsa tracked struct) encompassing both
//! bindings and declarations. (This sacrifices a bit of type safety in exchange for improved
//! performance via fewer Salsa tracked structs and queries, since most declarations -- typed
//! parameters and annotated assignments with RHS -- are both bindings and declarations.)
//!
//! At any given use of a variable, we can ask about both its "declared type" and its "inferred
//! type". These may be different, but the inferred type must always be assignable to the declared
//! type; that is, the declared type is always wider, and the inferred type may be more precise. If
//! we see an invalid assignment, we emit a diagnostic and abandon our inferred type, deferring to
//! the declared type (this allows an explicit annotation to override bad inference, without a
//! cast), maintaining the invariant.
//!
//! The **inferred type** represents the most precise type we believe encompasses all possible
//! values for the variable at a given use. It is based on a union of the bindings which can reach
//! that use through some control flow path, and the narrowing constraints that control flow must
//! have passed through between the binding and the use. For example, in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! use(x)
//! ```
//!
//! For the use of `x` on the third line, the inferred type should be `Literal[1]`. This is based
//! on the binding on the first line, which assigns the type `Literal[1] | None`, and the narrowing
//! constraint on the second line, which rules out the type `None`, since control flow must pass
//! through this constraint to reach the use in question.
//!
//! The **declared type** represents the code author's declaration (usually through a type
//! annotation) that a given variable should not be assigned any type outside the declared type. In
//! our model, declared types are also control-flow-sensitive; we allow the code author to
//! explicitly re-declare the same variable with a different type. So for a given binding of a
//! variable, we will want to ask which declarations of that variable can reach that binding, in
//! order to determine whether the binding is permitted, or should be a type error. For example:
//!
//! ```python
//! from pathlib import Path
//! def f(path: str):
//! path: Path = Path(path)
//! ```
//!
//! In this function, the initial declared type of `path` is `str`, meaning that the assignment
//! `path = Path(path)` would be a type error, since it assigns to `path` a value whose type is not
//! assignable to `str`. This is the purpose of declared types: they prevent accidental assignment
//! of the wrong type to a variable.
//!
//! But in some cases it is useful to "shadow" or "re-declare" a variable with a new type, and we
//! permit this, as long as it is done with an explicit re-annotation. So `path: Path =
//! Path(path)`, with the explicit `: Path` annotation, is permitted.
//!
//! The general rule is that whatever declaration(s) can reach a given binding determine the
//! validity of that binding. If there is a path in which the symbol is not declared, that is a
//! declaration of `Unknown`. If multiple declarations can reach a binding, we union them, but by
//! default we also issue a type error, since this implicit union of declared types may hide an
//! error.
//!
//! To support type inference, we build a map from each use of a symbol to the bindings live at
//! that use, and the type narrowing constraints that apply to each binding.
//!
//! Let's take this code sample:
//!
//! ```python
//! x = 1
//! x = 2
//! y = x
//! if flag:
//! x = 3
//! else:
//! x = 4
//! z = x
//! ```
//!
//! In this snippet, we have four bindings of `x` (the statements assigning `1`, `2`, `3`, and `4`
//! to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first binding of `x`
//! does not reach any use, because it's immediately replaced by the second binding, before any use
//! happens. (A linter could thus flag the statement `x = 1` as likely superfluous.)
//!
//! The first use of `x` has one live binding: the assignment `x = 2`.
//!
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
//! the `else` branch. Thus, the second use of `x` has two live bindings: `x = 3` and `x = 4`. The
//! `x = 2` assignment is no longer visible, because it must be replaced by either `x = 3` or `x =
//! 4`, no matter which branch was taken. We don't know which branch was taken, so we must consider
//! both bindings as live, which means eventually we would (in type inference) look at these two
//! bindings and infer a type of `Literal[3, 4]` -- the union of `Literal[3]` and `Literal[4]` --
//! for the second use of `x`.
//!
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
//! binding(s) can reach that use. In [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number
//! all uses (that means a `Name` node with `Load` context) so we have a `ScopedUseId` to
//! efficiently represent each use.
//!
//! We also need to know, for a given definition of a symbol, what type narrowing constraints apply
//! to it. For instance, in this code sample:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! use(x)
//! ```
//!
//! At the use of `x`, the live binding of `x` is `1 if flag else None`, which would infer as the
//! type `Literal[1] | None`. But the constraint `x is not None` dominates this use, which means we
//! can rule out the possibility that `x` is `None` here, which should give us the type
//! `Literal[1]` for this use.
//!
//! For declared types, we need to be able to answer the question "given a binding to a symbol,
//! which declarations of that symbol can reach the binding?" This allows us to emit a diagnostic
//! if the binding is attempting to bind a value of a type that is not assignable to the declared
//! type for that symbol, at that point in control flow.
//!
//! We also need to know, given a declaration of a symbol, what the inferred type of that symbol is
//! at that point. This allows us to emit a diagnostic in a case like `x = "foo"; x: int`. The
//! binding `x = "foo"` occurs before the declaration `x: int`, so according to our
//! control-flow-sensitive interpretation of declarations, the assignment is not an error. But the
//! declaration is an error, since it would violate the "inferred type must be assignable to
//! declared type" rule.
//!
//! Another case we need to handle is when a symbol is referenced from a different scope (for
//! example, an import or a nonlocal reference). We call this "public" use of a symbol. For public
//! use of a symbol, we prefer the declared type, if there are any declarations of that symbol; if
//! not, we fall back to the inferred type. So we also need to know which declarations and bindings
//! can reach the end of the scope.
//!
//! Technically, public use of a symbol could occur from any point in control flow of the scope
//! where the symbol is defined (via inline imports and import cycles, in the case of an import, or
//! via a function call partway through the local scope that ends up using a symbol from the scope
//! via a global or nonlocal reference.) But modeling this fully accurately requires whole-program
//! analysis that isn't tractable for an efficient analysis, since it means a given symbol could
//! have a different type every place it's referenced throughout the program, depending on the
//! shape of arbitrarily-sized call/import graphs. So we follow other Python type checkers in
//! making the simplifying assumption that usually the scope will finish execution before its
//! symbols are made visible to other scopes; for instance, most imports will import from a
//! complete module, not a partially-executed module. (We may want to get a little smarter than
//! this in the future for some closures, but for now this is where we start.)
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `bindings_by_use` vector of [`SymbolBindings`] indexed by [`ScopedUseId`], a
//! `definitions_by_definition` map from each [`Definition`] to the [`SymbolBindings`] or
//! [`SymbolDeclarations`] that are live at that definition, and a `public_symbols` vector of
//! [`SymbolState`] indexed by [`ScopedSymbolId`]. The values in each of these are (in principle)
//! a list of live bindings at that use/definition, or at the end of the scope for that symbol,
//! with a list of the dominating constraints for each binding.
//!
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, [`SymbolBindings`] and [`SymbolDeclarations`] are structs which use bit-sets to track
//! definitions (and constraints, in the case of bindings) in terms of [`ScopedDefinitionId`] and
//! [`ScopedConstraintId`], which are indices into the `all_definitions` and `all_constraints`
//! indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as a "binding" itself: the initial "binding" for
//! each symbol in a scope. But actually modeling it this way would unnecessarily increase the
//! number of [`Definition`]s that Salsa must track. Since "unbound" is special in that all symbols
//! share it, and it doesn't have any additional per-symbol state, and constraints are irrelevant
//! to it, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`SymbolBindings`] struct. If this flag is `true` for a use of a symbol, it means the symbol
//! has a path to the use in which it is never bound. If this flag is `false`, it means we've
//! eliminated the possibility of unbound: every control flow path to the use includes a binding
//! for this symbol.
//!
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
//! constraint as they are encountered by the
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
//! each symbol, the builder tracks the `SymbolState` (`SymbolBindings` and `SymbolDeclarations`)
//! for that symbol. When we hit a use or definition of a symbol, we record the necessary parts of
//! the current state for that symbol that we need for that use or definition. When we reach the
//! end of the scope, we record the state for each symbol as the public definitions of that
//! symbol.
//!
//! Let's walk through the above example. Initially we record for `x` that it has no bindings, and
//! may be unbound. When we see `x = 1`, we record that as the sole live binding of `x`, and flip
//! `may_be_unbound` to `false`. Then we see `x = 2`, and we replace `x = 1` as the sole live
//! binding of `x`. When we get to `y = x`, we record that the live bindings for that use of `x`
//! are just the `x = 2` definition.
//!
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
//! happen regardless. Then we take a pre-branch snapshot of the current state for all symbols,
//! which we'll need later. Then we record `flag` as a possible constraint on the current binding
//! (`x = 2`), and go ahead and visit the `if` body. When we see `x = 3`, it replaces `x = 2`
//! (constrained by `flag`) as the sole live binding of `x`. At the end of the `if` body, we take
//! another snapshot of the current symbol state; we'll call this the post-if-body snapshot.
//!
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole binding for `x`
//! again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the sole live binding
//! of `x`.
//!
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
//! (reflecting the end-of-else state, with `x = 4` as the only live binding) with our post-if-body
//! snapshot (which has `x = 3` as the only live binding). The result of this merge is that we now
//! have two live bindings of `x`: `x = 3` and `x = 4`.
//!
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
use self::symbol_state::{
BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
ScopedConstraintId, ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
};
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use rustc_hash::FxHashMap;
use super::constraint::Constraint;
mod bitset;
mod symbol_state;
/// Applicable definitions and constraints for every use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
/// Array of [`Definition`] in this scope.
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
/// Array of [`Constraint`] in this scope.
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
/// [`SymbolBindings`] reaching a [`ScopedUseId`].
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
/// [`SymbolBindings`] or [`SymbolDeclarations`] reaching a given [`Definition`].
///
/// If the definition is a binding (only) -- `x = 1` for example -- then we need
/// [`SymbolDeclarations`] to know whether this binding is permitted by the live declarations.
///
/// If the definition is a declaration (only) -- `x: int` for example -- then we need
/// [`SymbolBindings`] to know whether this declaration is consistent with the previously
/// inferred type.
///
/// If the definition is both a declaration and a binding -- `x: int = 1` for example -- then
/// we don't actually need anything here, all we'll need to validate is that our own RHS is a
/// valid assignment to our own annotation.
definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,
/// [`SymbolState`] visible at end of scope for each symbol.
public_symbols: IndexVec<ScopedSymbolId, SymbolState>,
}
impl<'db> UseDefMap<'db> {
pub(crate) fn bindings_at_use(
&self,
use_id: ScopedUseId,
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(&self.bindings_by_use[use_id])
}
pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
self.bindings_by_use[use_id].may_be_unbound()
}
pub(crate) fn public_bindings(
&self,
symbol: ScopedSymbolId,
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(self.public_symbols[symbol].bindings())
}
pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
self.public_symbols[symbol].may_be_unbound()
}
pub(crate) fn bindings_at_declaration(
&self,
declaration: Definition<'db>,
) -> BindingWithConstraintsIterator<'_, 'db> {
if let SymbolDefinitions::Bindings(bindings) = &self.definitions_by_definition[&declaration]
{
self.bindings_iterator(bindings)
} else {
unreachable!("Declaration has non-Bindings in definitions_by_definition");
}
}
pub(crate) fn declarations_at_binding(
&self,
binding: Definition<'db>,
) -> DeclarationsIterator<'_, 'db> {
if let SymbolDefinitions::Declarations(declarations) =
&self.definitions_by_definition[&binding]
{
self.declarations_iterator(declarations)
} else {
unreachable!("Binding has non-Declarations in definitions_by_definition");
}
}
pub(crate) fn public_declarations(
&self,
symbol: ScopedSymbolId,
) -> DeclarationsIterator<'_, 'db> {
let declarations = self.public_symbols[symbol].declarations();
self.declarations_iterator(declarations)
}
pub(crate) fn has_public_declarations(&self, symbol: ScopedSymbolId) -> bool {
!self.public_symbols[symbol].declarations().is_empty()
}
fn bindings_iterator<'a>(
&'a self,
bindings: &'a SymbolBindings,
) -> BindingWithConstraintsIterator<'a, 'db> {
BindingWithConstraintsIterator {
all_definitions: &self.all_definitions,
all_constraints: &self.all_constraints,
inner: bindings.iter(),
}
}
fn declarations_iterator<'a>(
&'a self,
declarations: &'a SymbolDeclarations,
) -> DeclarationsIterator<'a, 'db> {
DeclarationsIterator {
all_definitions: &self.all_definitions,
inner: declarations.iter(),
may_be_undeclared: declarations.may_be_undeclared(),
}
}
}
/// Either live bindings or live declarations for a symbol.
#[derive(Debug, PartialEq, Eq)]
enum SymbolDefinitions {
Bindings(SymbolBindings),
Declarations(SymbolDeclarations),
}
#[derive(Debug)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
inner: BindingIdWithConstraintsIterator<'map>,
}
impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
type Item = BindingWithConstraints<'map, 'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner
.next()
.map(|def_id_with_constraints| BindingWithConstraints {
binding: self.all_definitions[def_id_with_constraints.definition],
constraints: ConstraintsIterator {
all_constraints: self.all_constraints,
constraint_ids: def_id_with_constraints.constraint_ids,
},
})
}
}
impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}
pub(crate) struct BindingWithConstraints<'map, 'db> {
pub(crate) binding: Definition<'db>,
pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}
pub(crate) struct ConstraintsIterator<'map, 'db> {
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
constraint_ids: ConstraintIdIterator<'map>,
}
impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
type Item = Constraint<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.constraint_ids
.next()
.map(|constraint_id| self.all_constraints[constraint_id])
}
}
impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}
pub(crate) struct DeclarationsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
inner: DeclarationIdIterator<'map>,
may_be_undeclared: bool,
}
impl DeclarationsIterator<'_, '_> {
pub(crate) fn may_be_undeclared(&self) -> bool {
self.may_be_undeclared
}
}
impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
type Item = Definition<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|def_id| self.all_definitions[def_id])
}
}
impl std::iter::FusedIterator for DeclarationsIterator<'_, '_> {}
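// Sketch (assumption: `map` and `symbol` come from the surrounding semantic
// index) of how a consumer is expected to read the public declarations for a
// symbol: check `may_be_undeclared` before draining the iterator, since the
// flag lives on the iterator itself.
fn public_declarations_sketch<'db>(
    map: &UseDefMap<'db>,
    symbol: ScopedSymbolId,
) -> (Vec<Definition<'db>>, bool) {
    let declarations = map.public_declarations(symbol);
    // True if some control-flow path reaches the end of the scope without
    // declaring the symbol at all.
    let may_be_undeclared = declarations.may_be_undeclared();
    (declarations.collect(), may_be_undeclared)
}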
/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}
#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
/// Append-only array of [`Definition`].
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
/// Append-only array of [`Constraint`].
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
/// Live bindings at each so-far-recorded use.
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
/// Live bindings or declarations for each so-far-recorded definition.
definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,
/// Currently live bindings and declarations for each symbol.
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}
impl<'db> UseDefMapBuilder<'db> {
pub(super) fn new() -> Self {
Self::default()
}
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
let new_symbol = self.symbol_states.push(SymbolState::undefined());
debug_assert_eq!(symbol, new_symbol);
}
pub(super) fn record_binding(&mut self, symbol: ScopedSymbolId, binding: Definition<'db>) {
let def_id = self.all_definitions.push(binding);
let symbol_state = &mut self.symbol_states[symbol];
self.definitions_by_definition.insert(
binding,
SymbolDefinitions::Declarations(symbol_state.declarations().clone()),
);
symbol_state.record_binding(def_id);
}
pub(super) fn record_constraint(&mut self, constraint: Constraint<'db>) {
let constraint_id = self.all_constraints.push(constraint);
for state in &mut self.symbol_states {
state.record_constraint(constraint_id);
}
}
pub(super) fn record_declaration(
&mut self,
symbol: ScopedSymbolId,
declaration: Definition<'db>,
) {
let def_id = self.all_definitions.push(declaration);
let symbol_state = &mut self.symbol_states[symbol];
self.definitions_by_definition.insert(
declaration,
SymbolDefinitions::Bindings(symbol_state.bindings().clone()),
);
symbol_state.record_declaration(def_id);
}
pub(super) fn record_declaration_and_binding(
&mut self,
symbol: ScopedSymbolId,
definition: Definition<'db>,
) {
// We don't need to store anything in self.definitions_by_definition.
let def_id = self.all_definitions.push(definition);
let symbol_state = &mut self.symbol_states[symbol];
symbol_state.record_declaration(def_id);
symbol_state.record_binding(def_id);
}
pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
// We have a use of a symbol; clone the current bindings for that symbol, and record them
// as the live bindings for this use.
let new_use = self
.bindings_by_use
.push(self.symbol_states[symbol].bindings().clone());
debug_assert_eq!(use_id, new_use);
}
/// Take a snapshot of the current visible-symbols state.
pub(super) fn snapshot(&self) -> FlowSnapshot {
FlowSnapshot {
symbol_states: self.symbol_states.clone(),
}
}
/// Restore the current builder symbols state to the given snapshot.
pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
// IDs must line up), so the current number of known symbols must always be equal to or
// greater than the number of known symbols in a previously-taken snapshot.
let num_symbols = self.symbol_states.len();
debug_assert!(num_symbols >= snapshot.symbol_states.len());
// Restore the current visible-definitions state to the given snapshot.
self.symbol_states = snapshot.symbol_states;
// If the snapshot we are restoring is missing some symbols we've recorded since, we need
// to fill them in so the symbol IDs continue to line up. Since they don't exist in the
// snapshot, the correct state to fill them in with is "undefined".
self.symbol_states
.resize(num_symbols, SymbolState::undefined());
}
/// Merge the given snapshot into the current state, reflecting that we might have taken either
/// path to get here. The new state for each symbol should include definitions from both the
/// prior state and the snapshot.
pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
// IDs must line up), so the current number of known symbols must always be equal to or
// greater than the number of known symbols in a previously-taken snapshot.
debug_assert!(self.symbol_states.len() >= snapshot.symbol_states.len());
let mut snapshot_definitions_iter = snapshot.symbol_states.into_iter();
for current in &mut self.symbol_states {
if let Some(snapshot) = snapshot_definitions_iter.next() {
current.merge(snapshot);
} else {
// Symbol not present in snapshot, so it's unbound/undeclared from that path.
current.set_may_be_unbound();
current.set_may_be_undeclared();
}
}
}
pub(super) fn finish(mut self) -> UseDefMap<'db> {
self.all_definitions.shrink_to_fit();
self.all_constraints.shrink_to_fit();
self.symbol_states.shrink_to_fit();
self.bindings_by_use.shrink_to_fit();
self.definitions_by_definition.shrink_to_fit();
UseDefMap {
all_definitions: self.all_definitions,
all_constraints: self.all_constraints,
bindings_by_use: self.bindings_by_use,
public_symbols: self.symbol_states,
definitions_by_definition: self.definitions_by_definition,
}
}
}
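// Sketch (hypothetical driver, not part of the original file) mirroring how the
// `SemanticIndexBuilder` is described above as visiting a `StmtIf`: the
// `flag_constraint`, `then_binding`, and `else_binding` arguments stand in for
// values produced elsewhere during the AST visit.
fn visit_if_else_sketch<'db>(
    builder: &mut UseDefMapBuilder<'db>,
    symbol: ScopedSymbolId,
    flag_constraint: Constraint<'db>,
    then_binding: Definition<'db>,
    else_binding: Definition<'db>,
) {
    // State before the branch; both arms start from here.
    let pre_if = builder.snapshot();
    // The `if` test narrows every binding that is currently live.
    builder.record_constraint(flag_constraint);
    builder.record_binding(symbol, then_binding);
    // Remember how the `if` body left things...
    let post_if_body = builder.snapshot();
    // ...then rewind to the pre-branch state before visiting the `else` arm.
    builder.restore(pre_if);
    builder.record_binding(symbol, else_binding);
    // Either arm may have executed, so merge the two resulting states.
    builder.merge(post_if_body);
}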


@@ -1,309 +0,0 @@
/// Ordered set of `u32`.
///
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
/// blocks for larger values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum BitSet<const B: usize> {
/// Bit-set (in 64-bit blocks) for the first 64 * B entries.
Inline([u64; B]),
/// Overflow beyond 64 * B.
Heap(Vec<u64>),
}
impl<const B: usize> Default for BitSet<B> {
fn default() -> Self {
// B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
// truncating casts to u32 below safe. This would be better as a const assertion, but
// that's not possible on stable with const generic params. (B should never really be
// anywhere close to this large.)
assert!(B * 64 < (u32::MAX as usize));
// This implementation requires usize >= 32 bits.
static_assertions::const_assert!(usize::BITS >= 32);
Self::Inline([0; B])
}
}
impl<const B: usize> BitSet<B> {
/// Create and return a new [`BitSet`] with a single `value` inserted.
pub(super) fn with(value: u32) -> Self {
let mut bitset = Self::default();
bitset.insert(value);
bitset
}
pub(super) fn is_empty(&self) -> bool {
self.blocks().iter().all(|&b| b == 0)
}
/// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
fn resize(&mut self, value: u32) {
let num_blocks_needed = (value / 64) + 1;
self.resize_blocks(num_blocks_needed as usize);
}
fn resize_blocks(&mut self, num_blocks_needed: usize) {
match self {
Self::Inline(blocks) => {
let mut vec = blocks.to_vec();
vec.resize(num_blocks_needed, 0);
*self = Self::Heap(vec);
}
Self::Heap(vec) => {
vec.resize(num_blocks_needed, 0);
}
}
}
fn blocks_mut(&mut self) -> &mut [u64] {
match self {
Self::Inline(blocks) => blocks.as_mut_slice(),
Self::Heap(blocks) => blocks.as_mut_slice(),
}
}
fn blocks(&self) -> &[u64] {
match self {
Self::Inline(blocks) => blocks.as_slice(),
Self::Heap(blocks) => blocks.as_slice(),
}
}
/// Insert a value into the [`BitSet`].
///
/// Return true if the value was newly inserted, false if already present.
pub(super) fn insert(&mut self, value: u32) -> bool {
let value_usize = value as usize;
let (block, index) = (value_usize / 64, value_usize % 64);
if block >= self.blocks().len() {
self.resize(value);
}
let blocks = self.blocks_mut();
let missing = blocks[block] & (1 << index) == 0;
blocks[block] |= 1 << index;
missing
}
/// Intersect in-place with another [`BitSet`].
pub(super) fn intersect(&mut self, other: &BitSet<B>) {
let my_blocks = self.blocks_mut();
let other_blocks = other.blocks();
let min_len = my_blocks.len().min(other_blocks.len());
for i in 0..min_len {
my_blocks[i] &= other_blocks[i];
}
for block in my_blocks.iter_mut().skip(min_len) {
*block = 0;
}
}
/// Union in-place with another [`BitSet`].
pub(super) fn union(&mut self, other: &BitSet<B>) {
let mut max_len = self.blocks().len();
let other_len = other.blocks().len();
if other_len > max_len {
max_len = other_len;
self.resize_blocks(max_len);
}
for (my_block, other_block) in self.blocks_mut().iter_mut().zip(other.blocks()) {
*my_block |= other_block;
}
}
/// Return an iterator over the values (in ascending order) in this [`BitSet`].
pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
let blocks = self.blocks();
BitSetIterator {
blocks,
current_block_index: 0,
current_block: blocks[0],
}
}
}
/// Iterator over values in a [`BitSet`].
#[derive(Debug)]
pub(super) struct BitSetIterator<'a, const B: usize> {
/// The blocks we are iterating over.
blocks: &'a [u64],
/// The index of the block we are currently iterating through.
current_block_index: usize,
/// The block we are currently iterating through (and zeroing as we go.)
current_block: u64,
}
impl<const B: usize> Iterator for BitSetIterator<'_, B> {
type Item = u32;
fn next(&mut self) -> Option<Self::Item> {
while self.current_block == 0 {
if self.current_block_index + 1 >= self.blocks.len() {
return None;
}
self.current_block_index += 1;
self.current_block = self.blocks[self.current_block_index];
}
let lowest_bit_set = self.current_block.trailing_zeros();
// reset the lowest set bit, without a data dependency on `lowest_bit_set`
self.current_block &= self.current_block.wrapping_sub(1);
// SAFETY: `lowest_bit_set` cannot be more than 64, `current_block_index` cannot be more
// than `B - 1`, and we check above that `B * 64 < u32::MAX`. So both `64 *
// current_block_index` and the final value here must fit in u32.
#[allow(clippy::cast_possible_truncation)]
Some(lowest_bit_set + (64 * self.current_block_index) as u32)
}
}
impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}
#[cfg(test)]
mod tests {
use super::BitSet;
fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
}
#[test]
fn iter() {
let mut b = BitSet::<1>::with(3);
b.insert(27);
b.insert(6);
assert!(matches!(b, BitSet::Inline(_)));
assert_bitset(&b, &[3, 6, 27]);
}
#[test]
fn iter_overflow() {
let mut b = BitSet::<1>::with(140);
b.insert(100);
b.insert(129);
assert!(matches!(b, BitSet::Heap(_)));
assert_bitset(&b, &[100, 129, 140]);
}
#[test]
fn intersect() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(5);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_mixed_1() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(5);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_mixed_2() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(89);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_heap() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(90);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_heap_2() {
let mut b1 = BitSet::<1>::with(89);
let mut b2 = BitSet::<1>::with(89);
b1.insert(91);
b2.insert(90);
b1.intersect(&b2);
assert_bitset(&b1, &[89]);
}
#[test]
fn union() {
let mut b1 = BitSet::<1>::with(2);
let b2 = BitSet::<1>::with(4);
b1.union(&b2);
assert_bitset(&b1, &[2, 4]);
}
#[test]
fn union_mixed_1() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(5);
b1.union(&b2);
assert_bitset(&b1, &[4, 5, 89]);
}
#[test]
fn union_mixed_2() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(89);
b1.union(&b2);
assert_bitset(&b1, &[4, 23, 89]);
}
#[test]
fn union_heap() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(90);
b1.union(&b2);
assert_bitset(&b1, &[4, 89, 90]);
}
#[test]
fn union_heap_2() {
let mut b1 = BitSet::<1>::with(89);
let mut b2 = BitSet::<1>::with(89);
b1.insert(91);
b2.insert(90);
b1.union(&b2);
assert_bitset(&b1, &[89, 90, 91]);
}
#[test]
fn multiple_blocks() {
let mut b = BitSet::<2>::with(120);
b.insert(45);
assert!(matches!(b, BitSet::Inline(_)));
assert_bitset(&b, &[45, 120]);
}
#[test]
fn empty() {
let b = BitSet::<1>::default();
assert!(b.is_empty());
}
}


@@ -1,588 +0,0 @@
//! Track live bindings per symbol, applicable constraints per binding, and live declarations.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between bindings and constraints, not just a single set
//! of currently dominating constraints (where "dominating" means "control flow must have passed
//! through it to reach this point"), because we can have dominating constraints that apply to some
//! bindings but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! if flag2:
//! x = 2 if flag else None
//! x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! binding of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each binding, an index into a list of dominating constraints,
//! either, because we can have bindings which are still visible, but subject to constraints that
//! are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//! x = 1 if flag2 else None
//! assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` binding, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of live bindings and constraints
//! when needed.
//!
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
//! similar to tracking live bindings.
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;
/// A newtype-index for a definition in a particular scope.
#[newtype_index]
pub(super) struct ScopedDefinitionId;
/// A newtype-index for a constraint expression in a particular scope.
#[newtype_index]
pub(super) struct ScopedConstraintId;
/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_BINDING_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live bindings of a symbol in a scope.
type Bindings = BitSet<INLINE_BINDING_BLOCKS>;
type BindingsIterator<'a> = BitSetIterator<'a, INLINE_BINDING_BLOCKS>;
/// Can reference this * 64 total declarations inline; more will fall back to the heap.
const INLINE_DECLARATION_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live declarations of a symbol in a scope.
type Declarations = BitSet<INLINE_DECLARATION_BLOCKS>;
type DeclarationsIterator<'a> = BitSetIterator<'a, INLINE_DECLARATION_BLOCKS>;
/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;
/// Can keep inline this many live bindings per symbol at a given time; more will go to heap.
const INLINE_BINDINGS_PER_SYMBOL: usize = 4;
/// One [`BitSet`] of applicable [`ScopedConstraintId`] per live binding.
type InlineConstraintArray = [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_BINDINGS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;
/// Live declarations for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolDeclarations {
/// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
live_declarations: Declarations,
/// Could the symbol be un-declared at this point?
may_be_undeclared: bool,
}
impl SymbolDeclarations {
fn undeclared() -> Self {
Self {
live_declarations: Declarations::default(),
may_be_undeclared: true,
}
}
/// Record a newly-encountered declaration for this symbol.
fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.live_declarations = Declarations::with(declaration_id.into());
self.may_be_undeclared = false;
}
/// Add undeclared as a possibility for this symbol.
fn set_may_be_undeclared(&mut self) {
self.may_be_undeclared = true;
}
/// Return an iterator over live declarations for this symbol.
pub(super) fn iter(&self) -> DeclarationIdIterator {
DeclarationIdIterator {
inner: self.live_declarations.iter(),
}
}
pub(super) fn is_empty(&self) -> bool {
self.live_declarations.is_empty()
}
pub(super) fn may_be_undeclared(&self) -> bool {
self.may_be_undeclared
}
}
/// Live bindings and narrowing constraints for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolBindings {
/// [`BitSet`]: which bindings (as [`ScopedDefinitionId`]) can reach the current location?
live_bindings: Bindings,
/// For each live binding, which [`ScopedConstraintId`] apply?
///
/// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
/// binding in `live_bindings`.
constraints: Constraints,
/// Could the symbol be unbound at this point?
may_be_unbound: bool,
}
impl SymbolBindings {
fn unbound() -> Self {
Self {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
may_be_unbound: true,
}
}
/// Add Unbound as a possibility for this symbol.
fn set_may_be_unbound(&mut self) {
self.may_be_unbound = true;
}
/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
// The new binding replaces all previous live bindings in this path, and has no
// constraints.
self.live_bindings = Bindings::with(binding_id.into());
self.constraints = Constraints::with_capacity(1);
self.constraints.push(BitSet::default());
self.may_be_unbound = false;
}
/// Add given constraint to all live bindings.
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
for bitset in &mut self.constraints {
bitset.insert(constraint_id.into());
}
}
/// Iterate over currently live bindings for this symbol.
pub(super) fn iter(&self) -> BindingIdWithConstraintsIterator {
BindingIdWithConstraintsIterator {
definitions: self.live_bindings.iter(),
constraints: self.constraints.iter(),
}
}
pub(super) fn may_be_unbound(&self) -> bool {
self.may_be_unbound
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
declarations: SymbolDeclarations,
bindings: SymbolBindings,
}
impl SymbolState {
/// Return a new [`SymbolState`] representing an unbound, undeclared symbol.
pub(super) fn undefined() -> Self {
Self {
declarations: SymbolDeclarations::undeclared(),
bindings: SymbolBindings::unbound(),
}
}
/// Add Unbound as a possibility for this symbol.
pub(super) fn set_may_be_unbound(&mut self) {
self.bindings.set_may_be_unbound();
}
/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
self.bindings.record_binding(binding_id);
}
/// Add given constraint to all live bindings.
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
self.bindings.record_constraint(constraint_id);
}
/// Add undeclared as a possibility for this symbol.
pub(super) fn set_may_be_undeclared(&mut self) {
self.declarations.set_may_be_undeclared();
}
/// Record a newly-encountered declaration of this symbol.
pub(super) fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.declarations.record_declaration(declaration_id);
}
/// Merge another [`SymbolState`] into this one.
pub(super) fn merge(&mut self, b: SymbolState) {
let mut a = Self {
bindings: SymbolBindings {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
may_be_unbound: self.bindings.may_be_unbound || b.bindings.may_be_unbound,
},
declarations: SymbolDeclarations {
live_declarations: self.declarations.live_declarations.clone(),
may_be_undeclared: self.declarations.may_be_undeclared
|| b.declarations.may_be_undeclared,
},
};
std::mem::swap(&mut a, self);
self.declarations
.live_declarations
.union(&b.declarations.live_declarations);
let mut a_defs_iter = a.bindings.live_bindings.iter();
let mut b_defs_iter = b.bindings.live_bindings.iter();
let mut a_constraints_iter = a.bindings.constraints.into_iter();
let mut b_constraints_iter = b.bindings.constraints.into_iter();
let mut opt_a_def: Option<u32> = a_defs_iter.next();
let mut opt_b_def: Option<u32> = b_defs_iter.next();
// Iterate through the definitions from `a` and `b`, always processing the lower definition
// ID first, and pushing each definition onto the merged `SymbolState` with its
// constraints. If a definition is found in both `a` and `b`, push it with the intersection
// of the constraints from the two paths; a constraint that applies from only one possible
// path is irrelevant.
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
merged.bindings.live_bindings.insert(def);
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
// bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
// `::merge` always pushes one definition and one constraint bitset together (just
// below), so the number of definitions and the number of constraint bitsets can never
// get out of sync.
let constraints = constraints_iter
.next()
.expect("definitions and constraints length mismatch");
merged.bindings.constraints.push(constraints);
};
loop {
match (opt_a_def, opt_b_def) {
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
std::cmp::Ordering::Less => {
// Next definition ID is only in `a`, push it to `self` and advance `a`.
push(a_def, &mut a_constraints_iter, self);
opt_a_def = a_defs_iter.next();
}
std::cmp::Ordering::Greater => {
// Next definition ID is only in `b`, push it to `self` and advance `b`.
push(b_def, &mut b_constraints_iter, self);
opt_b_def = b_defs_iter.next();
}
std::cmp::Ordering::Equal => {
// Next definition is in both; push to `self` and intersect constraints.
push(a_def, &mut b_constraints_iter, self);
// SAFETY: we only ever create SymbolState with either no definitions and
// no constraint bitsets (`::unbound`) or one definition and one constraint
// bitset (`::with`), and `::merge` always pushes one definition and one
// constraint bitset together (just below), so the number of definitions
// and the number of constraint bitsets can never get out of sync.
let a_constraints = a_constraints_iter
.next()
.expect("definitions and constraints length mismatch");
// If the same definition is visible through both paths, any constraint
// that applies on only one path is irrelevant to the resulting type from
// unioning the two paths, so we intersect the constraints.
self.bindings
.constraints
.last_mut()
.unwrap()
.intersect(&a_constraints);
opt_a_def = a_defs_iter.next();
opt_b_def = b_defs_iter.next();
}
},
(Some(a_def), None) => {
// We've exhausted `b`, just push the def from `a` and move on to the next.
push(a_def, &mut a_constraints_iter, self);
opt_a_def = a_defs_iter.next();
}
(None, Some(b_def)) => {
// We've exhausted `a`, just push the def from `b` and move on to the next.
push(b_def, &mut b_constraints_iter, self);
opt_b_def = b_defs_iter.next();
}
(None, None) => break,
}
}
}
pub(super) fn bindings(&self) -> &SymbolBindings {
&self.bindings
}
pub(super) fn declarations(&self) -> &SymbolDeclarations {
&self.declarations
}
/// Could the symbol be unbound?
pub(super) fn may_be_unbound(&self) -> bool {
self.bindings.may_be_unbound()
}
}
/// The default state of a symbol, if we've seen no definitions of it, is undefined (that is,
/// both unbound and undeclared).
impl Default for SymbolState {
fn default() -> Self {
SymbolState::undefined()
}
}
/// A single binding (as [`ScopedDefinitionId`]) with an iterator of its applicable
/// [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct BindingIdWithConstraints<'a> {
pub(super) definition: ScopedDefinitionId,
pub(super) constraint_ids: ConstraintIdIterator<'a>,
}
#[derive(Debug)]
pub(super) struct BindingIdWithConstraintsIterator<'a> {
definitions: BindingsIterator<'a>,
constraints: ConstraintsIterator<'a>,
}
impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
type Item = BindingIdWithConstraints<'a>;
fn next(&mut self) -> Option<Self::Item> {
match (self.definitions.next(), self.constraints.next()) {
(None, None) => None,
(Some(def), Some(constraints)) => Some(BindingIdWithConstraints {
definition: ScopedDefinitionId::from_u32(def),
constraint_ids: ConstraintIdIterator {
wrapped: constraints.iter(),
},
}),
// SAFETY: see above.
_ => unreachable!("definitions and constraints length mismatch"),
}
}
}
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}
#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
}
impl Iterator for ConstraintIdIterator<'_> {
type Item = ScopedConstraintId;
fn next(&mut self) -> Option<Self::Item> {
self.wrapped.next().map(ScopedConstraintId::from_u32)
}
}
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
#[derive(Debug)]
pub(super) struct DeclarationIdIterator<'a> {
inner: DeclarationsIterator<'a>,
}
impl<'a> Iterator for DeclarationIdIterator<'a> {
type Item = ScopedDefinitionId;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(ScopedDefinitionId::from_u32)
}
}
impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}
#[cfg(test)]
mod tests {
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
fn assert_bindings(symbol: &SymbolState, may_be_unbound: bool, expected: &[&str]) {
assert_eq!(symbol.may_be_unbound(), may_be_unbound);
let actual = symbol
.bindings()
.iter()
.map(|def_id_with_constraints| {
format!(
"{}<{}>",
def_id_with_constraints.definition.as_u32(),
def_id_with_constraints
.constraint_ids
.map(ScopedConstraintId::as_u32)
.map(|idx| idx.to_string())
.collect::<Vec<_>>()
.join(", ")
)
})
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
pub(crate) fn assert_declarations(
symbol: &SymbolState,
may_be_undeclared: bool,
expected: &[u32],
) {
assert_eq!(symbol.declarations.may_be_undeclared(), may_be_undeclared);
let actual = symbol
.declarations()
.iter()
.map(ScopedDefinitionId::as_u32)
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
#[test]
fn unbound() {
let sym = SymbolState::undefined();
assert_bindings(&sym, true, &[]);
}
#[test]
fn with() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
assert_bindings(&sym, false, &["0<>"]);
}
#[test]
fn set_may_be_unbound() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
sym.set_may_be_unbound();
assert_bindings(&sym, true, &["0<>"]);
}
#[test]
fn record_constraint() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
sym.record_constraint(ScopedConstraintId::from_u32(0));
assert_bindings(&sym, false, &["0<0>"]);
}
#[test]
fn merge() {
// merging the same definition with the same constraint keeps the constraint
let mut sym0a = SymbolState::undefined();
sym0a.record_binding(ScopedDefinitionId::from_u32(0));
sym0a.record_constraint(ScopedConstraintId::from_u32(0));
let mut sym0b = SymbolState::undefined();
sym0b.record_binding(ScopedDefinitionId::from_u32(0));
sym0b.record_constraint(ScopedConstraintId::from_u32(0));
sym0a.merge(sym0b);
let mut sym0 = sym0a;
assert_bindings(&sym0, false, &["0<0>"]);
// merging the same definition with differing constraints drops all constraints
let mut sym1a = SymbolState::undefined();
sym1a.record_binding(ScopedDefinitionId::from_u32(1));
sym1a.record_constraint(ScopedConstraintId::from_u32(1));
let mut sym1b = SymbolState::undefined();
sym1b.record_binding(ScopedDefinitionId::from_u32(1));
sym1b.record_constraint(ScopedConstraintId::from_u32(2));
sym1a.merge(sym1b);
let sym1 = sym1a;
assert_bindings(&sym1, false, &["1<>"]);
// merging a constrained definition with unbound keeps both
let mut sym2a = SymbolState::undefined();
sym2a.record_binding(ScopedDefinitionId::from_u32(2));
sym2a.record_constraint(ScopedConstraintId::from_u32(3));
let sym2b = SymbolState::undefined();
sym2a.merge(sym2b);
let sym2 = sym2a;
assert_bindings(&sym2, true, &["2<3>"]);
// merging different definitions keeps them each with their existing constraints
sym0.merge(sym2);
let sym = sym0;
assert_bindings(&sym, true, &["0<0>", "2<3>"]);
}
#[test]
fn no_declaration() {
let sym = SymbolState::undefined();
assert_declarations(&sym, true, &[]);
}
#[test]
fn record_declaration() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
assert_declarations(&sym, false, &[1]);
}
#[test]
fn record_declaration_override() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
sym.record_declaration(ScopedDefinitionId::from_u32(2));
assert_declarations(&sym, false, &[2]);
}
#[test]
fn record_declaration_merge() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
let mut sym2 = SymbolState::undefined();
sym2.record_declaration(ScopedDefinitionId::from_u32(2));
sym.merge(sym2);
assert_declarations(&sym, false, &[1, 2]);
}
#[test]
fn record_declaration_merge_partial_undeclared() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
let sym2 = SymbolState::undefined();
sym.merge(sym2);
assert_declarations(&sym, true, &[1]);
}
#[test]
fn set_may_be_undeclared() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(0));
sym.set_may_be_undeclared();
assert_declarations(&sym, true, &[0]);
}
}


@@ -1,250 +0,0 @@
use ruff_db::files::{File, FilePath};
use ruff_db::source::line_index;
use ruff_python_ast as ast;
use ruff_python_ast::{Expr, ExpressionRef};
use ruff_source_file::LineIndex;
use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::semantic_index;
use crate::types::{binding_ty, global_symbol_ty, infer_scope_types, Type};
use crate::Db;
pub struct SemanticModel<'db> {
db: &'db dyn Db,
file: File,
}
impl<'db> SemanticModel<'db> {
pub fn new(db: &'db dyn Db, file: File) -> Self {
Self { db, file }
}
// TODO we don't actually want to expose the Db directly to lint rules, but we need to find a
// solution for exposing information from types
pub fn db(&self) -> &dyn Db {
self.db
}
pub fn file_path(&self) -> &FilePath {
self.file.path(self.db)
}
pub fn line_index(&self) -> LineIndex {
line_index(self.db.upcast(), self.file)
}
pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
resolve_module(self.db, module_name)
}
pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
global_symbol_ty(self.db, module.file(), symbol_name)
}
}
pub trait HasTy {
/// Returns the inferred type of `self`.
///
/// ## Panics
/// May panic if `self` is from another file than `model`.
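///
/// ## Example
///
/// A rough usage sketch, mirroring the tests below (`db`, `file`, and `expr` are assumed to
/// already exist; `expr` can be any AST node that implements `HasTy`):
///
/// ```ignore
/// let model = SemanticModel::new(&db, file);
/// let ty = expr.ty(&model);
/// ```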
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
}
impl HasTy for ast::ExpressionRef<'_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let file_scope = index.expression_scope_id(*self);
let scope = file_scope.to_scope_id(model.db, model.file);
let expression_id = self.scoped_ast_id(model.db, scope);
infer_scope_types(model.db, scope).expression_ty(expression_id)
}
}
macro_rules! impl_expression_has_ty {
($ty: ty) => {
impl HasTy for $ty {
#[inline]
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let expression_ref = ExpressionRef::from(self);
expression_ref.ty(model)
}
}
};
}
impl_expression_has_ty!(ast::ExprBoolOp);
impl_expression_has_ty!(ast::ExprNamed);
impl_expression_has_ty!(ast::ExprBinOp);
impl_expression_has_ty!(ast::ExprUnaryOp);
impl_expression_has_ty!(ast::ExprLambda);
impl_expression_has_ty!(ast::ExprIf);
impl_expression_has_ty!(ast::ExprDict);
impl_expression_has_ty!(ast::ExprSet);
impl_expression_has_ty!(ast::ExprListComp);
impl_expression_has_ty!(ast::ExprSetComp);
impl_expression_has_ty!(ast::ExprDictComp);
impl_expression_has_ty!(ast::ExprGenerator);
impl_expression_has_ty!(ast::ExprAwait);
impl_expression_has_ty!(ast::ExprYield);
impl_expression_has_ty!(ast::ExprYieldFrom);
impl_expression_has_ty!(ast::ExprCompare);
impl_expression_has_ty!(ast::ExprCall);
impl_expression_has_ty!(ast::ExprFString);
impl_expression_has_ty!(ast::ExprStringLiteral);
impl_expression_has_ty!(ast::ExprBytesLiteral);
impl_expression_has_ty!(ast::ExprNumberLiteral);
impl_expression_has_ty!(ast::ExprBooleanLiteral);
impl_expression_has_ty!(ast::ExprNoneLiteral);
impl_expression_has_ty!(ast::ExprEllipsisLiteral);
impl_expression_has_ty!(ast::ExprAttribute);
impl_expression_has_ty!(ast::ExprSubscript);
impl_expression_has_ty!(ast::ExprStarred);
impl_expression_has_ty!(ast::ExprName);
impl_expression_has_ty!(ast::ExprList);
impl_expression_has_ty!(ast::ExprTuple);
impl_expression_has_ty!(ast::ExprSlice);
impl_expression_has_ty!(ast::ExprIpyEscapeCommand);
impl HasTy for ast::Expr {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
match self {
Expr::BoolOp(inner) => inner.ty(model),
Expr::Named(inner) => inner.ty(model),
Expr::BinOp(inner) => inner.ty(model),
Expr::UnaryOp(inner) => inner.ty(model),
Expr::Lambda(inner) => inner.ty(model),
Expr::If(inner) => inner.ty(model),
Expr::Dict(inner) => inner.ty(model),
Expr::Set(inner) => inner.ty(model),
Expr::ListComp(inner) => inner.ty(model),
Expr::SetComp(inner) => inner.ty(model),
Expr::DictComp(inner) => inner.ty(model),
Expr::Generator(inner) => inner.ty(model),
Expr::Await(inner) => inner.ty(model),
Expr::Yield(inner) => inner.ty(model),
Expr::YieldFrom(inner) => inner.ty(model),
Expr::Compare(inner) => inner.ty(model),
Expr::Call(inner) => inner.ty(model),
Expr::FString(inner) => inner.ty(model),
Expr::StringLiteral(inner) => inner.ty(model),
Expr::BytesLiteral(inner) => inner.ty(model),
Expr::NumberLiteral(inner) => inner.ty(model),
Expr::BooleanLiteral(inner) => inner.ty(model),
Expr::NoneLiteral(inner) => inner.ty(model),
Expr::EllipsisLiteral(inner) => inner.ty(model),
Expr::Attribute(inner) => inner.ty(model),
Expr::Subscript(inner) => inner.ty(model),
Expr::Starred(inner) => inner.ty(model),
Expr::Name(inner) => inner.ty(model),
Expr::List(inner) => inner.ty(model),
Expr::Tuple(inner) => inner.ty(model),
Expr::Slice(inner) => inner.ty(model),
Expr::IpyEscapeCommand(inner) => inner.ty(model),
}
}
}
macro_rules! impl_binding_has_ty {
($ty: ty) => {
impl HasTy for $ty {
#[inline]
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let binding = index.definition(self);
binding_ty(model.db, binding)
}
}
};
}
impl_binding_has_ty!(ast::StmtFunctionDef);
impl_binding_has_ty!(ast::StmtClassDef);
impl_binding_has_ty!(ast::Alias);
impl_binding_has_ty!(ast::Parameter);
impl_binding_has_ty!(ast::ParameterWithDefault);
#[cfg(test)]
mod tests {
use ruff_db::files::system_path_to_file;
use ruff_db::parsed::parsed_module;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::types::Type;
use crate::{HasTy, ProgramSettings, SemanticModel};
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
let mut db = TestDb::new();
db.write_files(files)?;
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
},
)?;
Ok(db)
}
#[test]
fn function_ty() -> anyhow::Result<()> {
let db = setup_db([("/src/foo.py", "def test(): pass")])?;
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
let ast = parsed_module(&db, foo);
let function = ast.suite()[0].as_function_def_stmt().unwrap();
let model = SemanticModel::new(&db, foo);
let ty = function.ty(&model);
assert!(matches!(ty, Type::Function(_)));
Ok(())
}
#[test]
fn class_ty() -> anyhow::Result<()> {
let db = setup_db([("/src/foo.py", "class Test: pass")])?;
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
let ast = parsed_module(&db, foo);
let class = ast.suite()[0].as_class_def_stmt().unwrap();
let model = SemanticModel::new(&db, foo);
let ty = class.ty(&model);
assert!(matches!(ty, Type::Class(_)));
Ok(())
}
#[test]
fn alias_ty() -> anyhow::Result<()> {
let db = setup_db([
("/src/foo.py", "class Test: pass"),
("/src/bar.py", "from foo import Test"),
])?;
let bar = system_path_to_file(&db, "/src/bar.py").unwrap();
let ast = parsed_module(&db, bar);
let import = ast.suite()[0].as_import_from_stmt().unwrap();
let alias = &import.names[0];
let model = SemanticModel::new(&db, bar);
let ty = alias.ty(&model);
assert!(matches!(ty, Type::Class(_)));
Ok(())
}
}


@@ -1,842 +0,0 @@
//! Utilities for finding the `site-packages` directory,
//! into which third-party packages are installed.
//!
//! The routines exposed by this module have different behaviour depending
//! on the platform of the *host machine*, which may be
//! different from the *target platform for type checking*. (A user
//! might be running red-knot on a Windows machine, but might
//! reasonably ask us to type-check code assuming that the code runs
//! on Linux.)
use std::fmt;
use std::io;
use std::num::NonZeroUsize;
use std::ops::Deref;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use crate::PythonVersion;
type SitePackagesDiscoveryResult<T> = Result<T, SitePackagesDiscoveryError>;
/// Abstraction for a Python virtual environment.
///
/// Most of this information is derived from the virtual environment's `pyvenv.cfg` file.
/// The format of this file is not defined anywhere, and exactly which keys are present
/// depends on the tool that was used to create the virtual environment.
#[derive(Debug)]
pub(crate) struct VirtualEnvironment {
venv_path: SysPrefixPath,
base_executable_home_path: PythonHomePath,
include_system_site_packages: bool,
/// The version of the Python executable that was used to create this virtual environment.
///
/// The Python version is encoded under different keys and in different formats
/// by different virtual-environment creation tools,
/// and the key is never read by the standard-library `site.py` module,
/// so it's possible that we might not be able to find this information
/// in an acceptable format under any of the keys we expect.
/// This field will be `None` if so.
version: Option<PythonVersion>,
}
impl VirtualEnvironment {
pub(crate) fn new(
path: impl AsRef<SystemPath>,
system: &dyn System,
) -> SitePackagesDiscoveryResult<Self> {
Self::new_impl(path.as_ref(), system)
}
fn new_impl(path: &SystemPath, system: &dyn System) -> SitePackagesDiscoveryResult<Self> {
fn pyvenv_cfg_line_number(index: usize) -> NonZeroUsize {
index.checked_add(1).and_then(NonZeroUsize::new).unwrap()
}
let venv_path = SysPrefixPath::new(path, system)?;
let pyvenv_cfg_path = venv_path.join("pyvenv.cfg");
tracing::debug!("Attempting to parse virtual environment metadata at '{pyvenv_cfg_path}'");
let pyvenv_cfg = system
.read_to_string(&pyvenv_cfg_path)
.map_err(SitePackagesDiscoveryError::NoPyvenvCfgFile)?;
let mut include_system_site_packages = false;
let mut base_executable_home_path = None;
let mut version_info_string = None;
// A `pyvenv.cfg` file *looks* like a `.ini` file, but actually isn't valid `.ini` syntax!
// The Python standard-library's `site` module parses these files by splitting each line on
// '=' characters, so that's what we should do as well.
//
// See also: https://snarky.ca/how-virtual-environments-work/
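//
// For illustration only (keys and values vary by the tool that created the environment),
// a `pyvenv.cfg` file might contain lines such as:
//
//     home = /usr/local/bin
//     include-system-site-packages = false
//     version_info = 3.12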
for (index, line) in pyvenv_cfg.lines().enumerate() {
if let Some((key, value)) = line.split_once('=') {
let key = key.trim();
if key.is_empty() {
return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
pyvenv_cfg_path,
PyvenvCfgParseErrorKind::MalformedKeyValuePair {
line_number: pyvenv_cfg_line_number(index),
},
));
}
let value = value.trim();
if value.is_empty() {
return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
pyvenv_cfg_path,
PyvenvCfgParseErrorKind::MalformedKeyValuePair {
line_number: pyvenv_cfg_line_number(index),
},
));
}
if value.contains('=') {
return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
pyvenv_cfg_path,
PyvenvCfgParseErrorKind::TooManyEquals {
line_number: pyvenv_cfg_line_number(index),
},
));
}
match key {
"include-system-site-packages" => {
include_system_site_packages = value.eq_ignore_ascii_case("true");
}
"home" => base_executable_home_path = Some(value),
// `virtualenv` and `uv` call this key `version_info`,
// but the stdlib venv module calls it `version`
"version" | "version_info" => version_info_string = Some(value),
_ => continue,
}
}
}
// The `home` key is read by the standard library's `site.py` module,
// so if it's missing from the `pyvenv.cfg` file
// (or the provided value is invalid),
// it's reasonable to consider the virtual environment irredeemably broken.
let Some(base_executable_home_path) = base_executable_home_path else {
return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
pyvenv_cfg_path,
PyvenvCfgParseErrorKind::NoHomeKey,
));
};
let base_executable_home_path = PythonHomePath::new(base_executable_home_path, system)
.map_err(|io_err| {
SitePackagesDiscoveryError::PyvenvCfgParseError(
pyvenv_cfg_path,
PyvenvCfgParseErrorKind::InvalidHomeValue(io_err),
)
})?;
// but the `version`/`version_info` key is not read by the standard library,
// and is provided under different keys depending on which virtual-environment creation tool
// created the `pyvenv.cfg` file. Lenient parsing is appropriate here:
// the file isn't really *invalid* if it doesn't have this key,
// or if the value doesn't parse according to our expectations.
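//
// For example (illustrative only), both `version = 3.12` and `version_info = 3.12.0rc2`
// parse to Python 3.12 here, because we only look at the first two '.'-separated parts.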
let version = version_info_string.and_then(|version_string| {
let mut version_info_parts = version_string.split('.');
let (major, minor) = (version_info_parts.next()?, version_info_parts.next()?);
PythonVersion::try_from((major, minor)).ok()
});
let metadata = Self {
venv_path,
base_executable_home_path,
include_system_site_packages,
version,
};
tracing::trace!("Resolved metadata for virtual environment: {metadata:?}");
Ok(metadata)
}
/// Return a list of `site-packages` directories that are available from this virtual environment
///
/// See the documentation for `site_packages_dir_from_sys_prefix` for more details.
pub(crate) fn site_packages_directories(
&self,
system: &dyn System,
) -> SitePackagesDiscoveryResult<Vec<SystemPathBuf>> {
let VirtualEnvironment {
venv_path,
base_executable_home_path,
include_system_site_packages,
version,
} = self;
let mut site_packages_directories = vec![site_packages_directory_from_sys_prefix(
venv_path, *version, system,
)?];
if *include_system_site_packages {
let system_sys_prefix =
SysPrefixPath::from_executable_home_path(base_executable_home_path);
// If we fail to resolve the `sys.prefix` path from the base executable home path,
// or if we fail to resolve the `site-packages` from the `sys.prefix` path,
// we should probably print a warning but *not* abort type checking
if let Some(sys_prefix_path) = system_sys_prefix {
match site_packages_directory_from_sys_prefix(&sys_prefix_path, *version, system) {
Ok(site_packages_directory) => {
site_packages_directories.push(site_packages_directory);
}
Err(error) => tracing::warn!(
"{error}. System site-packages will not be used for module resolution."
),
}
} else {
tracing::warn!(
"Failed to resolve `sys.prefix` of the system Python installation \
from the `home` value in the `pyvenv.cfg` file at '{}'. \
System site-packages will not be used for module resolution.",
venv_path.join("pyvenv.cfg")
);
}
}
tracing::debug!("Resolved site-packages directories for this virtual environment are: {site_packages_directories:?}");
Ok(site_packages_directories)
}
}
#[derive(Debug, thiserror::Error)]
pub(crate) enum SitePackagesDiscoveryError {
#[error("Invalid --venv-path argument: {0} could not be canonicalized")]
VenvDirCanonicalizationError(SystemPathBuf, #[source] io::Error),
#[error("Invalid --venv-path argument: {0} does not point to a directory on disk")]
VenvDirIsNotADirectory(SystemPathBuf),
#[error("--venv-path points to a broken venv with no pyvenv.cfg file")]
NoPyvenvCfgFile(#[source] io::Error),
#[error("Failed to parse the pyvenv.cfg file at {0} because {1}")]
PyvenvCfgParseError(SystemPathBuf, PyvenvCfgParseErrorKind),
#[error("Failed to search the `lib` directory of the Python installation at {1} for `site-packages`")]
CouldNotReadLibDirectory(#[source] io::Error, SysPrefixPath),
#[error("Could not find the `site-packages` directory for the Python installation at {0}")]
NoSitePackagesDirFound(SysPrefixPath),
}
/// The various ways in which parsing a `pyvenv.cfg` file could fail
#[derive(Debug)]
pub(crate) enum PyvenvCfgParseErrorKind {
TooManyEquals { line_number: NonZeroUsize },
MalformedKeyValuePair { line_number: NonZeroUsize },
NoHomeKey,
InvalidHomeValue(io::Error),
}
impl fmt::Display for PyvenvCfgParseErrorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::TooManyEquals { line_number } => {
write!(f, "line {line_number} has too many '=' characters")
}
Self::MalformedKeyValuePair { line_number } => write!(
f,
"line {line_number} has a malformed `<key> = <value>` pair"
),
Self::NoHomeKey => f.write_str("the file does not have a `home` key"),
Self::InvalidHomeValue(io_err) => {
write!(
f,
"the following error was encountered \
when trying to resolve the `home` value to a directory on disk: {io_err}"
)
}
}
}
}
/// Attempt to retrieve the `site-packages` directory
/// associated with a given Python installation.
///
/// The location of the `site-packages` directory can vary according to the
/// Python version that this installation represents. The Python version may
/// or may not be known at this point, which is why the `python_version`
/// parameter is an `Option`.
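///
/// For example (paths are illustrative), given a `sys.prefix` of `/.venv`, this function looks
/// for a `Lib\site-packages` subdirectory on Windows and a `lib/python3.X/site-packages`
/// (or `lib/python3.Xt/site-packages` for free-threaded builds) subdirectory elsewhere,
/// falling back to scanning the `lib/` directory when the Python version is unknown.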
fn site_packages_directory_from_sys_prefix(
sys_prefix_path: &SysPrefixPath,
python_version: Option<PythonVersion>,
system: &dyn System,
) -> SitePackagesDiscoveryResult<SystemPathBuf> {
tracing::debug!("Searching for site-packages directory in {sys_prefix_path}");
if cfg!(target_os = "windows") {
let site_packages = sys_prefix_path.join(r"Lib\site-packages");
return system
.is_directory(&site_packages)
.then_some(site_packages)
.ok_or(SitePackagesDiscoveryError::NoSitePackagesDirFound(
sys_prefix_path.to_owned(),
));
}
// In the Python standard library's `site.py` module (used for finding `site-packages`
// at runtime), we can find this in [the non-Windows branch]:
//
// ```py
// libdirs = [sys.platlibdir]
// if sys.platlibdir != "lib":
// libdirs.append("lib")
// ```
//
// Pyright therefore searches for both a `lib/python3.X/site-packages` directory
// and a `lib64/python3.X/site-packages` directory on non-MacOS Unix systems,
// since `sys.platlibdir` can sometimes be equal to `"lib64"`.
//
// However, we only care about the `site-packages` directory insofar as it allows
// us to discover Python source code that can be used for inferring type
// information regarding third-party dependencies. That means that we don't need
// to care about any possible `lib64/site-packages` directories, since
// [the `sys`-module documentation] states that `sys.platlibdir` is *only* ever
// used for C extensions, never for pure-Python modules.
//
// [the non-Windows branch]: https://github.com/python/cpython/blob/a8be8fc6c4682089be45a87bd5ee1f686040116c/Lib/site.py#L401-L410
// [the `sys`-module documentation]: https://docs.python.org/3/library/sys.html#sys.platlibdir
// If we were able to figure out what Python version this installation is,
// we should be able to avoid iterating through all items in the `lib/` directory:
if let Some(version) = python_version {
let expected_path = sys_prefix_path.join(format!("lib/python{version}/site-packages"));
if system.is_directory(&expected_path) {
return Ok(expected_path);
}
if version.free_threaded_build_available() {
// Nearly the same as `expected_path`, but with an additional `t` after {version}:
let alternative_path =
sys_prefix_path.join(format!("lib/python{version}t/site-packages"));
if system.is_directory(&alternative_path) {
return Ok(alternative_path);
}
}
}
// Either we couldn't figure out the version before calling this function
// (e.g., from a `pyvenv.cfg` file if this was a venv),
// or we couldn't find a `site-packages` folder at the expected location given
// the parsed version
//
// Note: the `python3.x` part of the `site-packages` path can't be computed from
// the `--target-version` the user has passed, as they might be running Python 3.12 locally
// even if they've requested that we type check their code "as if" they're running 3.8.
for entry_result in system
.read_directory(&sys_prefix_path.join("lib"))
.map_err(|io_err| {
SitePackagesDiscoveryError::CouldNotReadLibDirectory(io_err, sys_prefix_path.to_owned())
})?
{
let Ok(entry) = entry_result else {
continue;
};
if !entry.file_type().is_directory() {
continue;
}
let mut path = entry.into_path();
let name = path
.file_name()
.expect("File name to be non-null because path is guaranteed to be a child of `lib`");
if !name.starts_with("python3.") {
continue;
}
path.push("site-packages");
if system.is_directory(&path) {
return Ok(path);
}
}
Err(SitePackagesDiscoveryError::NoSitePackagesDirFound(
sys_prefix_path.to_owned(),
))
}
/// A path that represents the value of [`sys.prefix`] at runtime in Python
/// for a given Python executable.
///
/// For the case of a virtual environment, where a
/// Python binary is at `/.venv/bin/python`, `sys.prefix` is the path to
/// the virtual environment the Python binary lies inside, i.e. `/.venv`,
/// and `site-packages` will be at `.venv/lib/python3.X/site-packages`.
/// System Python installations generally work the same way: if a system
/// Python installation lies at `/opt/homebrew/bin/python`, `sys.prefix`
/// will be `/opt/homebrew`, and `site-packages` will be at
/// `/opt/homebrew/lib/python3.X/site-packages`.
///
/// [`sys.prefix`]: https://docs.python.org/3/library/sys.html#sys.prefix
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct SysPrefixPath(SystemPathBuf);
impl SysPrefixPath {
fn new(
unvalidated_path: impl AsRef<SystemPath>,
system: &dyn System,
) -> SitePackagesDiscoveryResult<Self> {
Self::new_impl(unvalidated_path.as_ref(), system)
}
fn new_impl(
unvalidated_path: &SystemPath,
system: &dyn System,
) -> SitePackagesDiscoveryResult<Self> {
// It's important to resolve symlinks here rather than simply making the path absolute,
// since system Python installations often only put symlinks in the "expected"
// locations for `home` and `site-packages`
let canonicalized = system
.canonicalize_path(unvalidated_path)
.map_err(|io_err| {
SitePackagesDiscoveryError::VenvDirCanonicalizationError(
unvalidated_path.to_path_buf(),
io_err,
)
})?;
system
.is_directory(&canonicalized)
.then_some(Self(canonicalized))
.ok_or_else(|| {
SitePackagesDiscoveryError::VenvDirIsNotADirectory(unvalidated_path.to_path_buf())
})
}
fn from_executable_home_path(path: &PythonHomePath) -> Option<Self> {
// No need to check whether `path.parent()` is a directory:
// the parent of a canonicalised path that is known to exist
// is guaranteed to be a directory.
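//
// For example (illustrative), a Unix `home` value of `/opt/homebrew/bin` maps to a
// `sys.prefix` of `/opt/homebrew`, whereas on Windows the `home` directory is used as
// `sys.prefix` directly.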
if cfg!(target_os = "windows") {
Some(Self(path.to_path_buf()))
} else {
path.parent().map(|path| Self(path.to_path_buf()))
}
}
}
impl Deref for SysPrefixPath {
type Target = SystemPath;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl fmt::Display for SysPrefixPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "`sys.prefix` path '{}'", self.0)
}
}
/// The value given by the `home` key in `pyvenv.cfg` files.
///
/// This is equivalent to `{sys_prefix_path}/bin`, and points
/// to a directory in which a Python executable can be found.
/// Confusingly, it is *not* the same as the [`PYTHONHOME`]
/// environment variable that Python provides! However, it's
/// consistent among all mainstream creators of Python virtual
/// environments (the stdlib Python `venv` module, the third-party
/// `virtualenv` library, and `uv`), was specified by
/// [the original PEP adding the `venv` module],
/// and it's one of the few fields that's read by the Python
/// standard library's `site.py` module.
///
/// Although it doesn't appear to be specified anywhere,
/// all existing virtual environment tools always use an absolute path
/// for the `home` value, and the Python standard library also assumes
/// that the `home` value will be an absolute path.
///
/// Other values, such as the path to the Python executable or the
/// base-executable `sys.prefix` value, are either only provided in
/// `pyvenv.cfg` files by some virtual-environment creators,
/// or are included under different keys depending on which
/// virtual-environment creation tool you've used.
///
/// [`PYTHONHOME`]: https://docs.python.org/3/using/cmdline.html#envvar-PYTHONHOME
/// [the original PEP adding the `venv` module]: https://peps.python.org/pep-0405/
#[derive(Debug, PartialEq, Eq)]
struct PythonHomePath(SystemPathBuf);
impl PythonHomePath {
fn new(path: impl AsRef<SystemPath>, system: &dyn System) -> io::Result<Self> {
let path = path.as_ref();
// It's important to resolve symlinks here rather than simply making the path absolute,
// since system Python installations often only put symlinks in the "expected"
// locations for `home` and `site-packages`
let canonicalized = system.canonicalize_path(path)?;
system
.is_directory(&canonicalized)
.then_some(Self(canonicalized))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "not a directory"))
}
}
impl Deref for PythonHomePath {
type Target = SystemPath;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl fmt::Display for PythonHomePath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "`home` location '{}'", self.0)
}
}
impl PartialEq<SystemPath> for PythonHomePath {
fn eq(&self, other: &SystemPath) -> bool {
&*self.0 == other
}
}
impl PartialEq<SystemPathBuf> for PythonHomePath {
fn eq(&self, other: &SystemPathBuf) -> bool {
self == &**other
}
}
#[cfg(test)]
mod tests {
use ruff_db::system::TestSystem;
use super::*;
struct VirtualEnvironmentTester {
system: TestSystem,
minor_version: u8,
free_threaded: bool,
system_site_packages: bool,
pyvenv_cfg_version_field: Option<&'static str>,
}
impl VirtualEnvironmentTester {
/// Builds a mock virtual environment, and returns the path to the venv
fn build_mock_venv(&self) -> SystemPathBuf {
let VirtualEnvironmentTester {
system,
minor_version,
system_site_packages,
free_threaded,
pyvenv_cfg_version_field,
} = self;
let memory_fs = system.memory_file_system();
let unix_site_packages = if *free_threaded {
format!("lib/python3.{minor_version}t/site-packages")
} else {
format!("lib/python3.{minor_version}/site-packages")
};
let system_install_sys_prefix =
SystemPathBuf::from(&*format!("/Python3.{minor_version}"));
let (system_home_path, system_exe_path, system_site_packages_path) =
if cfg!(target_os = "windows") {
let system_home_path = system_install_sys_prefix.clone();
let system_exe_path = system_home_path.join("python.exe");
let system_site_packages_path =
system_install_sys_prefix.join(r"Lib\site-packages");
(system_home_path, system_exe_path, system_site_packages_path)
} else {
let system_home_path = system_install_sys_prefix.join("bin");
let system_exe_path = system_home_path.join("python");
let system_site_packages_path =
system_install_sys_prefix.join(&unix_site_packages);
(system_home_path, system_exe_path, system_site_packages_path)
};
memory_fs.write_file(system_exe_path, "").unwrap();
memory_fs
.create_directory_all(&system_site_packages_path)
.unwrap();
let venv_sys_prefix = SystemPathBuf::from("/.venv");
let (venv_exe, site_packages_path) = if cfg!(target_os = "windows") {
(
venv_sys_prefix.join(r"Scripts\python.exe"),
venv_sys_prefix.join(r"Lib\site-packages"),
)
} else {
(
venv_sys_prefix.join("bin/python"),
venv_sys_prefix.join(&unix_site_packages),
)
};
memory_fs.write_file(&venv_exe, "").unwrap();
memory_fs.create_directory_all(&site_packages_path).unwrap();
let pyvenv_cfg_path = venv_sys_prefix.join("pyvenv.cfg");
let mut pyvenv_cfg_contents = format!("home = {system_home_path}\n");
if let Some(version_field) = pyvenv_cfg_version_field {
pyvenv_cfg_contents.push_str(version_field);
pyvenv_cfg_contents.push('\n');
}
// Deliberately using weird casing here to test that our pyvenv.cfg parsing is case-insensitive:
if *system_site_packages {
pyvenv_cfg_contents.push_str("include-system-site-packages = TRuE\n");
}
memory_fs
.write_file(pyvenv_cfg_path, &pyvenv_cfg_contents)
.unwrap();
venv_sys_prefix
}
fn test(self) {
let venv_path = self.build_mock_venv();
let venv = VirtualEnvironment::new(venv_path.clone(), &self.system).unwrap();
assert_eq!(
venv.venv_path,
SysPrefixPath(self.system.canonicalize_path(&venv_path).unwrap())
);
assert_eq!(venv.include_system_site_packages, self.system_site_packages);
if self.pyvenv_cfg_version_field.is_some() {
assert_eq!(
venv.version,
Some(PythonVersion {
major: 3,
minor: self.minor_version
})
);
} else {
assert_eq!(venv.version, None);
}
let expected_home = if cfg!(target_os = "windows") {
SystemPathBuf::from(&*format!(r"\Python3.{}", self.minor_version))
} else {
SystemPathBuf::from(&*format!("/Python3.{}/bin", self.minor_version))
};
assert_eq!(venv.base_executable_home_path, expected_home);
let site_packages_directories = venv.site_packages_directories(&self.system).unwrap();
let expected_venv_site_packages = if cfg!(target_os = "windows") {
SystemPathBuf::from(r"\.venv\Lib\site-packages")
} else if self.free_threaded {
SystemPathBuf::from(&*format!(
"/.venv/lib/python3.{}t/site-packages",
self.minor_version
))
} else {
SystemPathBuf::from(&*format!(
"/.venv/lib/python3.{}/site-packages",
self.minor_version
))
};
let expected_system_site_packages = if cfg!(target_os = "windows") {
SystemPathBuf::from(&*format!(
r"\Python3.{}\Lib\site-packages",
self.minor_version
))
} else if self.free_threaded {
SystemPathBuf::from(&*format!(
"/Python3.{minor_version}/lib/python3.{minor_version}t/site-packages",
minor_version = self.minor_version
))
} else {
SystemPathBuf::from(&*format!(
"/Python3.{minor_version}/lib/python3.{minor_version}/site-packages",
minor_version = self.minor_version
))
};
if self.system_site_packages {
assert_eq!(
&site_packages_directories,
&[expected_venv_site_packages, expected_system_site_packages]
);
} else {
assert_eq!(&site_packages_directories, &[expected_venv_site_packages]);
}
}
}
#[test]
fn can_find_site_packages_directory_no_version_field_in_pyvenv_cfg() {
let tester = VirtualEnvironmentTester {
system: TestSystem::default(),
minor_version: 12,
free_threaded: false,
system_site_packages: false,
pyvenv_cfg_version_field: None,
};
tester.test();
}
#[test]
fn can_find_site_packages_directory_venv_style_version_field_in_pyvenv_cfg() {
let tester = VirtualEnvironmentTester {
system: TestSystem::default(),
minor_version: 12,
free_threaded: false,
system_site_packages: false,
pyvenv_cfg_version_field: Some("version = 3.12"),
};
tester.test();
}
#[test]
fn can_find_site_packages_directory_uv_style_version_field_in_pyvenv_cfg() {
let tester = VirtualEnvironmentTester {
system: TestSystem::default(),
minor_version: 12,
free_threaded: false,
system_site_packages: false,
pyvenv_cfg_version_field: Some("version_info = 3.12"),
};
tester.test();
}
#[test]
fn can_find_site_packages_directory_virtualenv_style_version_field_in_pyvenv_cfg() {
let tester = VirtualEnvironmentTester {
system: TestSystem::default(),
minor_version: 12,
free_threaded: false,
system_site_packages: false,
pyvenv_cfg_version_field: Some("version_info = 3.12.0rc2"),
};
tester.test();
}
#[test]
fn can_find_site_packages_directory_freethreaded_build() {
let tester = VirtualEnvironmentTester {
system: TestSystem::default(),
minor_version: 13,
free_threaded: true,
system_site_packages: false,
pyvenv_cfg_version_field: Some("version_info = 3.13"),
};
tester.test();
}
#[test]
fn finds_system_site_packages() {
let tester = VirtualEnvironmentTester {
system: TestSystem::default(),
minor_version: 13,
free_threaded: true,
system_site_packages: true,
pyvenv_cfg_version_field: Some("version_info = 3.13"),
};
tester.test();
}
#[test]
fn reject_venv_that_does_not_exist() {
let system = TestSystem::default();
assert!(matches!(
VirtualEnvironment::new("/.venv", &system),
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(_))
));
}
#[test]
fn reject_venv_with_no_pyvenv_cfg_file() {
let system = TestSystem::default();
system
.memory_file_system()
.create_directory_all("/.venv")
.unwrap();
assert!(matches!(
VirtualEnvironment::new("/.venv", &system),
Err(SitePackagesDiscoveryError::NoPyvenvCfgFile(_))
));
}
#[test]
fn parsing_pyvenv_cfg_with_too_many_equals() {
let system = TestSystem::default();
let memory_fs = system.memory_file_system();
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
memory_fs
.write_file(&pyvenv_cfg_path, "home = bar = /.venv/bin")
.unwrap();
let venv_result = VirtualEnvironment::new("/.venv", &system);
assert!(matches!(
venv_result,
Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
path,
PyvenvCfgParseErrorKind::TooManyEquals { line_number }
))
if path == pyvenv_cfg_path && Some(line_number) == NonZeroUsize::new(1)
));
}
#[test]
fn parsing_pyvenv_cfg_with_key_but_no_value_fails() {
let system = TestSystem::default();
let memory_fs = system.memory_file_system();
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
memory_fs.write_file(&pyvenv_cfg_path, "home =").unwrap();
let venv_result = VirtualEnvironment::new("/.venv", &system);
assert!(matches!(
venv_result,
Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
path,
PyvenvCfgParseErrorKind::MalformedKeyValuePair { line_number }
))
if path == pyvenv_cfg_path && Some(line_number) == NonZeroUsize::new(1)
));
}
#[test]
fn parsing_pyvenv_cfg_with_value_but_no_key_fails() {
let system = TestSystem::default();
let memory_fs = system.memory_file_system();
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
memory_fs
.write_file(&pyvenv_cfg_path, "= whatever")
.unwrap();
let venv_result = VirtualEnvironment::new("/.venv", &system);
assert!(matches!(
venv_result,
Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
path,
PyvenvCfgParseErrorKind::MalformedKeyValuePair { line_number }
))
if path == pyvenv_cfg_path && Some(line_number) == NonZeroUsize::new(1)
));
}
#[test]
fn parsing_pyvenv_cfg_with_no_home_key_fails() {
let system = TestSystem::default();
let memory_fs = system.memory_file_system();
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
memory_fs.write_file(&pyvenv_cfg_path, "").unwrap();
let venv_result = VirtualEnvironment::new("/.venv", &system);
assert!(matches!(
venv_result,
Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
path,
PyvenvCfgParseErrorKind::NoHomeKey
))
if path == pyvenv_cfg_path
));
}
#[test]
fn parsing_pyvenv_cfg_with_invalid_home_key_fails() {
let system = TestSystem::default();
let memory_fs = system.memory_file_system();
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
memory_fs
.write_file(&pyvenv_cfg_path, "home = foo")
.unwrap();
let venv_result = VirtualEnvironment::new("/.venv", &system);
assert!(matches!(
venv_result,
Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
path,
PyvenvCfgParseErrorKind::InvalidHomeValue(_)
))
if path == pyvenv_cfg_path
));
}
}


@@ -1,87 +0,0 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::types::{global_symbol_ty, Type};
use crate::Db;
/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CoreStdlibModule {
Builtins,
Types,
Typeshed,
TypingExtensions,
}
impl CoreStdlibModule {
fn name(self) -> ModuleName {
let module_name = match self {
Self::Builtins => "builtins",
Self::Types => "types",
Self::Typeshed => "_typeshed",
Self::TypingExtensions => "typing_extensions",
};
ModuleName::new_static(module_name)
.unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
}
}
/// Lookup the type of `symbol` in a given core module
///
/// Returns `Unbound` if the given core module cannot be resolved for some reason.
fn core_module_symbol_ty<'db>(
db: &'db dyn Db,
core_module: CoreStdlibModule,
symbol: &str,
) -> Type<'db> {
resolve_module(db, core_module.name())
.map(|module| global_symbol_ty(db, module.file(), symbol))
.unwrap_or(Type::Unbound)
}
/// Lookup the type of `symbol` in the builtins namespace.
///
/// Returns `Unbound` if the `builtins` module isn't available for some reason.
#[inline]
pub(crate) fn builtins_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Builtins, symbol)
}
/// Lookup the type of `symbol` in the `types` module namespace.
///
/// Returns `Unbound` if the `types` module isn't available for some reason.
#[inline]
pub(crate) fn types_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Types, symbol)
}
/// Lookup the type of `symbol` in the `_typeshed` module namespace.
///
/// Returns `Unbound` if the `_typeshed` module isn't available for some reason.
#[inline]
pub(crate) fn typeshed_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Typeshed, symbol)
}
/// Lookup the type of `symbol` in the `typing_extensions` module namespace.
///
/// Returns `Unbound` if the `typing_extensions` module isn't available for some reason.
#[inline]
pub(crate) fn typing_extensions_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::TypingExtensions, symbol)
}
/// Get the scope of a core stdlib module.
///
/// Can return `None` if a custom typeshed is used that is missing the core module in question.
fn core_module_scope(db: &dyn Db, core_module: CoreStdlibModule) -> Option<ScopeId<'_>> {
resolve_module(db, core_module.name()).map(|module| global_scope(db, module.file()))
}
/// Get the `builtins` module scope.
///
/// Can return `None` if a custom typeshed is used that is missing `builtins.pyi`.
pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
core_module_scope(db, CoreStdlibModule::Builtins)
}
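
For orientation, a minimal sketch (not part of this diff) of how the helpers above are used elsewhere in the crate; `db` stands for any value implementing the crate's `Db` trait, such as the `TestDb` constructed in the builder tests further down:

// Resolve the `str` class from `builtins`, then the type of `str` instances.
let str_instance_ty = builtins_symbol_ty(&db, "str").to_instance(&db);
// The same pattern works for `types` and `typing_extensions`; each lookup
// falls back to `Type::Unbound` if the module cannot be resolved.
let module_type_class = types_symbol_ty(&db, "ModuleType");
let literal_special_form = typing_extensions_symbol_ty(&db, "Literal");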

File diff suppressed because it is too large.


@@ -1,575 +0,0 @@
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead.)
//! * No single-positive-element intersection types. Single-negative-element intersections are
//!   OK; we don't have a standalone negation type, so there's no other representation for them.
//! * The same type should never appear more than once in a union or intersection. (This should
//! be expanded to cover subtyping -- see below -- but for now we only implement it for type
//! identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//! than a union-of-intersections. Unions cannot contain other unions (the inner union just
//! flattens into the outer one), intersections cannot contain other intersections (also
//! flattens), and intersections cannot contain unions (the intersection distributes over the
//! union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//! subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//! eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
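// An illustrative sketch of the DNF invariant (not in the original file; it mirrors
// the `intersection_distributes_over_union` test at the bottom of this file):
//
//     // `ta` is any type, `u0` is the union `T1 | T2`, and `db` is a `&dyn Db`.
//     let ty = IntersectionBuilder::new(db)
//         .add_positive(ta)
//         .add_positive(u0)
//         .build();
//     // `ty` is the union `(ta & T1) | (ta & T2)`, never `ta & (T1 | T2)`.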
use crate::types::{builtins_symbol_ty, IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};
use smallvec::SmallVec;
pub(crate) struct UnionBuilder<'db> {
elements: Vec<Type<'db>>,
db: &'db dyn Db,
}
impl<'db> UnionBuilder<'db> {
pub(crate) fn new(db: &'db dyn Db) -> Self {
Self {
db,
elements: vec![],
}
}
/// Adds a type to this union.
pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
match ty {
Type::Union(union) => {
let new_elements = union.elements(self.db);
self.elements.reserve(new_elements.len());
for element in new_elements {
self = self.add(*element);
}
}
Type::Never => {}
_ => {
let bool_pair = if let Type::BooleanLiteral(b) = ty {
Some(Type::BooleanLiteral(!b))
} else {
None
};
let mut to_add = ty;
let mut to_remove = SmallVec::<[usize; 2]>::new();
for (index, element) in self.elements.iter().enumerate() {
if Some(*element) == bool_pair {
to_add = builtins_symbol_ty(self.db, "bool");
to_remove.push(index);
// The type we are adding is a BooleanLiteral, which doesn't have any
// subtypes. And we just found that the union already contained our
// mirror-image BooleanLiteral, so it can't also contain bool or any
// supertype of bool. Therefore, we are done.
break;
}
if ty.is_subtype_of(self.db, *element) {
return self;
} else if element.is_subtype_of(self.db, ty) {
to_remove.push(index);
}
}
match to_remove[..] {
[] => self.elements.push(to_add),
[index] => self.elements[index] = to_add,
_ => {
let mut current_index = 0;
let mut to_remove = to_remove.into_iter();
let mut next_to_remove_index = to_remove.next();
self.elements.retain(|_| {
let retain = if Some(current_index) == next_to_remove_index {
next_to_remove_index = to_remove.next();
false
} else {
true
};
current_index += 1;
retain
});
self.elements.push(to_add);
}
}
}
}
self
}
pub(crate) fn build(self) -> Type<'db> {
match self.elements.len() {
0 => Type::Never,
1 => self.elements[0],
_ => Type::Union(UnionType::new(self.db, self.elements.into())),
}
}
}
#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
// Really this builds a union-of-intersections, because we always keep our set-theoretic types
// in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
// just a single intersection in this vector, and we are building a single intersection type,
// but if a union is added to the intersection, we'll distribute ourselves over that union and
// create a union of intersections.
intersections: Vec<InnerIntersectionBuilder<'db>>,
db: &'db dyn Db,
}
impl<'db> IntersectionBuilder<'db> {
pub(crate) fn new(db: &'db dyn Db) -> Self {
Self {
db,
intersections: vec![InnerIntersectionBuilder::new()],
}
}
fn empty(db: &'db dyn Db) -> Self {
Self {
db,
intersections: vec![],
}
}
pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
if let Type::Union(union) = ty {
// Distribute ourself over this union: for each union element, clone ourself and
// intersect with that union element, then create a new union-of-intersections with all
// of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
// and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
// not in DNF), we distribute the union and get `(T1 & T3) | (T2 & T3) | (T1 & T4) |
// (T2 & T4)`. If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
// and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
// T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
union
.elements(self.db)
.iter()
.map(|elem| self.clone().add_positive(*elem))
.fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
builder.intersections.extend(sub.intersections);
builder
})
} else {
// If we are already a union-of-intersections, distribute the new intersected element
// across all of those intersections.
for inner in &mut self.intersections {
inner.add_positive(self.db, ty);
}
self
}
}
pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
// See comments above in `add_positive`; this is just the negated version.
if let Type::Union(union) = ty {
union
.elements(self.db)
.iter()
.map(|elem| self.clone().add_negative(*elem))
.fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
builder.intersections.extend(sub.intersections);
builder
})
} else {
for inner in &mut self.intersections {
inner.add_negative(self.db, ty);
}
self
}
}
pub(crate) fn build(mut self) -> Type<'db> {
// Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
if self.intersections.len() == 1 {
self.intersections.pop().unwrap().build(self.db)
} else {
UnionType::from_elements(
self.db,
self.intersections
.into_iter()
.map(|inner| inner.build(self.db)),
)
}
}
}
#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
positive: FxOrderSet<Type<'db>>,
negative: FxOrderSet<Type<'db>>,
}
impl<'db> InnerIntersectionBuilder<'db> {
fn new() -> Self {
Self::default()
}
/// Adds a positive type to this intersection.
fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
match ty {
Type::Intersection(inter) => {
let pos = inter.positive(db);
let neg = inter.negative(db);
self.positive.extend(pos.difference(&self.negative));
self.negative.extend(neg.difference(&self.positive));
self.positive.retain(|elem| !neg.contains(elem));
self.negative.retain(|elem| !pos.contains(elem));
}
_ => {
if !self.negative.remove(&ty) {
self.positive.insert(ty);
};
}
}
}
/// Adds a negative type to this intersection.
fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
// TODO Any/Unknown actually should not self-cancel
match ty {
Type::Intersection(intersection) => {
let pos = intersection.negative(db);
let neg = intersection.positive(db);
self.positive.extend(pos.difference(&self.negative));
self.negative.extend(neg.difference(&self.positive));
self.positive.retain(|elem| !neg.contains(elem));
self.negative.retain(|elem| !pos.contains(elem));
}
Type::Never => {}
Type::Unbound => {}
_ => {
if !self.positive.remove(&ty) {
self.negative.insert(ty);
};
}
}
}
fn simplify(&mut self) {
// TODO this should be generalized based on subtyping; for now we just handle a few cases
// Never is a subtype of all types
if self.positive.contains(&Type::Never) {
self.positive.retain(Type::is_never);
self.negative.clear();
}
if self.positive.contains(&Type::Unbound) {
self.positive.retain(Type::is_unbound);
self.negative.clear();
}
// None intersects only with object
for pos in &self.positive {
if let Type::Instance(_) = pos {
// could be `object` type
} else {
self.negative.remove(&Type::None);
break;
}
}
}
fn build(mut self, db: &'db dyn Db) -> Type<'db> {
self.simplify();
match (self.positive.len(), self.negative.len()) {
(0, 0) => Type::Never,
(1, 0) => self.positive[0],
_ => {
self.positive.shrink_to_fit();
self.negative.shrink_to_fit();
Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
}
}
}
}
#[cfg(test)]
mod tests {
use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::types::{builtins_symbol_ty, UnionBuilder};
use crate::ProgramSettings;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid search path settings");
db
}
#[test]
fn build_union() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let union = UnionType::from_elements(&db, [t0, t1]).expect_union();
assert_eq!(union.elements(&db), &[t0, t1]);
}
#[test]
fn build_union_single() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionType::from_elements(&db, [t0]);
assert_eq!(ty, t0);
}
#[test]
fn build_union_empty() {
let db = setup_db();
let ty = UnionBuilder::new(&db).build();
assert_eq!(ty, Type::Never);
}
#[test]
fn build_union_never() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionType::from_elements(&db, [t0, Type::Never]);
assert_eq!(ty, t0);
}
#[test]
fn build_union_bool() {
let db = setup_db();
let bool_ty = builtins_symbol_ty(&db, "bool");
let t0 = Type::BooleanLiteral(true);
let t1 = Type::BooleanLiteral(true);
let t2 = Type::BooleanLiteral(false);
let t3 = Type::IntLiteral(17);
let union = UnionType::from_elements(&db, [t0, t1, t3]).expect_union();
assert_eq!(union.elements(&db), &[t0, t3]);
let union = UnionType::from_elements(&db, [t0, t1, t2, t3]).expect_union();
assert_eq!(union.elements(&db), &[bool_ty, t3]);
}
#[test]
fn build_union_flatten() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let u1 = UnionType::from_elements(&db, [t0, t1]);
let union = UnionType::from_elements(&db, [u1, t2]).expect_union();
assert_eq!(union.elements(&db), &[t0, t1, t2]);
}
#[test]
fn build_union_simplify_subtype() {
let db = setup_db();
let t0 = builtins_symbol_ty(&db, "str").to_instance(&db);
let t1 = Type::LiteralString;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let u1 = UnionType::from_elements(&db, [t1, t0]);
assert_eq!(u0, t0);
assert_eq!(u1, t0);
}
#[test]
fn build_union_no_simplify_unknown() {
let db = setup_db();
let t0 = builtins_symbol_ty(&db, "str").to_instance(&db);
let t1 = Type::Unknown;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let u1 = UnionType::from_elements(&db, [t1, t0]);
assert_eq!(u0.expect_union().elements(&db), &[t0, t1]);
assert_eq!(u1.expect_union().elements(&db), &[t1, t0]);
}
#[test]
fn build_union_subsume_multiple() {
let db = setup_db();
let str_ty = builtins_symbol_ty(&db, "str").to_instance(&db);
let int_ty = builtins_symbol_ty(&db, "int").to_instance(&db);
let object_ty = builtins_symbol_ty(&db, "object").to_instance(&db);
let unknown_ty = Type::Unknown;
let u0 = UnionType::from_elements(&db, [str_ty, unknown_ty, int_ty, object_ty]);
assert_eq!(u0.expect_union().elements(&db), &[unknown_ty, object_ty]);
}
impl<'db> IntersectionType<'db> {
fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.positive(db).into_iter().copied().collect()
}
fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.negative(db).into_iter().copied().collect()
}
}
#[test]
fn build_intersection() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ta = Type::Any;
let intersection = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t0)
.build()
.expect_intersection();
assert_eq!(intersection.pos_vec(&db), &[ta]);
assert_eq!(intersection.neg_vec(&db), &[t0]);
}
#[test]
fn build_intersection_flatten_positive() {
let db = setup_db();
let ta = Type::Any;
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let i0 = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t1)
.build();
let intersection = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_positive(i0)
.build()
.expect_intersection();
assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
assert_eq!(intersection.neg_vec(&db), &[t1]);
}
#[test]
fn build_intersection_flatten_negative() {
let db = setup_db();
let ta = Type::Any;
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let i0 = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t1)
.build();
let intersection = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_negative(i0)
.build()
.expect_intersection();
assert_eq!(intersection.pos_vec(&db), &[t2, t1]);
assert_eq!(intersection.neg_vec(&db), &[ta]);
}
#[test]
fn intersection_distributes_over_union() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let ta = Type::Any;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let union = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_positive(u0)
.build()
.expect_union();
let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements(&db)[..] else {
panic!("expected a union of two intersections");
};
assert_eq!(i0.pos_vec(&db), &[ta, t0]);
assert_eq!(i1.pos_vec(&db), &[ta, t1]);
}
#[test]
fn build_intersection_self_negation() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::None)
.add_negative(Type::None)
.build();
assert_eq!(ty, Type::Never);
}
#[test]
fn build_intersection_simplify_negative_never() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::None)
.add_negative(Type::Never)
.build();
assert_eq!(ty, Type::None);
}
#[test]
fn build_intersection_simplify_positive_never() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::None)
.add_positive(Type::Never)
.build();
assert_eq!(ty, Type::Never);
}
#[test]
fn build_intersection_simplify_positive_unbound() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::Unbound)
.add_positive(Type::IntLiteral(1))
.build();
assert_eq!(ty, Type::Unbound);
}
#[test]
fn build_intersection_simplify_negative_unbound() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_negative(Type::Unbound)
.add_positive(Type::IntLiteral(1))
.build();
assert_eq!(ty, Type::IntLiteral(1));
}
#[test]
fn build_intersection_simplify_negative_none() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_negative(Type::None)
.add_positive(Type::IntLiteral(1))
.build();
assert_eq!(ty, Type::IntLiteral(1));
}
}


@@ -1,111 +0,0 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
// TODO: Don't use string keys for rules
pub(super) rule: String,
pub(super) message: String,
pub(super) range: TextRange,
pub(super) file: File,
}
impl TypeCheckDiagnostic {
pub fn rule(&self) -> &str {
&self.rule
}
pub fn message(&self) -> &str {
&self.message
}
pub fn file(&self) -> File {
self.file
}
}
impl Ranged for TypeCheckDiagnostic {
fn range(&self) -> TextRange {
self.range
}
}
/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// when going from `infer_expression` to `check_file`. We could consider
/// making [`TypeCheckDiagnostic`] a Salsa struct to have them Arena-allocated (once the Tables refactor is done).
/// Using a Salsa struct does have the downside that it leaks the Salsa dependency into diagnostics,
/// and each Salsa struct comes with some overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}
impl TypeCheckDiagnostics {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
self.inner.push(Arc::new(diagnostic));
}
pub(crate) fn shrink_to_fit(&mut self) {
self.inner.shrink_to_fit();
}
}
impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
}
}
impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
self.inner.extend(iter);
}
}
impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
self.inner
.extend(iter.into_iter().map(std::sync::Arc::clone));
}
}
impl std::fmt::Debug for TypeCheckDiagnostics {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.inner.fmt(f)
}
}
impl Deref for TypeCheckDiagnostics {
type Target = [std::sync::Arc<TypeCheckDiagnostic>];
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl IntoIterator for TypeCheckDiagnostics {
type Item = Arc<TypeCheckDiagnostic>;
type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;
fn into_iter(self) -> Self::IntoIter {
self.inner.into_iter()
}
}
impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
type Item = &'a Arc<TypeCheckDiagnostic>;
type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;
fn into_iter(self) -> Self::IntoIter {
self.inner.iter()
}
}
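
A brief usage sketch (not from this diff): because `TypeCheckDiagnostics` derefs to a slice of `Arc<TypeCheckDiagnostic>`, callers can iterate it directly. `diagnostics` below stands for whatever value the checker's entry point returns, and `ruff_text_size::Ranged` must be in scope for `.range()`:

// Illustrative only: report every collected diagnostic.
for diagnostic in &diagnostics {
    eprintln!(
        "{}: {} at {:?}",
        diagnostic.rule(),
        diagnostic.message(),
        diagnostic.range(),
    );
}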

Some files were not shown because too many files have changed in this diff.