Compare commits
2 Commits
0.9.0...preview-bi

| Author | SHA1 | Date |
|---|---|---|
| | d3e160dcb7 | |
| | 003851b54c | |
@@ -1,10 +1,3 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"

# statically link the C runtime so the executable does not depend on
# that shared/dynamic library.
#
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
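Aside (not part of the diff): the `[alias]` table above defines Cargo command aliases, so contributors can invoke them directly. A minimal sketch of local usage; the `generate-all` subcommand and the `--save-baseline` argument are shown only as illustrative inputs:

```bash
# `cargo dev` expands to `cargo run --package ruff_dev --bin ruff_dev`.
cargo dev generate-all

# `cargo benchmark` expands to the `bench` invocation above; because the alias
# ends in `--`, anything appended here is passed through to the bench harness.
cargo benchmark --save-baseline=main
```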
@@ -20,7 +20,7 @@
"extensions": [
  "ms-python.python",
  "rust-lang.rust-analyzer",
  "fill-labs.dependi",
  "serayuzgur.crates",
  "tamasfe.even-better-toml",
  "Swellaby.vscode-rust-test-adapter",
  "charliermarsh.ruff"
@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
max_line_length = 100
8 .gitattributes vendored
@@ -2,17 +2,9 @@

crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr

crates/ruff_python_parser/resources/inline linguist-generated=true

ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown
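Aside (not part of the diff): a quick way to confirm which of these attributes actually apply to a path is `git check-attr`. A minimal sketch using one of the fixtures listed above:

```bash
# Print the effective text/eol/linguist attributes for a CRLF fixture.
git check-attr text eol linguist-generated -- \
  crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py
```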
17 .github/CODEOWNERS vendored
@@ -5,18 +5,5 @@
# - The '*' pattern is global owners.
# - Order is important. The last matching pattern has the most precedence.

/crates/ruff_notebook/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser/red-knot etc.
/python/py-fuzzer/ @AlexWaygood

# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
# Jupyter
/crates/ruff_linter/src/jupyter/ @dhruvmanila
2 .github/ISSUE_TEMPLATE.md vendored
@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
  e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
9 .github/actionlint.yaml vendored
@@ -1,9 +0,0 @@
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.

self-hosted-runner:
  # Various runners we use that aren't recognized out-of-the-box by actionlint:
  labels:
    - depot-ubuntu-latest-8
    - depot-ubuntu-22.04-16
    - windows-latest-xlarge
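Aside (not part of the diff): actionlint can also be run against the workflows directly. A minimal sketch, assuming actionlint is installed locally and that the pre-commit hook id is `actionlint`:

```bash
# actionlint scans .github/workflows/ by default when run from the repo root.
actionlint

# Or via the pre-commit setup mentioned in the comment above
# (hook id assumed for illustration).
pre-commit run actionlint --all-files
```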
21 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,21 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    labels: ["internal"]
    groups:
      actions:
        patterns:
          - "*"
    ignore:
      # The latest versions of these are not compatible with our release workflow
      - dependency-name: "actions/upload-artifact"
      - dependency-name: "actions/download-artifact"

  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
    labels: ["internal"]
111 .github/renovate.json5 vendored
@@ -1,111 +0,0 @@
{
  $schema: "https://docs.renovatebot.com/renovate-schema.json",
  dependencyDashboard: true,
  suppressNotifications: ["prEditedNotification"],
  extends: ["config:recommended"],
  labels: ["internal"],
  schedule: ["before 4am on Monday"],
  semanticCommits: "disabled",
  separateMajorMinor: false,
  prHourlyLimit: 10,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
  },
  pep621: {
    // The default for this package manager is to only search for `pyproject.toml` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
  },
  pip_requirements: {
    // The default for this package manager is to run on all requirements.txt files:
    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
    // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
    // a "negative glob pattern".
    // See:
    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
    ignorePaths: ["!docs/requirements*.txt"]
  },
  npm: {
    // The default for this package manager is to only search for `package.json` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
    fileMatch: ["^playground/.*package\\.json$"],
  },
  "pre-commit": {
    enabled: true,
  },
  packageRules: [
    {
      // Group upload/download artifact updates, the versions are dependent
      groupName: "Artifact GitHub Actions dependencies",
      matchManagers: ["github-actions"],
      matchDatasources: ["gitea-tags", "github-tags"],
      matchPackageNames: ["actions/.*-artifact"],
      description: "Weekly update of artifact-related GitHub Actions dependencies",
    },
    {
      // This package rule disables updates for GitHub runners:
      // we'd only pin them to a specific version
      // if there was a deliberate reason to do so
      groupName: "GitHub runners",
      matchManagers: ["github-actions"],
      matchDatasources: ["github-runners"],
      description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
      enabled: false,
    },
    {
      // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
      // See: https://github.com/astral-sh/uv/issues/3642
      matchPackageNames: ["zip"],
      matchManagers: ["cargo"],
      enabled: false,
    },
    {
      // `mkdocs-material` requires a manual update to keep the version in sync
      // with `mkdocs-material-insider`.
      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
      matchManagers: ["pip_requirements"],
      matchPackageNames: ["mkdocs-material"],
      enabled: false,
    },
    {
      groupName: "pre-commit dependencies",
      matchManagers: ["pre-commit"],
      description: "Weekly update of pre-commit dependencies",
    },
    {
      groupName: "NPM Development dependencies",
      matchManagers: ["npm"],
      matchDepTypes: ["devDependencies"],
      description: "Weekly update of NPM development dependencies",
    },
    {
      groupName: "Monaco",
      matchManagers: ["npm"],
      matchPackageNames: ["monaco"],
      description: "Weekly update of the Monaco editor",
    },
    {
      groupName: "strum",
      matchManagers: ["cargo"],
      matchPackageNames: ["strum"],
      description: "Weekly update of strum dependencies",
    },
    {
      groupName: "ESLint",
      matchManagers: ["npm"],
      matchPackageNames: ["eslint"],
      allowedVersions: "<9",
      description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
    },
  ],
  vulnerabilityAlerts: {
    commitMessageSuffix: "",
    labels: ["internal", "security"],
  },
}
478 .github/workflows/build-binaries.yml vendored
@@ -1,478 +0,0 @@
|
||||
# Build ruff on all platforms.
|
||||
#
|
||||
# Generates both wheels (for PyPI) and archived binaries (for GitHub releases).
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
||||
# artifacts job within `cargo-dist`.
|
||||
name: "Build binaries"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work.
|
||||
- pyproject.toml
|
||||
# And when we change this workflow itself...
|
||||
- .github/workflows/build-binaries.yml
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
MODULE_NAME: ruff
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
sdist:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build sdist"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: "Test sdist"
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
TARGET=x86_64-apple-darwin
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-macos-x86_64
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-aarch64:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: arm64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - aarch64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: aarch64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - aarch64"
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
TARGET=aarch64-apple-darwin
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-aarch64-apple-darwin
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
windows:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
arch: x64
|
||||
- target: i686-pc-windows-msvc
|
||||
arch: x86
|
||||
- target: aarch64-pc-windows-msvc
|
||||
arch: x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
env:
|
||||
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
|
||||
XWIN_VERSION: 16
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
|
||||
linux:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
linux-cross:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
arch: aarch64
|
||||
# see https://github.com/astral-sh/ruff/issues/3791
|
||||
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
arch: armv7
|
||||
- target: s390x-unknown-linux-gnu
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: arm-unknown-linux-musleabihf
|
||||
arch: arm
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
if: matrix.platform.arch != 'ppc64'
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
|
||||
distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }}
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.platform.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux-cross:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.platform.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
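Aside (not part of the diff): each "Archive binary" step in the workflow above produces a `.tar.gz` (or `.zip`) archive plus a `.sha256` file written by `shasum -a 256`. A minimal sketch of how a consumer could verify and unpack one of those artifacts; the target triple in the file name is just an example:

```bash
# Verify the checksum recorded next to the archive, then unpack it.
shasum -a 256 -c ruff-x86_64-unknown-linux-gnu.tar.gz.sha256
tar xzf ruff-x86_64-unknown-linux-gnu.tar.gz

# The archive contains a ruff-<target>/ directory with the binary inside.
./ruff-x86_64-unknown-linux-gnu/ruff --version
```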
299 .github/workflows/build-docker.yml vendored
@@ -1,299 +0,0 @@
|
||||
# Build and publish a Docker image.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
||||
# artifacts job within `cargo-dist`.
|
||||
#
|
||||
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
|
||||
# sharing the built image as an artifact between jobs is challenging.
|
||||
name: "[ruff] Build Docker image"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/build-docker.yml
|
||||
|
||||
env:
|
||||
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check tag consistency
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
env:
|
||||
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
|
||||
run: |
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${TAG}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${TAG}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
|
||||
tags: |
|
||||
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
- name: Normalize Platform Pair (replace / with -)
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: ${{ matrix.platform }}
|
||||
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
|
||||
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
- name: Export digests
|
||||
env:
|
||||
digest: ${{ steps.build.outputs.digest }}
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digests
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM_TUPLE }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
docker-publish:
|
||||
name: Publish Docker image (ghcr.io/astral-sh/ruff)
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-build
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
tags: |
|
||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
|
||||
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
|
||||
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
|
||||
run: |
|
||||
# shellcheck disable=SC2046
|
||||
docker buildx imagetools create \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
|
||||
|
||||
docker-publish-extra:
|
||||
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-publish
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
|
||||
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
|
||||
image-mapping:
|
||||
- alpine:3.20,alpine3.20,alpine
|
||||
- debian:bookworm-slim,bookworm-slim,debian-slim
|
||||
- buildpack-deps:bookworm,bookworm,debian
|
||||
steps:
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate Dynamic Dockerfile Tags
|
||||
shell: bash
|
||||
env:
|
||||
TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the image and tags from the matrix variable
|
||||
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
|
||||
|
||||
# Generate Dockerfile content
|
||||
cat <<EOF > Dockerfile
|
||||
FROM ${BASE_IMAGE}
|
||||
COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
|
||||
ENTRYPOINT []
|
||||
CMD ["/usr/local/bin/ruff"]
|
||||
EOF
|
||||
|
||||
# Initialize a variable to store all tag docker metadata patterns
|
||||
TAG_PATTERNS=""
|
||||
|
||||
# Loop through all base tags and append its docker metadata pattern to the list
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
IFS=','; for TAG in ${BASE_TAGS}; do
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
|
||||
done
|
||||
|
||||
# Remove the trailing newline from the pattern list
|
||||
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
|
||||
|
||||
# Export image cache name
|
||||
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"
|
||||
|
||||
# Export tag patterns using the multiline env var syntax
|
||||
{
|
||||
echo "TAG_PATTERNS<<EOF"
|
||||
echo -e "${TAG_PATTERNS}"
|
||||
echo EOF
|
||||
} >> "$GITHUB_ENV"
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
# ghcr.io prefers index level annotations
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
${{ env.TAG_PATTERNS }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# We do not really need to cache here as the Dockerfile is tiny
|
||||
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
|
||||
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
annotations: ${{ steps.meta.outputs.annotations }}
|
||||
|
||||
# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
|
||||
# show the ruff base image first since GitHub always shows the last updated image digests
|
||||
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
|
||||
docker-republish:
|
||||
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-publish-extra
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
tags: |
|
||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
|
||||
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
|
||||
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
|
||||
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
|
||||
run: |
|
||||
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
|
||||
|
||||
# shellcheck disable=SC2046
|
||||
docker buildx imagetools create \
|
||||
"${annotations[@]}" \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
|
||||
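Aside (not part of the diff): the comments in the "Create manifest list and push" steps above describe how the jq and printf expansions assemble the final command. A minimal sketch of the shape that command takes after substitution; the tags and digests are placeholders, not real values:

```bash
# What the expanded invocation effectively looks like:
docker buildx imagetools create \
  -t ghcr.io/astral-sh/ruff:0.9.0 \
  -t ghcr.io/astral-sh/ruff:0.9 \
  ghcr.io/astral-sh/ruff@sha256:<digest-linux-amd64> \
  ghcr.io/astral-sh/ruff@sha256:<digest-linux-arm64>
```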
326 .github/workflows/ci.yaml vendored
@@ -16,15 +16,13 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.12"
|
||||
PYTHON_VERSION: "3.11"
|
||||
|
||||
jobs:
|
||||
determine_changes:
|
||||
name: "Determine changes"
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
# Flag that is raised when any code that affects parser is changed
|
||||
parser: ${{ steps.changed.outputs.parser_any_changed }}
|
||||
# Flag that is raised when any code that affects linter is changed
|
||||
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||
# Flag that is raised when any code that affects formatter is changed
|
||||
@@ -32,29 +30,15 @@ jobs:
|
||||
# Flag that is raised when any code is changed
|
||||
# This is superset of the linter and formatter
|
||||
code: ${{ steps.changed.outputs.code_any_changed }}
|
||||
# Flag that is raised when any code that affects the fuzzer is changed
|
||||
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- uses: tj-actions/changed-files@v45
|
||||
- uses: tj-actions/changed-files@v42
|
||||
id: changed
|
||||
with:
|
||||
files_yaml: |
|
||||
parser:
|
||||
- Cargo.toml
|
||||
- Cargo.lock
|
||||
- crates/ruff_python_trivia/**
|
||||
- crates/ruff_source_file/**
|
||||
- crates/ruff_text_size/**
|
||||
- crates/ruff_python_ast/**
|
||||
- crates/ruff_python_parser/**
|
||||
- python/py-fuzzer/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
linter:
|
||||
- Cargo.toml
|
||||
- Cargo.lock
|
||||
@@ -62,6 +46,7 @@ jobs:
|
||||
- "!crates/ruff_python_formatter/**"
|
||||
- "!crates/ruff_formatter/**"
|
||||
- "!crates/ruff_dev/**"
|
||||
- "!crates/ruff_shrinking/**"
|
||||
- scripts/*
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
@@ -82,15 +67,9 @@ jobs:
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
fuzz:
|
||||
- fuzz/Cargo.toml
|
||||
- fuzz/Cargo.lock
|
||||
- fuzz/fuzz_targets/**
|
||||
|
||||
code:
|
||||
- "**/*"
|
||||
- "!**/*.md"
|
||||
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
|
||||
- "!docs/**"
|
||||
- "!assets/**"
|
||||
|
||||
@@ -100,8 +79,6 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup component add rustfmt
|
||||
- run: cargo fmt --all --check
|
||||
@@ -114,8 +91,6 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: |
|
||||
rustup component add clippy
|
||||
@@ -124,18 +99,16 @@ jobs:
|
||||
- name: "Clippy"
|
||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
||||
- name: "Clippy (wasm)"
|
||||
run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
|
||||
cargo-test-linux:
|
||||
name: "cargo test (linux)"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
@@ -157,60 +130,22 @@ jobs:
|
||||
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
# Use --document-private-items so that all our doc comments are kept in
|
||||
# sync, not just public items. Eventually we should do this for all
|
||||
# crates; for now add crates here as they are warning-clean to prevent
|
||||
# regression.
|
||||
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
|
||||
env:
|
||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
- uses: actions/upload-artifact@v4
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug/ruff
|
||||
|
||||
cargo-test-linux-release:
|
||||
name: "cargo test (linux, release)"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-test-windows:
|
||||
name: "cargo test (windows)"
|
||||
runs-on: windows-latest-xlarge
|
||||
runs-on: windows-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install cargo nextest"
|
||||
@@ -220,9 +155,6 @@ jobs:
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
|
||||
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
|
||||
run: |
|
||||
cargo nextest run --all-features --profile ci
|
||||
cargo test --all-features --doc
|
||||
@@ -235,144 +167,39 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
node-version: 18
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
with:
|
||||
version: v0.13.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Test ruff_wasm"
|
||||
- name: "Run wasm-pack"
|
||||
run: |
|
||||
cd crates/ruff_wasm
|
||||
wasm-pack test --node
|
||||
- name: "Test red_knot_wasm"
|
||||
run: |
|
||||
cd crates/red_knot_wasm
|
||||
wasm-pack test --node
|
||||
|
||||
cargo-build-release:
|
||||
name: "cargo build (release)"
|
||||
runs-on: macos-latest
|
||||
if: ${{ github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Build"
|
||||
run: cargo build --release --locked
|
||||
|
||||
cargo-build-msrv:
|
||||
name: "cargo build (msrv)"
|
||||
cargo-fuzz:
|
||||
name: "cargo fuzz"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: SebRollen/toml-action@v1.2.0
|
||||
id: msrv
|
||||
with:
|
||||
file: "Cargo.toml"
|
||||
field: "workspace.package.rust-version"
|
||||
- name: "Install Rust toolchain"
|
||||
env:
|
||||
MSRV: ${{ steps.msrv.outputs.value }}
|
||||
run: rustup default "${MSRV}"
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
MSRV: ${{ steps.msrv.outputs.value }}
|
||||
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-fuzz-build:
|
||||
name: "cargo fuzz build"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "fuzz -> target"
|
||||
- name: "Install cargo-binstall"
|
||||
uses: cargo-bins/cargo-binstall@main
|
||||
- name: "Install cargo-fuzz"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-fuzz@0.11.2
|
||||
- name: "Install cargo-fuzz"
|
||||
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
|
||||
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
|
||||
- run: cargo fuzz build -s none
|
||||
|
||||
fuzz-parser:
|
||||
name: "fuzz parser"
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
|
||||
timeout-minutes: 20
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: astral-sh/setup-uv@v5
|
||||
- uses: actions/download-artifact@v4
|
||||
name: Download Ruff binary to test
|
||||
id: download-cached-binary
|
||||
with:
|
||||
name: ruff
|
||||
path: ruff-to-test
|
||||
- name: Fuzz
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x "${DOWNLOAD_PATH}/ruff"
|
||||
|
||||
(
|
||||
uvx \
|
||||
--python="${PYTHON_VERSION}" \
|
||||
--from=./python/py-fuzzer \
|
||||
fuzz \
|
||||
--test-executable="${DOWNLOAD_PATH}/ruff" \
|
||||
--bin=ruff \
|
||||
0-500
|
||||
)
|
||||
|
||||
scripts:
|
||||
name: "test scripts"
|
||||
runs-on: ubuntu-latest
|
||||
@@ -381,12 +208,10 @@ jobs:
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup component add rustfmt
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- run: ./scripts/add_rule.py --name DoTheThing --prefix F --code 999 --linter pyflakes
|
||||
- run: ./scripts/add_rule.py --name DoTheThing --prefix PL --code C0999 --linter pylint
|
||||
- run: cargo check
|
||||
- run: cargo fmt --all --check
|
||||
- run: |
|
||||
@@ -397,35 +222,34 @@ jobs:
|
||||
|
||||
ecosystem:
|
||||
name: "ecosystem"
|
||||
runs-on: depot-ubuntu-latest-8
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||
# Ecosystem check needs linter and/or formatter changes.
|
||||
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
||||
if: github.event_name == 'pull_request' && ${{
|
||||
needs.determine_changes.outputs.code == 'true'
|
||||
}}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
name: Download comparison Ruff binary
|
||||
id: ruff-target
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v7
|
||||
- uses: dawidd6/action-download-artifact@v3
|
||||
name: Download baseline Ruff binary
|
||||
with:
|
||||
name: ruff
|
||||
branch: ${{ github.event.pull_request.base.ref }}
|
||||
workflow: "ci.yaml"
|
||||
check_artifacts: true
|
||||
|
||||
- name: Install ruff-ecosystem
|
||||
@@ -434,72 +258,64 @@ jobs:
|
||||
|
||||
- name: Run `ruff check` stable ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
|
||||
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
|
||||
|
||||
cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
|
||||
echo "### Linter (stable)" > ecosystem-result
|
||||
cat ecosystem-result-check-stable >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
|
||||
- name: Run `ruff check` preview ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
|
||||
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
|
||||
|
||||
cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
|
||||
echo "### Linter (preview)" >> ecosystem-result
|
||||
cat ecosystem-result-check-preview >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
|
||||
- name: Run `ruff format` stable ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
|
||||
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
|
||||
|
||||
cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
|
||||
echo "### Formatter (stable)" >> ecosystem-result
|
||||
cat ecosystem-result-format-stable >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
|
||||
- name: Run `ruff format` preview ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
|
||||
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
|
||||
|
||||
cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
|
||||
echo "### Formatter (preview)" >> ecosystem-result
|
||||
cat ecosystem-result-format-preview >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
@@ -508,30 +324,34 @@ jobs:
|
||||
run: |
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
- uses: actions/upload-artifact@v3
|
||||
name: Upload PR Number
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
- uses: actions/upload-artifact@v3
|
||||
name: Upload Results
|
||||
with:
|
||||
name: ecosystem-result
|
||||
path: ecosystem-result
|
||||
|
||||
cargo-shear:
|
||||
name: "cargo shear"
|
||||
cargo-udeps:
|
||||
name: "cargo udeps"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: cargo-bins/cargo-binstall@main
|
||||
- run: cargo binstall --no-confirm cargo-shear
|
||||
- run: cargo shear
|
||||
- name: "Install nightly Rust toolchain"
|
||||
# Only pinned to make caching work, update freely
|
||||
run: rustup toolchain install nightly-2023-10-15
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install cargo-udeps"
|
||||
uses: taiki-e/install-action@cargo-udeps
|
||||
- name: "Run cargo-udeps"
|
||||
run: cargo +nightly-2023-10-15 udeps
|
||||
|
||||
python-package:
|
||||
name: "python package"
|
||||
@@ -539,8 +359,6 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
@@ -554,7 +372,7 @@ jobs:
|
||||
args: --out dist
|
||||
- name: "Test wheel"
|
||||
run: |
|
||||
pip install --force-reinstall --find-links dist "${PACKAGE_NAME}"
|
||||
pip install --force-reinstall --find-links dist ${{ env.PACKAGE_NAME }}
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Remove wheels from cache"
|
||||
@@ -566,8 +384,6 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
@@ -583,14 +399,13 @@ jobs:
|
||||
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
- name: "Run pre-commit"
|
||||
run: |
|
||||
echo '```console' > "$GITHUB_STEP_SUMMARY"
|
||||
echo '```console' > $GITHUB_STEP_SUMMARY
|
||||
# Enable color output for pre-commit and remove it for the summary
|
||||
# Use --hook-stage=manual to enable slower pre-commit hooks that are skipped by default
|
||||
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
|
||||
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
|
||||
exit_code="${PIPESTATUS[0]}"
|
||||
echo '```' >> "$GITHUB_STEP_SUMMARY"
|
||||
exit "$exit_code"
|
||||
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always | \
|
||||
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> $GITHUB_STEP_SUMMARY) >&1
|
||||
exit_code=${PIPESTATUS[0]}
|
||||
echo '```' >> $GITHUB_STEP_SUMMARY
|
||||
exit $exit_code
|
||||
|
||||
docs:
|
||||
name: "mkdocs"
|
||||
@@ -600,11 +415,7 @@ jobs:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
@@ -612,15 +423,13 @@ jobs:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: uv pip install -r docs/requirements-insiders.txt --system
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: uv pip install -r docs/requirements.txt --system
|
||||
run: pip install -r docs/requirements.txt
|
||||
- name: "Update README File"
|
||||
run: python scripts/transform_readme.py --target mkdocs
|
||||
- name: "Generate docs"
|
||||
@@ -642,18 +451,16 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Cache rust"
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: "Run checks"
|
||||
- name: "Formatter progress"
|
||||
run: scripts/formatter_ecosystem_checks.sh
|
||||
- name: "Github step summary"
|
||||
run: cat target/formatter-ecosystem/stats.txt > "$GITHUB_STEP_SUMMARY"
|
||||
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
|
||||
- name: "Remove checkouts from cache"
|
||||
run: rm -r target/formatter-ecosystem
|
||||
run: rm -r target/progress_projects
|
||||
|
||||
check-ruff-lsp:
|
||||
name: "test ruff-lsp"
|
||||
@@ -664,21 +471,20 @@ jobs:
|
||||
- determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: extractions/setup-just@v2
|
||||
- uses: extractions/setup-just@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
name: "Download ruff-lsp source"
|
||||
with:
|
||||
persist-credentials: false
|
||||
repository: "astral-sh/ruff-lsp"
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
name: Download development ruff binary
|
||||
id: ruff-target
|
||||
with:
|
||||
@@ -690,27 +496,23 @@ jobs:
|
||||
just install
|
||||
|
||||
- name: Run ruff-lsp tests
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Setup development binary
|
||||
pip uninstall --yes ruff
|
||||
chmod +x "${DOWNLOAD_PATH}/ruff"
|
||||
export PATH="${DOWNLOAD_PATH}:${PATH}"
|
||||
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
|
||||
ruff version
|
||||
|
||||
just test
|
||||
|
||||
benchmarks:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: "Checkout Branch"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
@@ -726,7 +528,7 @@ jobs:
|
||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@v3
|
||||
uses: CodSpeedHQ/action@v2
|
||||
with:
|
||||
run: cargo codspeed run
|
||||
token: ${{ secrets.CODSPEED_TOKEN }}
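The ci.yaml hunks above repeatedly replace inline ${{ ... }} interpolation inside run: scripts with values passed through env: and then quoted in the shell (DOWNLOAD_PATH, PACKAGE_NAME, "$GITHUB_STEP_SUMMARY"). A minimal sketch of that pattern, with a hypothetical step id and output name:

- name: Use an artifact path safely
  env:
    # Hypothetical step id/output: bind the expression once as an env var
    # instead of splicing it into the script text below.
    DOWNLOAD_PATH: ${{ steps.example-download.outputs.download-path }}
  run: |
    # Quoting keeps unusual paths intact and avoids shell injection via
    # attacker-influenced expression values.
    chmod +x "${DOWNLOAD_PATH}/ruff"
    "${DOWNLOAD_PATH}/ruff" --version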
78
.github/workflows/daily_fuzz.yaml
vendored
@@ -1,78 +0,0 @@
|
||||
name: Daily parser fuzz
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
pull_request:
|
||||
paths:
|
||||
- ".github/workflows/daily_fuzz.yaml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
PACKAGE_NAME: ruff
|
||||
FORCE_COLOR: 1
|
||||
|
||||
jobs:
|
||||
fuzz:
|
||||
name: Fuzz
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
# Don't run the cron job on forks:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: astral-sh/setup-uv@v5
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Build ruff
|
||||
# A debug build means the script runs slower once it gets started,
|
||||
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
||||
run: cargo build --locked
|
||||
- name: Fuzz
|
||||
run: |
|
||||
# shellcheck disable=SC2046
|
||||
(
|
||||
uvx \
|
||||
--python=3.12 \
|
||||
--from=./python/py-fuzzer \
|
||||
fuzz \
|
||||
--test-executable=target/debug/ruff \
|
||||
--bin=ruff \
|
||||
$(shuf -i 0-9999999999999999999 -n 1000)
|
||||
)
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the daily fuzz surfaced any bugs
|
||||
runs-on: ubuntu-latest
|
||||
needs: fuzz
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
await github.rest.issues.create({
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
|
||||
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
labels: ["bug", "parser", "fuzzer"],
|
||||
})
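The fuzz step above uses shuf to generate a batch of random integers that word-split into separate seed arguments for the fuzzer, which is why the shellcheck SC2046 warning is suppressed there. A standalone sketch with a smaller range and count, purely for illustration:

# Five random integers in 0..99999; left unquoted, the command
# substitution expands into five separate arguments.
printf 'seed: %s\n' $(shuf -i 0-99999 -n 5)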
71
.github/workflows/daily_property_tests.yaml
vendored
@@ -1,71 +0,0 @@
|
||||
name: Daily property test run
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 12 * * *"
|
||||
pull_request:
|
||||
paths:
|
||||
- ".github/workflows/daily_property_tests.yaml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
FORCE_COLOR: 1
|
||||
|
||||
jobs:
|
||||
property_tests:
|
||||
name: Property tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
# Don't run the cron job on forks:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Build Red Knot
|
||||
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
|
||||
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
|
||||
run: cargo build --locked --release --package red_knot_python_semantic --tests
|
||||
- name: Run property tests
|
||||
shell: bash
|
||||
run: |
|
||||
export QUICKCHECK_TESTS=100000
|
||||
for _ in {1..5}; do
|
||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
|
||||
done
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the daily property test run surfaced any bugs
|
||||
runs-on: ubuntu-latest
|
||||
needs: property_tests
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.property_tests.result == 'failure' }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
await github.rest.issues.create({
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Daily property test run failed on ${new Date().toDateString()}`,
|
||||
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
labels: ["bug", "red-knot", "testing"],
|
||||
})
|
||||
55
.github/workflows/docs.yaml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
name: mkdocs
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
||||
default: ""
|
||||
type: string
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
mkdocs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
- uses: actions/setup-python@v5
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.4.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
29
.github/workflows/notify-dependents.yml
vendored
@@ -1,29 +0,0 @@
|
||||
# Notify downstream repositories of a new release.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
||||
# job within `cargo-dist`.
|
||||
name: "[ruff] Notify dependents"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
update-dependents:
|
||||
name: Notify dependents
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: "Update pre-commit mirror"
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||
script: |
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: 'astral-sh',
|
||||
repo: 'ruff-pre-commit',
|
||||
workflow_id: 'main.yml',
|
||||
ref: 'main',
|
||||
})
|
||||
48
.github/workflows/playground.yaml
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
name: "[Playground] Release"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
- name: "Run wasm-pack"
|
||||
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Build JavaScript bundle"
|
||||
run: npm run build
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.4.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
27
.github/workflows/pr-comment.yaml
vendored
@@ -10,29 +10,29 @@ on:
|
||||
description: The ecosystem workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@v7
|
||||
- uses: dawidd6/action-download-artifact@v3
|
||||
name: Download pull request number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v7
|
||||
- uses: dawidd6/action-download-artifact@v3
|
||||
name: "Download ecosystem results"
|
||||
id: download-ecosystem-result
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
@@ -43,20 +43,11 @@ jobs:
|
||||
path: pr/ecosystem
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
|
||||
run: |
|
||||
# Guard against malicious ecosystem results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/ecosystem/ecosystem-result ]]
|
||||
then
|
||||
echo "Error: ecosystem-result cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note this identifier is used to find the comment to update on
|
||||
# subsequent runs
|
||||
echo '<!-- generated-comment ecosystem -->' >> comment.txt
|
||||
@@ -65,9 +56,9 @@ jobs:
|
||||
cat pr/ecosystem/ecosystem-result >> comment.txt
|
||||
echo "" >> comment.txt
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
echo 'comment<<EOF' >> $GITHUB_OUTPUT
|
||||
cat comment.txt >> $GITHUB_OUTPUT
|
||||
echo 'EOF' >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@v3
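The step above emits a multi-line value through $GITHUB_OUTPUT using the name<<EOF delimiter syntax. A standalone sketch of the same technique, reusing the comment output name and comment.txt file from the diff:

# Everything between the 'comment<<EOF' line and the matching 'EOF' line
# becomes the value of the 'comment' step output.
{
  echo 'comment<<EOF'
  cat comment.txt
  echo 'EOF'
} >> "$GITHUB_OUTPUT"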
144
.github/workflows/publish-docs.yml
vendored
@@ -1,144 +0,0 @@
|
||||
# Publish the Ruff documentation.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
||||
# job within `cargo-dist`.
|
||||
name: mkdocs
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
||||
default: ""
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
mkdocs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
persist-credentials: true
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.12
|
||||
|
||||
- name: "Set docs version"
|
||||
env:
|
||||
version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
|
||||
run: |
|
||||
# if version is missing, use 'latest'
|
||||
if [ -z "$version" ]; then
|
||||
echo "Using 'latest' as version"
|
||||
version="latest"
|
||||
fi
|
||||
|
||||
# Use version as display name for now
|
||||
display_name="$version"
|
||||
|
||||
echo "version=$version" >> "$GITHUB_ENV"
|
||||
echo "display_name=$display_name" >> "$GITHUB_ENV"
|
||||
|
||||
- name: "Set branch name"
|
||||
run: |
|
||||
timestamp="$(date +%s)"
|
||||
|
||||
# create branch_display_name from display_name by replacing all
|
||||
# characters disallowed in git branch names with hyphens
|
||||
branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
|
||||
|
||||
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
|
||||
echo "timestamp=$timestamp" >> "$GITHUB_ENV"
|
||||
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
|
||||
- name: "Clone docs repo"
|
||||
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
|
||||
|
||||
- name: "Copy docs"
|
||||
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
|
||||
|
||||
- name: "Commit docs"
|
||||
working-directory: astral-docs
|
||||
run: |
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
||||
git checkout -b "${branch_name}"
|
||||
git add site/ruff
|
||||
git commit -m "Update ruff documentation for $version"
|
||||
|
||||
- name: "Create Pull Request"
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
# set the PR title
|
||||
pull_request_title="Update ruff documentation for ${display_name}"
|
||||
|
||||
# Delete any existing pull requests that are open for this version
|
||||
# by checking against pull_request_title because the new PR will
|
||||
# supersede the old one.
|
||||
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
|
||||
xargs -I {} gh pr close {}
|
||||
|
||||
# push the branch to GitHub
|
||||
git push origin "${branch_name}"
|
||||
|
||||
# create the PR
|
||||
gh pr create \
|
||||
--base=main \
|
||||
--head="${branch_name}" \
|
||||
--title="${pull_request_title}" \
|
||||
--body="Automated documentation update for ${display_name}" \
|
||||
--label="documentation"
|
||||
|
||||
- name: "Merge Pull Request"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
||||
# give the PR a few seconds to be created before trying to auto-merge it
|
||||
sleep 10
|
||||
gh pr merge --squash "${branch_name}"
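The publish-docs workflow above builds a git-safe branch name by mapping every character other than alphanumerics, '.' and '_' to a hyphen and then squeezing hyphen runs. A standalone sketch with an illustrative display name:

display_name='v0.9.0 (preview)'   # illustrative value
branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
echo "branch_name=update-docs-${branch_display_name}-$(date +%s)"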
57
.github/workflows/publish-playground.yml
vendored
@@ -1,57 +0,0 @@
|
||||
# Publish the Ruff playground.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
||||
# job within `cargo-dist`.
|
||||
name: "[Playground] Release"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
- name: "Run wasm-pack"
|
||||
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Build JavaScript bundle"
|
||||
run: npm run build
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.13.0
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
32
.github/workflows/publish-pypi.yml
vendored
@@ -1,32 +0,0 @@
|
||||
# Publish a release to PyPI.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
|
||||
# within `cargo-dist`.
|
||||
name: "[ruff] Publish to PyPI"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
pypi-publish:
|
||||
name: Upload to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For PyPI's trusted publishing.
|
||||
id-token: write
|
||||
steps:
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: wheels-*
|
||||
path: wheels
|
||||
merge-multiple: true
|
||||
- name: Publish to PyPi
|
||||
run: uv publish -v wheels/*
|
||||
57
.github/workflows/publish-wasm.yml
vendored
@@ -1,57 +0,0 @@
|
||||
# Build and publish ruff-api for wasm.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
|
||||
# job within `cargo-dist`.
|
||||
name: "Build and publish wasm"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
ruff_wasm:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
strategy:
|
||||
matrix:
|
||||
target: [web, bundler, nodejs]
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
- name: "Run wasm-pack build"
|
||||
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
|
||||
- name: "Rename generated package"
|
||||
run: | # Replace the package name w/ jq
|
||||
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
|
||||
mv /tmp/package.json crates/ruff_wasm/pkg
|
||||
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- name: "Publish (dry-run)"
|
||||
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
run: npm publish --dry-run crates/ruff_wasm/pkg
|
||||
- name: "Publish"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
run: npm publish --provenance --access public crates/ruff_wasm/pkg
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
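The wasm publish job above rewrites the generated package name with jq; because jq has no in-place edit mode, the result is written to a temporary file and then moved back. A minimal sketch with the target hard-coded rather than taken from the build matrix:

jq '.name = "@astral-sh/ruff-wasm-web"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg/package.json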
593
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,593 @@
|
||||
name: "[ruff] Release"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "The version to tag, without the leading 'v'. If omitted, will initiate a dry run (no uploads)."
|
||||
type: string
|
||||
sha:
|
||||
description: "The full sha of the commit to be released. If omitted, the latest commit on the default branch will be used."
|
||||
default: ""
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work
|
||||
- pyproject.toml
|
||||
# And when we change this workflow itself...
|
||||
- .github/workflows/release.yaml
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build sdist"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: "Test sdist"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - x86_64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-universal:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - universal2"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
args: --release --locked --target universal2-apple-darwin --out dist
|
||||
- name: "Test wheel - universal2"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
arch: x64
|
||||
- target: i686-pc-windows-msvc
|
||||
arch: x86
|
||||
- target: aarch64-pc-windows-msvc
|
||||
arch: x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
linux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
arch: aarch64
|
||||
# see https://github.com/astral-sh/ruff/issues/3791
|
||||
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
arch: armv7
|
||||
- target: s390x-unknown-linux-gnu
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
if: matrix.platform.arch != 'ppc64'
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: ubuntu20.04
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
validate-tag:
|
||||
name: Validate tag
|
||||
runs-on: ubuntu-latest
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main # We checkout the main branch to check for the commit
|
||||
- name: Check main branch
|
||||
if: ${{ inputs.sha }}
|
||||
run: |
|
||||
# Fetch the main branch since a shallow checkout is used by default
|
||||
git fetch origin main --unshallow
|
||||
if ! git branch --contains ${{ inputs.sha }} | grep -E '(^|\s)main$'; then
|
||||
echo "The specified sha is not on the main branch" >&2
|
||||
exit 1
|
||||
fi
|
||||
- name: Check tag consistency
|
||||
run: |
|
||||
# Switch to the commit we want to release
|
||||
git checkout ${{ inputs.sha }}
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
upload-release:
|
||||
name: Upload to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- macos-universal
|
||||
- macos-x86_64
|
||||
- windows
|
||||
- linux
|
||||
- linux-cross
|
||||
- musllinux
|
||||
- musllinux-cross
|
||||
- validate-tag
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For pypi trusted publishing
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: wheels
|
||||
- name: Publish to PyPi
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
skip-existing: true
|
||||
packages-dir: wheels
|
||||
verbose: true
|
||||
|
||||
tag-release:
|
||||
name: Tag release
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload-release
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
permissions:
|
||||
# For git tag
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- name: git tag
|
||||
run: |
|
||||
git config user.email "hey@astral.sh"
|
||||
git config user.name "Ruff Release CI"
|
||||
git tag -m "v${{ inputs.tag }}" "v${{ inputs.tag }}"
|
||||
# If there is duplicate tag, this will fail. The publish to pypi action will have been a noop (due to skip
|
||||
# existing), so we make a non-destructive exit here
|
||||
git push --tags
|
||||
|
||||
publish-release:
|
||||
name: Publish to GitHub
|
||||
runs-on: ubuntu-latest
|
||||
needs: tag-release
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
permissions:
|
||||
# For GitHub release publishing
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: binaries
|
||||
- name: "Publish to GitHub"
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
draft: true
|
||||
files: binaries/*
|
||||
tag_name: v${{ inputs.tag }}
|
||||
|
||||
docker-publish:
|
||||
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
|
||||
# the tag here also
|
||||
name: Push Docker image ghcr.io/astral-sh/ruff
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For the docker push
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/astral-sh/ruff
|
||||
|
||||
- name: Check tag consistency
|
||||
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
|
||||
# change docker tags
|
||||
if: ${{ inputs.tag }}
|
||||
run: |
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: "Build and push Docker image"
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# Reuse the builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
push: ${{ inputs.tag != '' }}
|
||||
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
# After the release has been published, we update downstream repositories
|
||||
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
||||
update-dependents:
|
||||
name: Update dependents
|
||||
runs-on: ubuntu-latest
|
||||
needs: publish-release
|
||||
steps:
|
||||
- name: "Update pre-commit mirror"
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||
script: |
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: 'astral-sh',
|
||||
repo: 'ruff-pre-commit',
|
||||
workflow_id: 'main.yml',
|
||||
ref: 'main',
|
||||
})
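The release workflow above checks twice (in validate-tag and again in docker-publish) that the requested tag matches the version declared in pyproject.toml before publishing anything. A minimal sketch of that check, with the tag taken from a plain shell variable instead of the workflow_dispatch input:

tag="0.9.0"   # illustrative; the workflow reads this from its input
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${tag}" != "${version}" ]; then
  echo "Tag ${tag} does not match pyproject.toml version ${version}" >&2
  exit 1
fi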
300
.github/workflows/release.yml
vendored
@@ -1,300 +0,0 @@
|
||||
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
|
||||
#
|
||||
# Copyright 2022-2024, axodotdev
|
||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
||||
#
|
||||
# CI that:
|
||||
#
|
||||
# * checks for a Git Tag that looks like a release
|
||||
# * builds artifacts with dist (archives, installers, hashes)
|
||||
# * uploads those artifacts to temporary workflow zip
|
||||
# * on success, uploads the artifacts to a GitHub Release
|
||||
#
|
||||
# Note that the GitHub Release will be created with a generated
|
||||
# title/body based on your changelogs.
|
||||
|
||||
name: Release
|
||||
permissions:
|
||||
"contents": "write"
|
||||
|
||||
# This task will run whenever you workflow_dispatch with a tag that looks like a version
|
||||
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
|
||||
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
|
||||
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
|
||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
# If you push multiple tags at once, separate instances of this workflow will
|
||||
# spin up, creating an independent announcement for each one. However, GitHub
|
||||
# will hard limit this to 3 tags per commit, as it will assume more tags is a
|
||||
# mistake.
|
||||
#
|
||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
||||
# will be marked as a prerelease.
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: Release Tag
|
||||
required: true
|
||||
default: dry-run
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||
tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
|
||||
publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
|
||||
- name: Cache dist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/dist
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
|
||||
custom-build-binaries:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-binaries.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-build-docker:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"packages": "write"
|
||||
|
||||
# Build and package all the platform-agnostic(ish) things
|
||||
build-global-artifacts:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
# Determines if we should publish/announce
|
||||
host:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
- build-global-artifacts
|
||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
path: dist-manifest.json
|
||||
|
||||
custom-publish-pypi:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-pypi.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
custom-publish-wasm:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-wasm.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
announce:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- custom-publish-pypi
|
||||
- custom-publish-wasm
|
||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
- name: Cleanup
|
||||
run: |
|
||||
# Remove the granular manifests
|
||||
rm -f artifacts/*-dist-manifest.json
|
||||
- name: Create GitHub Release
|
||||
env:
|
||||
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
|
||||
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
|
||||
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
|
||||
RELEASE_COMMIT: "${{ github.sha }}"
|
||||
run: |
|
||||
# Write and read notes from a file to avoid quoting breaking things
|
||||
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
|
||||
|
||||
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
|
||||
|
||||
custom-notify-dependents:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/notify-dependents.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-docs:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-docs.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-playground:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-playground.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
82 .github/workflows/sync_typeshed.yaml vendored
@@ -1,82 +0,0 @@
|
||||
name: Sync typeshed
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# Run on the 1st and the 15th of every month:
|
||||
- cron: "0 0 1,15 * *"
|
||||
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
jobs:
|
||||
sync:
|
||||
name: Sync typeshed
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
# Don't run the cron job on forks:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
path: ruff
|
||||
persist-credentials: true
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout typeshed
|
||||
with:
|
||||
repository: python/typeshed
|
||||
path: typeshed
|
||||
persist-credentials: false
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- name: Sync typeshed
|
||||
id: sync
|
||||
run: |
|
||||
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
|
||||
mkdir ruff/crates/red_knot_vendored/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
|
||||
- name: Commit the changes
|
||||
id: commit
|
||||
if: ${{ steps.sync.outcome == 'success' }}
|
||||
run: |
|
||||
cd ruff
|
||||
git checkout -b typeshedbot/sync-typeshed
|
||||
git add .
|
||||
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
|
||||
- name: Create a PR
|
||||
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
|
||||
run: |
|
||||
cd ruff
|
||||
git push --force origin typeshedbot/sync-typeshed
|
||||
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
||||
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the typeshed sync failed
|
||||
runs-on: ubuntu-latest
|
||||
needs: [sync]
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
await github.rest.issues.create({
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
|
||||
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
|
||||
})
|
||||
12 .github/zizmor.yml vendored
@@ -1,12 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
  dangerous-triggers:
    ignore:
      - pr-comment.yaml
  cache-poisoning:
    ignore:
      - build-docker.yml
      - publish-playground.yml
9 .gitignore vendored
@@ -21,14 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*

# samply profiles
profile.json

# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg

###
# Rust.gitignore
###
@@ -100,7 +92,6 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
repos/

# Translations
*.mo
@@ -14,18 +14,7 @@ MD041: false
# MD013/line-length
MD013: false

# MD014/commands-show-output
MD014: false

# MD024/no-duplicate-heading
MD024:
  # Allow when nested under different parents e.g. CHANGELOG.md
  siblings_only: true

# MD046/code-block-style
#
# Ignore this because it conflicts with the code block style used in content
# tabs of mkdocs-material which is to add a blank line after the content title.
#
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
MD046: false
allow_different_nesting: true
@@ -1,14 +1,9 @@
|
||||
fail_fast: false
|
||||
fail_fast: true
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.github/workflows/release.yml|
|
||||
crates/red_knot_vendored/vendor/.*|
|
||||
crates/red_knot_workspace/resources/.*|
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||
crates/ruff_notebook/resources/.*|
|
||||
crates/ruff_server/resources/.*|
|
||||
crates/ruff/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*|
|
||||
crates/ruff_python_formatter/tests/snapshots/.*|
|
||||
@@ -18,17 +13,17 @@ exclude: |
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.23
|
||||
rev: v0.15
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 0.7.21
|
||||
rev: 0.7.17
|
||||
hooks:
|
||||
- id: mdformat
|
||||
additional_dependencies:
|
||||
- mdformat-mkdocs==4.0.0
|
||||
- mdformat-footnote==0.1.1
|
||||
- mdformat-mkdocs
|
||||
- mdformat-admon
|
||||
exclude: |
|
||||
(?x)^(
|
||||
docs/formatter/black\.md
|
||||
@@ -36,7 +31,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.43.0
|
||||
rev: v0.37.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -45,21 +40,8 @@ repos:
|
||||
| docs/\w+\.md
|
||||
)$
|
||||
|
||||
- repo: https://github.com/adamchainz/blacken-docs
|
||||
rev: 1.19.1
|
||||
hooks:
|
||||
- id: blacken-docs
|
||||
args: ["--pyi", "--line-length", "130"]
|
||||
files: '^crates/.*/resources/mdtest/.*\.md'
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.*?invalid(_.+)*_syntax\.md
|
||||
)$
|
||||
additional_dependencies:
|
||||
- black==24.10.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.29.4
|
||||
rev: v1.16.22
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -73,52 +55,25 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.6
|
||||
rev: v0.1.4
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.4.2
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.0.3
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.0.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.30.0
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
|
||||
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
|
||||
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
|
||||
- repo: https://github.com/rhysd/actionlint
|
||||
rev: v1.7.6
|
||||
hooks:
|
||||
- id: actionlint
|
||||
stages:
|
||||
# This hook is disabled by default, since it's quite slow.
|
||||
# To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
|
||||
# To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
|
||||
- manual
|
||||
args:
|
||||
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
|
||||
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
|
||||
additional_dependencies:
|
||||
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
|
||||
# and checks these with shellcheck. This is arguably its most useful feature,
|
||||
# but the integration only works if shellcheck is installed
|
||||
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, dev-generate-all]
|
||||
|
||||
@@ -1,2 +0,0 @@
# Auto-generated by `cargo-dist`.
.github/workflows/release.yml
5 .vscode/extensions.json vendored
@@ -1,5 +0,0 @@
{
  "recommendations": [
    "rust-lang.rust-analyzer"
  ]
}

6 .vscode/settings.json vendored
@@ -1,6 +0,0 @@
{
  "rust-analyzer.check.extraArgs": [
    "--all-features"
  ],
  "rust-analyzer.check.command": "clippy",
}
@@ -1,143 +1,5 @@
|
||||
# Breaking Changes
|
||||
|
||||
## 0.9.0
|
||||
|
||||
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
|
||||
|
||||
## 0.8.0
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first line of the problematic docstring. Previously, this was not the case.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
## 0.7.0
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
|
||||
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
|
||||
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
|
||||
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
|
||||
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
|
||||
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
|
||||
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
|
||||
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
|
||||
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
|
||||
instead.
|
||||
|
||||
## 0.6.0
|
||||
|
||||
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
|
||||
|
||||
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
|
||||
|
||||
You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
|
||||
|
||||
```toml
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
|
||||
```
|
||||
|
||||
If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
|
||||
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
|
||||
Notebook files and not format them:
|
||||
|
||||
```toml
|
||||
[tool.ruff.format]
|
||||
exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
And, conversely, the following would only format Jupyter Notebook files and not lint them:
|
||||
|
||||
```toml
|
||||
[tool.ruff.lint]
|
||||
exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
extend-exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
## 0.5.0
|
||||
|
||||
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
|
||||
- Selecting `ALL` now excludes deprecated rules
|
||||
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
|
||||
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
|
||||
|
||||
## 0.3.0
|
||||
|
||||
### Ruff 2024.2 style
|
||||
|
||||
The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
|
||||
|
||||
### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
|
||||
|
||||
Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
|
||||
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
|
||||
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
|
||||
|
||||
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
|
||||
|
||||
Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
|
||||
|
||||
- `.bzr`
|
||||
- `.direnv`
|
||||
- `.eggs`
|
||||
- `.git`
|
||||
- `.git-rewrite`
|
||||
- `.hg`
|
||||
- `.ipynb_checkpoints`
|
||||
- `.mypy_cache`
|
||||
- `.nox`
|
||||
- `.pants.d`
|
||||
- `.pyenv`
|
||||
- `.pytest_cache`
|
||||
- `.pytype`
|
||||
- `.ruff_cache`
|
||||
- `.svn`
|
||||
- `.tox`
|
||||
- `.venv`
|
||||
- `.vscode`
|
||||
- `__pypackages__`
|
||||
- `_build`
|
||||
- `buck-out`
|
||||
- `dist`
|
||||
- `node_modules`
|
||||
- `site-packages`
|
||||
- `venv`
|
||||
|
||||
Previously, the `build` directory was included in this list. However, `build` is a fairly common directory
name, and excluding it by default caused confusion. Ruff no longer excludes `build` unless it is excluded by a `.gitignore` file
or listed in `extend-exclude`.
|
||||
|
||||
### `--format` is no longer a valid `rule` or `linter` command option
|
||||
|
||||
Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
|
||||
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
|
||||
|
||||
## 0.1.9
|
||||
|
||||
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
|
||||
@@ -196,7 +58,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
|
||||
|
||||
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
|
||||
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
|
||||
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
|
||||
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
|
||||
|
||||
2095 CHANGELOG.md (file diff suppressed because it is too large)

197 CONTRIBUTING.md
@@ -2,20 +2,58 @@
|
||||
|
||||
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
|
||||
|
||||
- [The Basics](#the-basics)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Development](#development)
|
||||
- [Project Structure](#project-structure)
|
||||
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
|
||||
- [Rule naming convention](#rule-naming-convention)
|
||||
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
|
||||
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
|
||||
- [MkDocs](#mkdocs)
|
||||
- [Release Process](#release-process)
|
||||
- [Creating a new release](#creating-a-new-release)
|
||||
- [Ecosystem CI](#ecosystem-ci)
|
||||
- [Benchmarking and Profiling](#benchmarking-and-profiling)
|
||||
- [CPython Benchmark](#cpython-benchmark)
|
||||
- [Microbenchmarks](#microbenchmarks)
|
||||
- [Benchmark-driven Development](#benchmark-driven-development)
|
||||
- [PR Summary](#pr-summary)
|
||||
- [Tips](#tips)
|
||||
- [Profiling Projects](#profiling-projects)
|
||||
- [Linux](#linux)
|
||||
- [Mac](#mac)
|
||||
- [`cargo dev`](#cargo-dev)
|
||||
- [Subsystems](#subsystems)
|
||||
- [Compilation Pipeline](#compilation-pipeline)
|
||||
- [Import Categorization](#import-categorization)
|
||||
- [Project root](#project-root)
|
||||
- [Package root](#package-root)
|
||||
- [Import categorization](#import-categorization-1)
|
||||
|
||||
## The Basics
|
||||
|
||||
Ruff welcomes contributions in the form of pull requests.
|
||||
Ruff welcomes contributions in the form of Pull Requests.
|
||||
|
||||
For small changes (e.g., bug fixes), feel free to submit a PR.
|
||||
|
||||
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
|
||||
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
|
||||
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
|
||||
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
||||
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
|
||||
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
|
||||
that are ready for contributions.
|
||||
|
||||
If you're looking for a place to start, we recommend implementing a new lint rule (see:
|
||||
[_Adding a new lint rule_](#example-adding-a-new-lint-rule)), which will allow you to learn from and
|
||||
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
|
||||
existing Python plugins, which can be used as a reference implementation.
|
||||
|
||||
As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
|
||||
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
|
||||
guidance.
|
||||
|
||||
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
|
||||
|
||||
### Prerequisites
|
||||
@@ -29,14 +67,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
||||
cargo install cargo-insta
|
||||
```
|
||||
|
||||
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
|
||||
run Python utility commands.
|
||||
And you'll need pre-commit to run some validation checks:
|
||||
|
||||
```shell
|
||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||
```
|
||||
|
||||
You can optionally install pre-commit hooks to automatically run the validation checks
|
||||
when making a commit:
|
||||
|
||||
```shell
|
||||
uv tool install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
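As a rough sketch (hook ids come from `.pre-commit-config.yaml`; `typos` below is just one example), you can also invoke the hooks on demand without committing:

```shell
# Run every configured hook against the whole repository.
uvx pre-commit run --all-files --show-diff-on-failure

# Or run a single hook by its id, e.g. the `typos` spell checker.
uvx pre-commit run typos --all-files
```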
@@ -64,14 +104,12 @@ and that it passes both the lint and test validation checks:
|
||||
```shell
|
||||
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
|
||||
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
|
||||
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
```
|
||||
|
||||
These checks will run on GitHub Actions when you open your pull request, but running them locally
|
||||
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
|
||||
will save you time and expedite the merge process.
|
||||
|
||||
If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.
|
||||
|
||||
Note that many code changes also require updating the snapshot tests, which is done interactively
|
||||
after running `cargo test` like so:
|
||||
|
||||
@@ -79,14 +117,7 @@ after running `cargo test` like so:
|
||||
cargo insta review
|
||||
```
|
||||
|
||||
If your pull request relates to a specific lint rule, include the category and rule code in the
|
||||
title, as in the following examples:
|
||||
|
||||
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
|
||||
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
|
||||
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
|
||||
|
||||
Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
|
||||
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
|
||||
prior to merging.
|
||||
|
||||
### Project Structure
|
||||
@@ -94,8 +125,8 @@ prior to merging.
|
||||
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
|
||||
such that all crates are contained in a flat `crates` directory.
|
||||
|
||||
The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
|
||||
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
|
||||
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
|
||||
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
|
||||
|
||||
At the time of writing, the repository includes the following crates:
|
||||
|
||||
@@ -139,7 +170,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
|
||||
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
|
||||
|
||||
1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
|
||||
`#[derive(ViolationMetadata)]` to see examples.
|
||||
`#[violation]` to see examples.
|
||||
|
||||
1. In that file, define a function that adds the violation to the diagnostic list as appropriate
|
||||
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
|
||||
@@ -168,14 +199,11 @@ and calling out to lint rule analyzer functions as it goes.
|
||||
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
|
||||
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
|
||||
|
||||
Once you're satisfied with your code, add tests for your rule
|
||||
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
|
||||
associated assets (like our JSON Schema) with `cargo dev generate-all`.
|
||||
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
|
||||
for more details.
|
||||
|
||||
Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
|
||||
in:
|
||||
|
||||
> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
|
||||
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
|
||||
`cargo dev generate-all`.
|
||||
|
||||
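A minimal sketch of that loop, assuming you are iterating on a rule in the `flake8_bugbear` plugin (substitute your own plugin or rule name in the test filter):

```shell
# Run just the tests for the plugin you touched; the trailing argument is a
# plain substring filter on test names, so adjust it to your rule.
cargo test -p ruff_linter flake8_bugbear

# Review and accept any new or changed snapshots interactively.
cargo insta review

# Regenerate the docs, ruff.schema.json, and other derived assets.
cargo dev generate-all
```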
#### Rule naming convention
|
||||
|
||||
@@ -249,7 +277,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` fil
|
||||
intermediate representation; and the final, internal representation used to power Ruff.
|
||||
|
||||
To add a new configuration option, you'll likely want to modify these latter few files (along with
|
||||
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
|
||||
variables (e.g., `_`).
|
||||
|
||||
@@ -265,24 +293,30 @@ To preview any changes to the documentation locally:
|
||||
|
||||
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
|
||||
|
||||
1. Install MkDocs and Material for MkDocs with:
|
||||
|
||||
```shell
|
||||
pip install -r docs/requirements.txt
|
||||
```
|
||||
|
||||
1. Generate the MkDocs site with:
|
||||
|
||||
```shell
|
||||
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
|
||||
python scripts/generate_mkdocs.py
|
||||
```
|
||||
|
||||
1. Run the development server with:
|
||||
|
||||
```shell
|
||||
# For contributors.
|
||||
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
|
||||
mkdocs serve -f mkdocs.public.yml
|
||||
|
||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
|
||||
mkdocs serve -f mkdocs.insiders.yml
|
||||
```
|
||||
|
||||
The documentation should then be available locally at
|
||||
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
|
||||
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).
|
||||
|
||||
## Release Process
|
||||
|
||||
@@ -295,64 +329,42 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
|
||||
### Creating a new release
|
||||
|
||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
||||
We use an experimental in-house tool for managing releases.
|
||||
|
||||
1. Run `./scripts/release.sh`; this command will:
|
||||
|
||||
- Generate a temporary virtual environment with `rooster`
|
||||
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
|
||||
1. Run `rooster release`; this command will:
|
||||
- Generate a changelog entry in `CHANGELOG.md`
|
||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||
- Update references to versions in the `README.md` and documentation
|
||||
- Display contributors for the release
|
||||
|
||||
1. The changelog should then be editorialized for consistency
|
||||
|
||||
- Often labels will be missing from pull requests, so they will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
|
||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||
|
||||
1. Run `cargo check`. This should update the lock file with new versions.
|
||||
|
||||
1. Create a pull request with the changelog and version updates
|
||||
|
||||
1. Merge the PR
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
|
||||
- The new version number (without starting `v`)
|
||||
|
||||
- The commit hash of the merged release pull request on `main`
|
||||
1. The release workflow will do the following:
|
||||
|
||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||
uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
|
||||
make sure you're [re-running all the failed
|
||||
jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
|
||||
uploaded anything, you can restart after pushing a fix.
|
||||
1. Upload to PyPI.
|
||||
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
|
||||
1. Attach artifacts to draft GitHub release
|
||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||
downstream jobs manually if needed.
|
||||
|
||||
1. Verify the GitHub release:
|
||||
|
||||
1. The Changelog should match the content of `CHANGELOG.md`
|
||||
1. Append the contributors from the `scripts/release.sh` script
|
||||
|
||||
1. Publish the GitHub release
|
||||
1. Open the draft release in the GitHub release section
|
||||
1. Copy the changelog for the release into the GitHub release
|
||||
- See previous releases for formatting of section headers
|
||||
1. Generate the contributor list with `rooster contributors` and add to the release notes
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
|
||||
1. One can determine if an update is needed when
|
||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
||||
|
||||
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
|
||||
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
|
||||
the release instructions in those repositories. `ruff-lsp` should always be updated
|
||||
before `ruff-vscode`.
|
||||
|
||||
This step is generally not required for a patch release, but should always be done
|
||||
for a minor release.
|
||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||
|
||||
## Ecosystem CI
|
||||
|
||||
@@ -360,8 +372,9 @@ GitHub Actions will run your changes against a number of real-world projects fro
|
||||
report on any linter or formatter differences. You can also run those checks locally via:
|
||||
|
||||
```shell
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
pip install -e ./python/ruff-ecosystem
|
||||
ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
```
|
||||
|
||||
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
|
||||
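A variation on the same commands, if you prefer to compare against an optimized build (a sketch; the first argument `ruff` is assumed to be a baseline executable on your PATH, and the second is your local build):

```shell
# Build an optimized binary first; the ecosystem checks run noticeably faster with it.
cargo build --release

# Compare the baseline executable against your local build.
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/release/ruff"
```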
@@ -374,7 +387,7 @@ We have several ways of benchmarking and profiling Ruff:
|
||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
||||
- Profiling the linter on either the microbenchmarks or entire projects
|
||||
|
||||
> **Note**
|
||||
> \[!NOTE\]
|
||||
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
|
||||
> applications, like web browsers). You may also want to switch your CPU to a "performance"
|
||||
> mode, if it exists, especially when benchmarking short-lived processes.
|
||||
@@ -388,18 +401,12 @@ which makes it a good target for benchmarking.
|
||||
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
|
||||
```
|
||||
|
||||
Install `hyperfine`:
|
||||
|
||||
```shell
|
||||
cargo install hyperfine
|
||||
```
|
||||
|
||||
To benchmark the release build:
|
||||
|
||||
```shell
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
|
||||
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
|
||||
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
|
||||
@@ -418,7 +425,7 @@ To benchmark against the ecosystem's existing tools:
|
||||
|
||||
```shell
|
||||
hyperfine --ignore-failure --warmup 5 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"pyflakes crates/ruff_linter/resources/test/cpython" \
|
||||
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
|
||||
"pycodestyle crates/ruff_linter/resources/test/cpython" \
|
||||
@@ -464,10 +471,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
|
||||
|
||||
```shell
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
```
|
||||
|
||||
You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
|
||||
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
|
||||
above. All reported benchmarks were computed using the versions specified by
|
||||
`./scripts/benchmarks/pyproject.toml` on Python 3.11.
|
||||
|
||||
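To compare a branch against a baseline end to end, the same `hyperfine` setup works with two binaries (the baseline path below, `/tmp/ruff-main`, is purely illustrative; stash a baseline build wherever you like):

```shell
# Build the current branch.
cargo build --release

# Benchmark both binaries on the same checkout of CPython.
hyperfine --warmup 10 \
  "/tmp/ruff-main check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e"
```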
@@ -521,8 +528,6 @@ You can run the benchmarks with
|
||||
cargo benchmark
|
||||
```
|
||||
|
||||
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
|
||||
|
||||
#### Benchmark-driven Development
|
||||
|
||||
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
|
||||
@@ -561,7 +566,7 @@ cargo install critcmp
|
||||
|
||||
#### Tips
|
||||
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
||||
to only run the lexer benchmarks.
|
||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
||||
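One sketch of a before/after comparison with these commands, using Criterion's saved baselines and the `critcmp` tool (installable with `cargo install critcmp`, as noted above); the baseline names are arbitrary:

```shell
# On main: record a baseline run.
cargo bench -p ruff_benchmark -- --save-baseline=main

# On your branch: record a second baseline, then compare the two.
cargo bench -p ruff_benchmark -- --save-baseline=pr
critcmp main pr
```

`critcmp` reads the saved baselines from Criterion's output directory, so both runs should happen in the same working tree.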
@@ -631,11 +636,11 @@ Otherwise, follow the instructions from the linux section.
|
||||
`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
|
||||
utils with it:
|
||||
|
||||
- `cargo dev print-ast <file>`: Print the AST of a python file using Ruff's
|
||||
[Python parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser).
|
||||
For `if True: pass # comment`, you can see the syntax tree, the byte offsets for start and
|
||||
stop of each node and also how the `:` token, the comment and whitespace are not represented
|
||||
anymore:
|
||||
- `cargo dev print-ast <file>`: Print the AST of a python file using the
|
||||
[RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
|
||||
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
|
||||
for start and stop of each node and also how the `:` token, the comment and whitespace are not
|
||||
represented anymore:
|
||||
|
||||
```text
|
||||
[
|
||||
@@ -808,8 +813,8 @@ To understand Ruff's import categorization system, we first need to define two c
|
||||
"project root".)
|
||||
- "Package root": The top-most directory defining the Python package that includes a given Python
|
||||
file. To find the package root for a given Python file, traverse up its parent directories until
|
||||
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
|
||||
marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
|
||||
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
|
||||
a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
|
||||
just before that, i.e., the first directory in the package.
|
||||
|
||||
For example, given:
|
||||
@@ -863,7 +868,7 @@ each configuration file.
|
||||
|
||||
The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
|
||||
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
(inclusive of the root itself). The module path can be thought of as "the path you would use to
|
||||
import the module" (e.g., `import foo.bar.baz`).
|
||||
|
||||
@@ -898,11 +903,15 @@ There are three ways in which an import can be categorized as "first-party":
|
||||
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
|
||||
automatically. This check is as simple as comparing the first segment of the current file's
|
||||
module path to the first segment of the import.
|
||||
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
|
||||
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
|
||||
sets the directories to scan when identifying first-party imports. The algorithm is
|
||||
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
|
||||
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
||||
file `foo.py`.
|
||||
|
||||
By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
|
||||
This ensures that Ruff supports both flat and "src" layouts out of the box.
|
||||
By default, `src` is set to the project root. In the above example, we'd want to set
|
||||
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
|
||||
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
|
||||
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
|
||||
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
|
||||
explicitly.
|
||||
|
||||
2545 Cargo.lock generated (file diff suppressed because it is too large)

251 Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
rust-version = "1.80"
|
||||
rust-version = "1.71"
|
||||
homepage = "https://docs.astral.sh/ruff"
|
||||
documentation = "https://docs.astral.sh/ruff"
|
||||
repository = "https://github.com/astral-sh/ruff"
|
||||
@@ -12,170 +12,108 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
ruff = { path = "crates/ruff" }
|
||||
ruff_cache = { path = "crates/ruff_cache" }
|
||||
ruff_db = { path = "crates/ruff_db", default-features = false }
|
||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
||||
ruff_graph = { path = "crates/ruff_graph" }
|
||||
ruff_index = { path = "crates/ruff_index" }
|
||||
ruff_linter = { path = "crates/ruff_linter" }
|
||||
ruff_macros = { path = "crates/ruff_macros" }
|
||||
ruff_notebook = { path = "crates/ruff_notebook" }
|
||||
ruff_python_ast = { path = "crates/ruff_python_ast" }
|
||||
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
|
||||
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
|
||||
ruff_python_index = { path = "crates/ruff_python_index" }
|
||||
ruff_python_literal = { path = "crates/ruff_python_literal" }
|
||||
ruff_python_parser = { path = "crates/ruff_python_parser" }
|
||||
ruff_python_semantic = { path = "crates/ruff_python_semantic" }
|
||||
ruff_python_stdlib = { path = "crates/ruff_python_stdlib" }
|
||||
ruff_python_trivia = { path = "crates/ruff_python_trivia" }
|
||||
ruff_server = { path = "crates/ruff_server" }
|
||||
ruff_source_file = { path = "crates/ruff_source_file" }
|
||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
||||
red_knot_vendored = { path = "crates/red_knot_vendored" }
|
||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
||||
|
||||
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
|
||||
red_knot_server = { path = "crates/red_knot_server" }
|
||||
red_knot_test = { path = "crates/red_knot_test" }
|
||||
red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }
|
||||
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
aho-corasick = { version = "1.1.2" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.80" }
|
||||
assert_fs = { version = "1.1.0" }
|
||||
argfile = { version = "0.2.0" }
|
||||
anyhow = { version = "1.0.79" }
|
||||
argfile = { version = "0.1.6" }
|
||||
assert_cmd = { version = "2.0.13" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.5.0" }
|
||||
bstr = { version = "1.9.1" }
|
||||
bitflags = { version = "2.4.1" }
|
||||
bstr = { version = "1.9.0" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
camino = { version = "1.1.7" }
|
||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.6.0" }
|
||||
clearscreen = { version = "4.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
||||
chrono = { version = "0.4.34", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.4.18", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "2.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.3.3", default-features = false }
|
||||
colored = { version = "2.1.0" }
|
||||
configparser = { version = "3.0.3" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
compact_str = "0.8.0"
|
||||
countme = { version ="3.0.1"}
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
dir-test = { version = "0.4.0" }
|
||||
dunce = { version = "1.0.5" }
|
||||
dirs = { version = "5.0.0" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.8.0" }
|
||||
fern = { version = "0.7.0" }
|
||||
env_logger = { version ="0.10.1"}
|
||||
fern = { version = "0.6.1" }
|
||||
filetime = { version = "0.2.23" }
|
||||
fs-err = { version ="2.11.0"}
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
globwalk = { version = "0.9.1" }
|
||||
hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||
"raw-entry",
|
||||
"inline-more",
|
||||
] }
|
||||
hexf-parse = { version ="0.2.1"}
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imara-diff ={ version = "0.1.5"}
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = { version = "2.6.0" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1" }
|
||||
insta-cmd = { version = "0.6.0" }
|
||||
indicatif ={ version = "0.17.8"}
|
||||
indoc ={ version = "2.0.4"}
|
||||
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
||||
insta-cmd = { version = "0.4.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.14.0" }
|
||||
js-sys = { version = "0.3.69" }
|
||||
jod-thread = { version = "0.1.2" }
|
||||
libc = { version = "0.2.153" }
|
||||
itertools = { version = "0.12.1" }
|
||||
js-sys = { version = "0.3.67" }
|
||||
lalrpop-util = { version = "0.20.0", default-features = false }
|
||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
lsp-server = { version = "0.7.6" }
|
||||
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
|
||||
"proposed",
|
||||
] }
|
||||
matchit = { version = "0.8.1" }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
mimalloc = { version ="0.1.39"}
|
||||
natord = { version = "1.0.9" }
|
||||
notify = { version = "7.0.0" }
|
||||
ordermap = { version = "0.5.0" }
|
||||
notify = { version = "6.1.1" }
|
||||
once_cell = { version = "1.19.0" }
|
||||
path-absolutize = { version = "3.1.1" }
|
||||
path-slash = { version = "0.2.1" }
|
||||
pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.7.1" }
|
||||
pep440_rs = { version = "0.4.0", features = ["serde"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
pyproject-toml = { version = "0.13.4" }
|
||||
quick-junit = { version = "0.5.0" }
|
||||
proc-macro2 = { version = "1.0.78" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
quick-junit = { version = "0.3.5" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.8.5" }
|
||||
rayon = { version = "1.10.0" }
|
||||
rayon = { version = "1.8.1" }
|
||||
regex = { version = "1.10.2" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "88a1d7774d78f048fbd77d40abca9ebd729fd1f0" }
|
||||
result-like = { version = "0.5.0" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
serde-wasm-bindgen = { version = "0.6.4" }
|
||||
seahash = { version ="4.1.0"}
|
||||
semver = { version = "1.0.21" }
|
||||
serde = { version = "1.0.196", features = ["derive"] }
|
||||
serde-wasm-bindgen = { version = "0.6.3" }
|
||||
serde_json = { version = "1.0.113" }
|
||||
serde_test = { version = "1.0.152" }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = [
    "macros",
] }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
shellexpand = { version = "3.0.0" }
shlex = { version ="1.3.0"}
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
smallvec = { version = "1.13.1" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.40" }
tempfile = { version ="3.9.0"}
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
thiserror = { version = "1.0.57" }
tikv-jemallocator = { version ="0.5.0"}
toml = { version = "0.8.9" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
    "env-filter",
    "fmt",
] }
tracing-tree = { version = "0.4.0" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unic-ucd-category = { version ="0.9"}
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.1" }
ureq = { version = "2.9.1" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
    "v4",
    "fast-rng",
    "macro-diagnostics",
    "js",
] }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wasm-bindgen = { version = "0.2.84" }
wasm-bindgen-test = { version = "0.3.40" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
    "cfg(fuzzing)",
    "cfg(codspeed)",
] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -184,16 +122,14 @@ char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
map_unwrap_or = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
@@ -206,10 +142,6 @@ get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# nursery rules
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"

[profile.release]
# Note that we set these explicitly, and these values
@@ -244,64 +176,3 @@ opt-level = 1
[profile.profiling]
inherits = "release"
debug = 1

# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"

# Config for 'dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.25.2-prerelease.3"
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
windows-archive = ".zip"
# The archive format to use for non-windows builds (defaults .tar.xz)
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether dist should create a Github Release or use an existing draft
create-release = true
# Which actions to run on pull requests
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase dist should use to create the GitHub release
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME
25 LICENSE
@@ -1371,28 +1371,3 @@ are:
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
- pydoclint, licensed as follows:
|
||||
"""
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023 jsh9
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
132 README.md
@@ -4,12 +4,11 @@
|
||||
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
[](https://pypi.python.org/pypi/ruff)
|
||||
[](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
||||
[](https://pypi.python.org/pypi/ruff)
|
||||
[](https://pypi.python.org/pypi/ruff)
|
||||
[](https://github.com/astral-sh/ruff/actions)
|
||||
[](https://discord.com/invite/astral-sh)
|
||||
|
||||
[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
||||
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
||||
|
||||
An extremely fast Python linter and code formatter, written in Rust.
|
||||
|
||||
@@ -28,15 +27,15 @@ An extremely fast Python linter and code formatter, written in Rust.
|
||||
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
|
||||
- 🐍 Installable via `pip`
|
||||
- 🛠️ `pyproject.toml` support
|
||||
- 🤝 Python 3.13 compatibility
|
||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
|
||||
- 🤝 Python 3.12 compatibility
|
||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
||||
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||
of popular Flake8 plugins, like flake8-bugbear
|
||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
functionality behind a single, common interface.
|
||||
@@ -50,7 +49,6 @@ times faster than any individual tool.
|
||||
Ruff is extremely actively developed and used in major open-source projects like:
|
||||
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- [Apache Superset](https://github.com/apache/superset)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Hugging Face](https://github.com/huggingface/transformers)
|
||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||
@@ -110,47 +108,16 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
1. [Who's Using Ruff?](#whos-using-ruff)
|
||||
1. [License](#license)
|
||||
|
||||
## Getting Started<a id="getting-started"></a>
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.
|
||||
|
||||
Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
|
||||
```shell
|
||||
uvx ruff check # Lint all files in the current directory.
|
||||
uvx ruff format # Format all files in the current directory.
|
||||
```
|
||||
|
||||
Or install Ruff with `uv` (recommended), `pip`, or `pipx`:
|
||||
|
||||
```shell
|
||||
# With uv.
|
||||
uv tool install ruff@latest # Install Ruff globally.
|
||||
uv add --dev ruff # Or add Ruff to your project.
|
||||
|
||||
# With pip.
|
||||
pip install ruff
|
||||
|
||||
# With pipx.
|
||||
pipx install ruff
|
||||
```
|
||||
|
||||
Starting with version `0.5.0`, Ruff can be installed with our standalone installers:
|
||||
|
||||
```shell
|
||||
# On macOS and Linux.
|
||||
curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
|
||||
# On Windows.
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.9.0/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.9.0/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -161,7 +128,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
|
||||
To run Ruff as a linter, try any of the following:
|
||||
|
||||
```shell
|
||||
ruff check # Lint all files in the current directory (and any subdirectories).
|
||||
ruff check . # Lint all files in the current directory (and any subdirectories).
|
||||
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
|
||||
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
|
||||
ruff check path/to/code/to/file.py # Lint `file.py`.
|
||||
@@ -171,7 +138,7 @@ ruff check @arguments.txt # Lint using an input file, treating its con
|
||||
Or, to run Ruff as a formatter:
|
||||
|
||||
```shell
|
||||
ruff format # Format all files in the current directory (and any subdirectories).
|
||||
ruff format . # Format all files in the current directory (and any subdirectories).
|
||||
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
|
||||
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
|
||||
ruff format path/to/code/to/file.py # Format `file.py`.
|
||||
@@ -183,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.0
|
||||
rev: v0.2.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -192,10 +159,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
- id: ruff-format
|
||||
```
|
||||
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
|
||||
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
|
||||
|
||||
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
|
||||
[`ruff-action`](https://github.com/astral-sh/ruff-action):
|
||||
[`ruff-action`](https://github.com/chartboost/ruff-action):
|
||||
|
||||
```yaml
|
||||
name: Ruff
|
||||
@@ -204,19 +172,20 @@ jobs:
|
||||
ruff:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/ruff-action@v3
|
||||
- uses: actions/checkout@v3
|
||||
- uses: chartboost/ruff-action@v1
|
||||
```
|
||||
|
||||
### Configuration<a id="configuration"></a>
|
||||
### Configuration
|
||||
|
||||
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
|
||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||
for a complete list of all configuration options).
|
||||
|
||||
If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
|
||||
If left unspecified, Ruff's default configuration is equivalent to:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Exclude a variety of commonly ignored directories.
|
||||
exclude = [
|
||||
".bzr",
|
||||
@@ -251,10 +220,10 @@ exclude = [
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
|
||||
# Assume Python 3.9
|
||||
target-version = "py39"
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
|
||||
[lint]
|
||||
[tool.ruff.lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
select = ["E4", "E7", "E9", "F"]
|
||||
ignore = []
|
||||
@@ -266,7 +235,7 @@ unfixable = []
|
||||
# Allow unused variables when underscore-prefixed.
|
||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||
|
||||
[format]
|
||||
[tool.ruff.format]
|
||||
# Like Black, use double quotes for strings.
|
||||
quote-style = "double"
|
||||
|
||||
@@ -280,35 +249,21 @@ skip-magic-trailing-comma = false
|
||||
line-ending = "auto"
|
||||
```
|
||||
|
||||
Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
|
||||
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
|
||||
|
||||
Some configuration options can be provided via dedicated command-line arguments, such as those
|
||||
related to rule enablement and disablement, file discovery, and logging level:
|
||||
Some configuration options can be provided via the command-line, such as those related to
|
||||
rule enablement and disablement, file discovery, and logging level:
|
||||
|
||||
```shell
|
||||
ruff check --select F401 --select F403 --quiet
|
||||
ruff check path/to/code/ --select F401 --select F403 --quiet
|
||||
```
|
||||
|
||||
The remaining configuration options can be provided through a catch-all `--config` argument:
|
||||
|
||||
```shell
|
||||
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
|
||||
```
|
||||
|
||||
To opt in to the latest lint rules, formatter style changes, interface updates, and more, enable
|
||||
[preview mode](https://docs.astral.sh/ruff/rules/) by setting `preview = true` in your configuration
|
||||
file or passing `--preview` on the command line. Preview mode enables a collection of unstable
|
||||
features that may change prior to stabilization.
|
||||
|
||||
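For example, in a `pyproject.toml` (use the top-level `preview = true` key when configuring via `ruff.toml` instead), preview mode can be enabled like this:

```toml
[tool.ruff]
preview = true
```

or per invocation with `ruff check --preview`.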
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||
for more on the linting and formatting commands, respectively.
|
||||
|
||||
## Rules<a id="rules"></a>
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||
**Ruff supports over 700 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
Rust as a first-party feature.
|
||||
|
||||
@@ -364,6 +319,7 @@ quality tools, including:
|
||||
- [flake8-super](https://pypi.org/project/flake8-super/)
|
||||
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
|
||||
- [flake8-todos](https://pypi.org/project/flake8-todos/)
|
||||
- [flake8-trio](https://pypi.org/project/flake8-trio/)
|
||||
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
|
||||
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
|
||||
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
|
||||
@@ -380,21 +336,21 @@ quality tools, including:
|
||||
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
|
||||
|
||||
## Contributing<a id="contributing"></a>
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Support<a id="support"></a>
|
||||
## Support
|
||||
|
||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Acknowledgements<a id="acknowledgements"></a>
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
|
||||
@@ -418,11 +374,10 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
## Who's Using Ruff?<a id="whos-using-ruff"></a>
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
- [Albumentations](https://github.com/albumentations-team/albumentations)
|
||||
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
@@ -430,16 +385,13 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- CERN ([Indico](https://getindico.io/))
|
||||
- [DVC](https://github.com/iterative/dvc)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
|
||||
- [Dify](https://github.com/langgenius/dify)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Godot](https://github.com/godotengine/godot)
|
||||
- [Gradio](https://github.com/gradio-app/gradio)
|
||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
- [HTTPX](https://github.com/encode/httpx)
|
||||
@@ -448,12 +400,10 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||
[Datasets](https://github.com/huggingface/datasets),
|
||||
[Diffusers](https://github.com/huggingface/diffusers))
|
||||
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
|
||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [ivy](https://github.com/unifyai/ivy)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [Kraken Tech](https://kraken.tech/)
|
||||
- [LangChain](https://github.com/hwchase17/langchain)
|
||||
- [Litestar](https://litestar.dev/)
|
||||
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
||||
@@ -463,18 +413,15 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
|
||||
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
|
||||
[LightGBM](https://github.com/microsoft/LightGBM))
|
||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
|
||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
|
||||
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
||||
- [Mypy](https://github.com/python/mypy)
|
||||
- [Nautobot](https://github.com/nautobot/nautobot)
|
||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||
- [Neon](https://github.com/neondatabase/neon)
|
||||
- [Nokia](https://nokia.com/)
|
||||
- [NoneBot](https://github.com/nonebot/nonebot2)
|
||||
- [NumPyro](https://github.com/pyro-ppl/numpyro)
|
||||
- [ONNX](https://github.com/onnx/onnx)
|
||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
|
||||
- [PDM](https://github.com/pdm-project/pdm)
|
||||
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||
@@ -502,7 +449,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
||||
- [Starlette](https://github.com/encode/starlette)
|
||||
- [Streamlit](https://github.com/streamlit/streamlit)
|
||||
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
||||
- [Vega-Altair](https://github.com/altair-viz/altair)
|
||||
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
||||
@@ -538,9 +484,9 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
|
||||
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
|
||||
```
|
||||
|
||||
## License<a id="license"></a>
|
||||
## License
|
||||
|
||||
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
||||
MIT
|
||||
|
||||
<div align="center">
|
||||
<a target="_blank" href="https://astral.sh" style="background:none">
|
||||
|
||||
18 _typos.toml
@@ -1,27 +1,11 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = [
    "crates/red_knot_vendored/vendor/**/*",
    "**/resources/**/*",
    "**/snapshots/**/*",
    "crates/red_knot_workspace/src/workspace/pyproject/package_name.rs"
]
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"

[default]
extend-ignore-re = [
    # Line ignore with trailing "spellchecker:disable-line"
    "(?Rm)^.*#\\s*spellchecker:disable-line$",
    "LICENSEs",
]
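For reference, the directive that the regex above matches is just a trailing comment on the offending line; an illustrative (hypothetical) example:

```python
x = "teh quick brown fox"  # spellchecker:disable-line
```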
28 clippy.toml
@@ -1,25 +1,7 @@
doc-valid-idents = [
    "..",
    "CodeQL",
    "FastAPI",
    "IPython",
    "LangChain",
    "LibCST",
    "McCabe",
    "NumPy",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
    "SNMPv1",
    "SNMPv2",
    "SNMPv3",
    "PyFlakes"
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
    "StackOverflow",
    "CodeQL",
    "IPython",
    "NumPy",
    "..",
]
@@ -1,40 +0,0 @@
|
||||
[package]
|
||||
name = "red_knot"
|
||||
version = "0.0.0"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
documentation.workspace = true
|
||||
repository.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
red_knot_workspace = { workspace = true, features = ["zstd"] }
|
||||
red_knot_server = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
clap = { workspace = true, features = ["wrap_help"] }
|
||||
colored = { workspace = true }
|
||||
countme = { workspace = true, features = ["enable"] }
|
||||
crossbeam = { workspace = true }
|
||||
ctrlc = { version = "3.4.4" }
|
||||
rayon = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tracing = { workspace = true, features = ["release_max_level_debug"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
||||
tracing-flame = { workspace = true }
|
||||
tracing-tree = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
filetime = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
Binary file not shown. (Removed image; previously 40 KiB.)
@@ -1,128 +0,0 @@
# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note, tracing messages with severity `debug` or greater are user-facing. They should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels.

- default: Only show errors and warnings.
- `-v` activates `info!`: Shows generally useful information such as the paths of configuration files and the detected platform. The output is modest, so it is something you might enable in CI by default, similar to how `cargo build` tells you which packages are fresh.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is tied to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of it is noise and it's really slow (we dump, e.g., the entire resolution graph). It is only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
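For reference, the levels map onto the CLI like this (a sketch; the `red_knot` binary name is taken from the profiling example below):

```bash
red_knot          # errors and warnings only
red_knot -v       # + info messages
red_knot -vv      # + debug messages and timestamps
red_knot -vvv     # + trace messages and tracing spans (debug builds only)
```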

## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing logging output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
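Combining the two variables, a hedged example for reproducing a bug with readable, single-threaded debug output:

```bash
RED_KNOT_LOG=debug RAYON_NUM_THREADS=1 red_knot
```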

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Show the salsa `execute: my_query` messages in addition to all red knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside of a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values (in our case, the source text and parsed AST),
which very quickly leads to extremely long outputs.
## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially at the `warn` or `error` level, because it isn't guaranteed
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to tell the user that generating a lint violation failed:
the message would only be shown when linting the file for the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator.
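A minimal, framework-agnostic sketch of that guideline (the types and functions below are hypothetical, not Red Knot's actual API): the failure is carried in the memoized result itself, so it is still available when the result is served from a cache instead of being recomputed.

```rust
/// Hypothetical query result: diagnostics and failures travel together.
#[derive(Clone, Debug, PartialEq, Eq)]
enum LintOutcome {
    Diagnostics(Vec<String>),
    /// The lint could not run at all. Returned instead of logged so the
    /// caller can surface it on every run, cached or not.
    Failed(String),
}

/// Stand-in for a Salsa query body.
fn lint_source(source: &str) -> LintOutcome {
    if source.contains('\0') {
        LintOutcome::Failed("source contains a NUL byte".to_string())
    } else {
        LintOutcome::Diagnostics(Vec::new())
    }
}

fn main() {
    println!("{:?}", lint_source("print('hello')"));
}
```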

## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.

## Release builds

`trace!` events are removed in release builds.

## Profiling

Red Knot writes a folded stack trace named `tracing.folded` to the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```



See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
@@ -1,254 +0,0 @@
|
||||
//! Sets up logging for Red Knot
|
||||
|
||||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use std::fmt;
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use tracing::{Event, Subscriber};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::fmt::format::Writer;
|
||||
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
|
||||
use tracing_subscriber::registry::LookupSpan;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
/// Logging flags to `#[command(flatten)]` into your CLI
|
||||
#[derive(clap::Args, Debug, Clone, Default)]
|
||||
#[command(about = None, long_about = None)]
|
||||
pub(crate) struct Verbosity {
|
||||
#[arg(
|
||||
long,
|
||||
short = 'v',
|
||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
)]
|
||||
verbose: u8,
|
||||
}
|
||||
|
||||
impl Verbosity {
|
||||
/// Returns the verbosity level based on the number of `-v` flags.
|
||||
///
|
||||
/// Returns `None` if the user did not specify any verbosity flags.
|
||||
pub(crate) fn level(&self) -> VerbosityLevel {
|
||||
match self.verbose {
|
||||
0 => VerbosityLevel::Default,
|
||||
1 => VerbosityLevel::Verbose,
|
||||
2 => VerbosityLevel::ExtraVerbose,
|
||||
_ => VerbosityLevel::Trace,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub(crate) enum VerbosityLevel {
|
||||
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN).
|
||||
Default,
|
||||
|
||||
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO).
|
||||
/// Corresponds to `-v`.
|
||||
Verbose,
|
||||
|
||||
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
|
||||
/// Corresponds to `-vv`
|
||||
ExtraVerbose,
|
||||
|
||||
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
|
||||
Trace,
|
||||
}
|
||||
|
||||
impl VerbosityLevel {
|
||||
const fn level_filter(self) -> LevelFilter {
|
||||
match self {
|
||||
VerbosityLevel::Default => LevelFilter::WARN,
|
||||
VerbosityLevel::Verbose => LevelFilter::INFO,
|
||||
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
|
||||
VerbosityLevel::Trace => LevelFilter::TRACE,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) const fn is_trace(self) -> bool {
|
||||
matches!(self, VerbosityLevel::Trace)
|
||||
}
|
||||
|
||||
pub(crate) const fn is_extra_verbose(self) -> bool {
|
||||
matches!(self, VerbosityLevel::ExtraVerbose)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
|
||||
use tracing_subscriber::prelude::*;
|
||||
|
||||
// The `RED_KNOT_LOG` environment variable overrides the default log level.
|
||||
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
|
||||
EnvFilter::builder()
|
||||
.parse(log_env_variable)
|
||||
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
|
||||
} else {
|
||||
match level {
|
||||
VerbosityLevel::Default => {
|
||||
// Show warning traces
|
||||
EnvFilter::default().add_directive(LevelFilter::WARN.into())
|
||||
}
|
||||
level => {
|
||||
let level_filter = level.level_filter();
|
||||
|
||||
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
|
||||
let filter = EnvFilter::default().add_directive(
|
||||
format!("red_knot={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
);
|
||||
|
||||
filter.add_directive(
|
||||
format!("ruff={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (profiling_layer, guard) = setup_profile();
|
||||
|
||||
let registry = tracing_subscriber::registry()
|
||||
.with(filter)
|
||||
.with(profiling_layer);
|
||||
|
||||
if level.is_trace() {
|
||||
let subscriber = registry.with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(std::io::stderr)
|
||||
.with_timer(tracing_tree::time::Uptime::default()),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
} else {
|
||||
let subscriber = registry.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.event_format(RedKnotFormat {
|
||||
display_level: true,
|
||||
display_timestamp: level.is_extra_verbose(),
|
||||
show_spans: false,
|
||||
})
|
||||
.with_writer(std::io::stderr),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
}
|
||||
|
||||
Ok(TracingGuard {
|
||||
_flame_guard: guard,
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn setup_profile<S>() -> (
|
||||
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
|
||||
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
)
|
||||
where
|
||||
S: Subscriber + for<'span> LookupSpan<'span>,
|
||||
{
|
||||
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
|
||||
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
|
||||
.expect("Flame layer to be created");
|
||||
(Some(layer), Some(guard))
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TracingGuard {
|
||||
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
}
|
||||
|
||||
struct RedKnotFormat {
|
||||
display_timestamp: bool,
|
||||
display_level: bool,
|
||||
show_spans: bool,
|
||||
}
|
||||
|
||||
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
|
||||
impl<S, N> FormatEvent<S, N> for RedKnotFormat
|
||||
where
|
||||
S: Subscriber + for<'a> LookupSpan<'a>,
|
||||
N: for<'a> FormatFields<'a> + 'static,
|
||||
{
|
||||
fn format_event(
|
||||
&self,
|
||||
ctx: &FmtContext<'_, S, N>,
|
||||
mut writer: Writer<'_>,
|
||||
event: &Event<'_>,
|
||||
) -> fmt::Result {
|
||||
let meta = event.metadata();
|
||||
let ansi = writer.has_ansi_escapes();
|
||||
|
||||
if self.display_timestamp {
|
||||
let timestamp = chrono::Local::now()
|
||||
.format("%Y-%m-%d %H:%M:%S.%f")
|
||||
.to_string();
|
||||
if ansi {
|
||||
write!(writer, "{} ", timestamp.dimmed())?;
|
||||
} else {
|
||||
write!(
|
||||
writer,
|
||||
"{} ",
|
||||
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.display_level {
|
||||
let level = meta.level();
|
||||
// Same colors as tracing
|
||||
if ansi {
|
||||
let formatted_level = level.to_string();
|
||||
match *level {
|
||||
tracing::Level::TRACE => {
|
||||
write!(writer, "{} ", formatted_level.purple().bold())?;
|
||||
}
|
||||
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
|
||||
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
|
||||
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
|
||||
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
|
||||
}
|
||||
} else {
|
||||
write!(writer, "{level} ")?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.show_spans {
|
||||
let span = event.parent();
|
||||
let mut seen = false;
|
||||
|
||||
let span = span
|
||||
.and_then(|id| ctx.span(id))
|
||||
.or_else(|| ctx.lookup_current());
|
||||
|
||||
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
|
||||
|
||||
for span in scope {
|
||||
seen = true;
|
||||
if ansi {
|
||||
write!(writer, "{}:", span.metadata().name().bold())?;
|
||||
} else {
|
||||
write!(writer, "{}:", span.metadata().name())?;
|
||||
}
|
||||
}
|
||||
|
||||
if seen {
|
||||
writer.write_char(' ')?;
|
||||
}
|
||||
}
|
||||
|
||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
||||
|
||||
writeln!(writer)
|
||||
}
|
||||
}
|
||||
@@ -1,370 +0,0 @@
|
||||
use std::process::{ExitCode, Termination};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use clap::Parser;
|
||||
use colored::Colorize;
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use python_version::PythonVersion;
|
||||
use red_knot_python_semantic::SitePackages;
|
||||
use red_knot_server::run_server;
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::WorkspaceWatcher;
|
||||
use red_knot_workspace::workspace::settings::Configuration;
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
|
||||
use crate::logging::{setup_tracing, Verbosity};
|
||||
|
||||
mod logging;
|
||||
mod python_version;
|
||||
mod verbosity;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
name = "red-knot",
|
||||
about = "An extremely fast Python type checker."
|
||||
)]
|
||||
#[command(version)]
|
||||
struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Option<Command>,
|
||||
|
||||
/// Run the command within the given project directory.
|
||||
///
|
||||
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
|
||||
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
|
||||
///
|
||||
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
|
||||
#[arg(long, value_name = "PROJECT")]
|
||||
project: Option<SystemPathBuf>,
|
||||
|
||||
/// Path to the virtual environment the project uses.
|
||||
///
|
||||
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
|
||||
/// to resolve type information for the project's third-party dependencies.
|
||||
#[arg(long, value_name = "PATH")]
|
||||
venv_path: Option<SystemPathBuf>,
|
||||
|
||||
/// Custom directory to use for stdlib typeshed stubs.
|
||||
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
|
||||
typeshed: Option<SystemPathBuf>,
|
||||
|
||||
/// Additional path to use as a module-resolution source (can be passed multiple times).
|
||||
#[arg(long, value_name = "PATH")]
|
||||
extra_search_path: Option<Vec<SystemPathBuf>>,
|
||||
|
||||
/// Python version to assume when resolving types.
|
||||
#[arg(long, value_name = "VERSION", alias = "target-version")]
|
||||
python_version: Option<PythonVersion>,
|
||||
|
||||
#[clap(flatten)]
|
||||
verbosity: Verbosity,
|
||||
|
||||
/// Run in watch mode by re-running whenever files change.
|
||||
#[arg(long, short = 'W')]
|
||||
watch: bool,
|
||||
}
|
||||
|
||||
impl Args {
|
||||
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
|
||||
let mut configuration = Configuration::default();
|
||||
|
||||
if let Some(python_version) = self.python_version {
|
||||
configuration.python_version = Some(python_version.into());
|
||||
}
|
||||
|
||||
if let Some(venv_path) = &self.venv_path {
|
||||
configuration.search_paths.site_packages = Some(SitePackages::Derived {
|
||||
venv_path: SystemPath::absolute(venv_path, cli_cwd),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(typeshed) = &self.typeshed {
|
||||
configuration.search_paths.typeshed = Some(SystemPath::absolute(typeshed, cli_cwd));
|
||||
}
|
||||
|
||||
if let Some(extra_search_paths) = &self.extra_search_path {
|
||||
configuration.search_paths.extra_paths = extra_search_paths
|
||||
.iter()
|
||||
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
|
||||
.collect();
|
||||
}
|
||||
|
||||
configuration
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum Command {
|
||||
/// Start the language server
|
||||
Server,
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||
pub fn main() -> ExitStatus {
|
||||
run().unwrap_or_else(|error| {
|
||||
use std::io::Write;
|
||||
|
||||
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
|
||||
let mut stderr = std::io::stderr().lock();
|
||||
|
||||
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
|
||||
// some reason (e.g. failed to resolve the configuration)
|
||||
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
|
||||
// Currently we generally only see one error, but e.g. with io errors when resolving
|
||||
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
|
||||
// "failed to read file: subdir/pyproject.toml")
|
||||
for cause in error.chain() {
|
||||
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
||||
}
|
||||
|
||||
ExitStatus::Error
|
||||
})
|
||||
}
|
||||
|
||||
fn run() -> anyhow::Result<ExitStatus> {
|
||||
let args = Args::parse_from(std::env::args());
|
||||
|
||||
if matches!(args.command, Some(Command::Server)) {
|
||||
return run_server().map(|()| ExitStatus::Success);
|
||||
}
|
||||
|
||||
let verbosity = args.verbosity.level();
|
||||
countme::enable(verbosity.is_trace());
|
||||
let _guard = setup_tracing(verbosity)?;
|
||||
|
||||
// The base path that all CLI arguments are relative to.
|
||||
let cli_base_path = {
|
||||
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
||||
SystemPathBuf::from_path_buf(cwd)
|
||||
.map_err(|path| {
|
||||
anyhow!(
|
||||
"The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
|
||||
path.display()
|
||||
)
|
||||
})?
|
||||
};
|
||||
|
||||
let cwd = args
|
||||
.project
|
||||
.as_ref()
|
||||
.map(|cwd| {
|
||||
if cwd.as_std_path().is_dir() {
|
||||
Ok(SystemPath::absolute(cwd, &cli_base_path))
|
||||
} else {
|
||||
Err(anyhow!("Provided project path `{cwd}` is not a directory"))
|
||||
}
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_else(|| cli_base_path.clone());
|
||||
|
||||
let system = OsSystem::new(cwd.clone());
|
||||
let cli_configuration = args.to_configuration(&cwd);
|
||||
let workspace_metadata = WorkspaceMetadata::discover(
|
||||
system.current_directory(),
|
||||
&system,
|
||||
Some(&cli_configuration),
|
||||
)?;
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
// cache and load the cache if it exists.
|
||||
let mut db = RootDatabase::new(workspace_metadata, system)?;
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
ctrlc::set_handler(move || {
|
||||
let mut lock = main_loop_cancellation_token.lock().unwrap();
|
||||
|
||||
if let Some(token) = lock.take() {
|
||||
token.stop();
|
||||
}
|
||||
})?;
|
||||
|
||||
let exit_status = if args.watch {
|
||||
main_loop.watch(&mut db)?
|
||||
} else {
|
||||
main_loop.run(&mut db)
|
||||
};
|
||||
|
||||
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
||||
|
||||
std::mem::forget(db);
|
||||
|
||||
Ok(exit_status)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum ExitStatus {
|
||||
/// Checking was successful and there were no errors.
|
||||
Success = 0,
|
||||
|
||||
/// Checking was successful but there were errors.
|
||||
Failure = 1,
|
||||
|
||||
/// Checking failed.
|
||||
Error = 2,
|
||||
}
|
||||
|
||||
impl Termination for ExitStatus {
|
||||
fn report(self) -> ExitCode {
|
||||
ExitCode::from(self as u8)
|
||||
}
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
/// Sender that can be used to send messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
|
||||
/// Receiver for the messages sent **to** the main loop.
|
||||
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
|
||||
/// The file system watcher, if running in watch mode.
|
||||
watcher: Option<WorkspaceWatcher>,
|
||||
|
||||
cli_configuration: Configuration,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
|
||||
let (sender, receiver) = crossbeam_channel::bounded(10);
|
||||
|
||||
(
|
||||
Self {
|
||||
sender: sender.clone(),
|
||||
receiver,
|
||||
watcher: None,
|
||||
cli_configuration,
|
||||
},
|
||||
MainLoopCancellationToken { sender },
|
||||
)
|
||||
}
|
||||
|
||||
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
|
||||
tracing::debug!("Starting watch mode");
|
||||
let sender = self.sender.clone();
|
||||
let watcher = watch::directory_watcher(move |event| {
|
||||
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
|
||||
})?;
|
||||
|
||||
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
|
||||
|
||||
self.run(db);
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
||||
fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
|
||||
let result = self.main_loop(db);
|
||||
|
||||
tracing::debug!("Exiting main loop");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
// Schedule the first check.
|
||||
tracing::debug!("Starting main loop");
|
||||
|
||||
let mut revision = 0u64;
|
||||
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
MainLoopMessage::CheckWorkspace => {
|
||||
let db = db.clone();
|
||||
let sender = self.sender.clone();
|
||||
|
||||
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || {
|
||||
if let Ok(result) = db.check() {
|
||||
// Send the result back to the main loop for printing.
|
||||
sender
|
||||
.send(MainLoopMessage::CheckCompleted { result, revision })
|
||||
.unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
MainLoopMessage::CheckCompleted {
|
||||
result,
|
||||
revision: check_revision,
|
||||
} => {
|
||||
let has_diagnostics = !result.is_empty();
|
||||
if check_revision == revision {
|
||||
#[allow(clippy::print_stdout)]
|
||||
for diagnostic in result {
|
||||
println!("{}", diagnostic.display(db));
|
||||
}
|
||||
} else {
|
||||
tracing::debug!(
|
||||
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
|
||||
);
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return if has_diagnostics {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
};
|
||||
}
|
||||
|
||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||
}
|
||||
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
revision += 1;
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
db.apply_changes(changes, Some(&self.cli_configuration));
|
||||
if let Some(watcher) = self.watcher.as_mut() {
|
||||
watcher.update(db);
|
||||
}
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
// Cancel any pending queries and wait for them to complete.
|
||||
// TODO: Don't use Salsa internal APIs
|
||||
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||
let _ = db.zalsa_mut();
|
||||
return ExitStatus::Success;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Waiting for next main loop message.");
|
||||
}
|
||||
|
||||
ExitStatus::Success
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MainLoopCancellationToken {
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
}
|
||||
|
||||
impl MainLoopCancellationToken {
|
||||
fn stop(self) {
|
||||
self.sender.send(MainLoopMessage::Exit).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckWorkspace,
|
||||
CheckCompleted {
|
||||
result: Vec<Box<dyn Diagnostic>>,
|
||||
revision: u64,
|
||||
},
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
Exit,
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
/// Enumeration of all supported Python versions
|
||||
///
|
||||
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub enum PythonVersion {
|
||||
#[value(name = "3.7")]
|
||||
Py37,
|
||||
#[value(name = "3.8")]
|
||||
Py38,
|
||||
#[default]
|
||||
#[value(name = "3.9")]
|
||||
Py39,
|
||||
#[value(name = "3.10")]
|
||||
Py310,
|
||||
#[value(name = "3.11")]
|
||||
Py311,
|
||||
#[value(name = "3.12")]
|
||||
Py312,
|
||||
#[value(name = "3.13")]
|
||||
Py313,
|
||||
}
|
||||
|
||||
impl PythonVersion {
|
||||
const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Py37 => "3.7",
|
||||
Self::Py38 => "3.8",
|
||||
Self::Py39 => "3.9",
|
||||
Self::Py310 => "3.10",
|
||||
Self::Py311 => "3.11",
|
||||
Self::Py312 => "3.12",
|
||||
Self::Py313 => "3.13",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for PythonVersion {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PythonVersion> for red_knot_python_semantic::PythonVersion {
|
||||
fn from(value: PythonVersion) -> Self {
|
||||
match value {
|
||||
PythonVersion::Py37 => Self::PY37,
|
||||
PythonVersion::Py38 => Self::PY38,
|
||||
PythonVersion::Py39 => Self::PY39,
|
||||
PythonVersion::Py310 => Self::PY310,
|
||||
PythonVersion::Py311 => Self::PY311,
|
||||
PythonVersion::Py312 => Self::PY312,
|
||||
PythonVersion::Py313 => Self::PY313,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::python_version::PythonVersion;
|
||||
|
||||
#[test]
|
||||
fn same_default_as_python_version() {
|
||||
assert_eq!(
|
||||
red_knot_python_semantic::PythonVersion::from(PythonVersion::default()),
|
||||
red_knot_python_semantic::PythonVersion::default()
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,62 +0,0 @@
[package]
name = "red_knot_python_semantic"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }
ruff_python_trivia = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
drop_bomb = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
serde = { workspace = true, optional = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
memchr = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
red_knot_test = { workspace = true }
red_knot_vendored = { workspace = true }

anyhow = { workspace = true }
dir-test = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }

[features]
serde = ["ruff_db/serde", "dep:serde"]

[lints]
workspace = true
@@ -1,4 +0,0 @@
/// Rebuild the crate if a test file is added or removed from `resources/mdtest`.
pub fn main() {
    println!("cargo::rerun-if-changed=resources/mdtest");
}
@@ -1,4 +0,0 @@
Markdown files within the `mdtest/` subdirectory are tests of type inference and type checking,
executed by the `tests/mdtest.rs` integration test.

See `crates/red_knot_test/README.md` for documentation of this test format.
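As a rough illustration (inferred from the test files removed later in this diff, so treat it as a
sketch rather than canonical documentation), an mdtest file is a Markdown document whose fenced `py`
blocks are type-checked, with `# revealed:` and `# error:` comments acting as assertions:

```py
x = 1

# The harness asserts that the checker reveals exactly this type on the next line.
reveal_type(x) # revealed: Literal[1]
```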
@@ -1 +0,0 @@
wrap = 100
@@ -1,94 +0,0 @@
|
||||
# `Annotated`
|
||||
|
||||
`Annotated` attaches arbitrary metadata to a given type.
|
||||
|
||||
## Usages
|
||||
|
||||
`Annotated[T, ...]` is equivalent to `T`: All metadata arguments are simply ignored.
|
||||
|
||||
```py
|
||||
from typing_extensions import Annotated
|
||||
|
||||
def _(x: Annotated[int, "foo"]):
|
||||
reveal_type(x) # revealed: int
|
||||
|
||||
def _(x: Annotated[int, lambda: 0 + 1 * 2 // 3, _(4)]):
|
||||
reveal_type(x) # revealed: int
|
||||
|
||||
def _(x: Annotated[int, "arbitrary", "metadata", "elements", "are", "fine"]):
|
||||
reveal_type(x) # revealed: int
|
||||
|
||||
def _(x: Annotated[tuple[str, int], bytes]):
|
||||
reveal_type(x) # revealed: tuple[str, int]
|
||||
```
|
||||
|
||||
## Parameterization
|
||||
|
||||
It is invalid to parameterize `Annotated` with less than two arguments.
|
||||
|
||||
```py
|
||||
from typing_extensions import Annotated
|
||||
|
||||
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
|
||||
def _(x: Annotated):
|
||||
reveal_type(x) # revealed: Unknown
|
||||
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
X = Annotated
|
||||
else:
|
||||
X = bool
|
||||
|
||||
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
|
||||
def f(y: X):
|
||||
reveal_type(y) # revealed: Unknown | bool
|
||||
|
||||
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
|
||||
def _(x: Annotated | bool):
|
||||
reveal_type(x) # revealed: Unknown | bool
|
||||
|
||||
# error: [invalid-type-form]
|
||||
def _(x: Annotated[()]):
|
||||
reveal_type(x) # revealed: Unknown
|
||||
|
||||
# error: [invalid-type-form]
|
||||
def _(x: Annotated[int]):
|
||||
# `Annotated[T]` is invalid and will raise an error at runtime,
|
||||
# but we treat it the same as `T` to provide better diagnostics later on.
|
||||
# The subscription itself is still reported, regardless.
|
||||
# Same for the `(int,)` form below.
|
||||
reveal_type(x) # revealed: int
|
||||
|
||||
# error: [invalid-type-form]
|
||||
def _(x: Annotated[(int,)]):
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
## Inheritance
|
||||
|
||||
### Correctly parameterized
|
||||
|
||||
Inheriting from `Annotated[T, ...]` is equivalent to inheriting from `T` itself.
|
||||
|
||||
```py
|
||||
from typing_extensions import Annotated
|
||||
|
||||
# TODO: False positive
|
||||
# error: [invalid-base]
|
||||
class C(Annotated[int, "foo"]): ...
|
||||
|
||||
# TODO: Should be `tuple[Literal[C], Literal[int], Literal[object]]`
|
||||
reveal_type(C.__mro__) # revealed: tuple[Literal[C], Unknown, Literal[object]]
|
||||
```
|
||||
|
||||
### Not parameterized
|
||||
|
||||
```py
|
||||
from typing_extensions import Annotated
|
||||
|
||||
# At runtime, this is an error.
|
||||
# error: [invalid-base]
|
||||
class C(Annotated): ...
|
||||
|
||||
reveal_type(C.__mro__) # revealed: tuple[Literal[C], Unknown, Literal[object]]
|
||||
```
|
||||
@@ -1,83 +0,0 @@
|
||||
# Any
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Any` is a way to name the Any type.
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
x: Any = 1
|
||||
x = "foo"
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Any
|
||||
```
|
||||
|
||||
## Aliased to a different name
|
||||
|
||||
If you alias `typing.Any` to another name, we still recognize that as a spelling of the Any type.
|
||||
|
||||
```py
|
||||
from typing import Any as RenamedAny
|
||||
|
||||
x: RenamedAny = 1
|
||||
x = "foo"
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Any
|
||||
```
|
||||
|
||||
## Shadowed class
|
||||
|
||||
If you define your own class named `Any`, using that in a type expression refers to your class, and
|
||||
isn't a spelling of the Any type.
|
||||
|
||||
```py
|
||||
class Any: ...
|
||||
|
||||
x: Any
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Any
|
||||
|
||||
# This verifies that we're not accidentally seeing typing.Any, since str is assignable
|
||||
# to that but not to our locally defined class.
|
||||
y: Any = "not an Any" # error: [invalid-assignment]
|
||||
```
|
||||
|
||||
## Subclass
|
||||
|
||||
The spec allows you to define subclasses of `Any`.
|
||||
|
||||
TODO: Handle assignments correctly. `Subclass` has an unknown superclass, which might be `int`. The
|
||||
assignment to `x` should not be allowed, even when the unknown superclass is `int`. The assignment
|
||||
to `y` should be allowed, since `Subclass` might have `int` as a superclass, and is therefore
|
||||
assignable to `int`.
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
class Subclass(Any): ...
|
||||
|
||||
reveal_type(Subclass.__mro__) # revealed: tuple[Literal[Subclass], Any, Literal[object]]
|
||||
|
||||
x: Subclass = 1 # error: [invalid-assignment]
|
||||
# TODO: no diagnostic
|
||||
y: int = Subclass() # error: [invalid-assignment]
|
||||
|
||||
def _(s: Subclass):
|
||||
reveal_type(s) # revealed: Subclass
|
||||
```
|
||||
|
||||
## Invalid
|
||||
|
||||
`Any` cannot be parameterized:
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
# error: [invalid-type-form] "Type `typing.Any` expected no type parameter"
|
||||
def f(x: Any[int]):
|
||||
reveal_type(x) # revealed: Unknown
|
||||
```
|
||||
@@ -1,153 +0,0 @@
|
||||
# Literal
|
||||
|
||||
<https://typing.readthedocs.io/en/latest/spec/literal.html#literals>
|
||||
|
||||
## Parameterization
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
from enum import Enum
|
||||
|
||||
mode: Literal["w", "r"]
|
||||
a1: Literal[26]
|
||||
a2: Literal[0x1A]
|
||||
a3: Literal[-4]
|
||||
a4: Literal["hello world"]
|
||||
a5: Literal[b"hello world"]
|
||||
a6: Literal[True]
|
||||
a7: Literal[None]
|
||||
a8: Literal[Literal[1]]
|
||||
|
||||
class Color(Enum):
|
||||
RED = 0
|
||||
GREEN = 1
|
||||
BLUE = 2
|
||||
|
||||
b1: Literal[Color.RED]
|
||||
|
||||
def f():
|
||||
reveal_type(mode) # revealed: Literal["w", "r"]
|
||||
reveal_type(a1) # revealed: Literal[26]
|
||||
reveal_type(a2) # revealed: Literal[26]
|
||||
reveal_type(a3) # revealed: Literal[-4]
|
||||
reveal_type(a4) # revealed: Literal["hello world"]
|
||||
reveal_type(a5) # revealed: Literal[b"hello world"]
|
||||
reveal_type(a6) # revealed: Literal[True]
|
||||
reveal_type(a7) # revealed: None
|
||||
reveal_type(a8) # revealed: Literal[1]
|
||||
# TODO: This should be Color.RED
|
||||
reveal_type(b1) # revealed: Literal[0]
|
||||
|
||||
# error: [invalid-type-form]
|
||||
invalid1: Literal[3 + 4]
|
||||
# error: [invalid-type-form]
|
||||
invalid2: Literal[4 + 3j]
|
||||
# error: [invalid-type-form]
|
||||
invalid3: Literal[(3, 4)]
|
||||
|
||||
hello = "hello"
|
||||
invalid4: Literal[
|
||||
1 + 2, # error: [invalid-type-form]
|
||||
"foo",
|
||||
hello, # error: [invalid-type-form]
|
||||
(1, 2, 3), # error: [invalid-type-form]
|
||||
]
|
||||
```
|
||||
|
||||
## Shortening unions of literals
|
||||
|
||||
When `Literal` is parameterized with more than one value, it's treated as exactly equivalent to the
union of those types.
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def x(
|
||||
a1: Literal[Literal[Literal[1, 2, 3], "foo"], 5, None],
|
||||
a2: Literal["w"] | Literal["r"],
|
||||
a3: Literal[Literal["w"], Literal["r"], Literal[Literal["w+"]]],
|
||||
a4: Literal[True] | Literal[1, 2] | Literal["foo"],
|
||||
):
|
||||
reveal_type(a1) # revealed: Literal[1, 2, 3, "foo", 5] | None
|
||||
reveal_type(a2) # revealed: Literal["w", "r"]
|
||||
reveal_type(a3) # revealed: Literal["w", "r", "w+"]
|
||||
reveal_type(a4) # revealed: Literal[True, 1, 2, "foo"]
|
||||
```
|
||||
|
||||
## Display of heterogeneous unions of literals
|
||||
|
||||
```py
|
||||
from typing import Literal, Union
|
||||
|
||||
def foo(x: int) -> int:
|
||||
return x + 1
|
||||
|
||||
def bar(s: str) -> str:
|
||||
return s
|
||||
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def union_example(
|
||||
x: Union[
|
||||
# unknown type
|
||||
# error: [unresolved-reference]
|
||||
y,
|
||||
Literal[-1],
|
||||
Literal["A"],
|
||||
Literal[b"A"],
|
||||
Literal[b"\x00"],
|
||||
Literal[b"\x07"],
|
||||
Literal[0],
|
||||
Literal[1],
|
||||
Literal["B"],
|
||||
Literal["foo"],
|
||||
Literal["bar"],
|
||||
Literal["B"],
|
||||
Literal[True],
|
||||
None,
|
||||
]
|
||||
):
|
||||
reveal_type(x) # revealed: Unknown | Literal[-1, "A", b"A", b"\x00", b"\x07", 0, 1, "B", "foo", "bar", True] | None
|
||||
```
|
||||
|
||||
## Detecting Literal outside typing and typing_extensions
|
||||
|
||||
Only the `Literal` that is defined in the `typing` and `typing_extensions` modules is detected as
the special `Literal` form.
|
||||
|
||||
```pyi path=other.pyi
|
||||
from typing import _SpecialForm
|
||||
|
||||
Literal: _SpecialForm
|
||||
```
|
||||
|
||||
```py
|
||||
from other import Literal
|
||||
|
||||
a1: Literal[26]
|
||||
|
||||
def f():
|
||||
reveal_type(a1) # revealed: @Todo(generics)
|
||||
```
|
||||
|
||||
## Detecting typing_extensions.Literal
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal
|
||||
|
||||
a1: Literal[26]
|
||||
|
||||
def f():
|
||||
reveal_type(a1) # revealed: Literal[26]
|
||||
```
|
||||
|
||||
## Invalid
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
# error: [invalid-type-form] "`Literal` requires at least one argument when used in a type expression"
|
||||
def _(x: Literal):
|
||||
reveal_type(x) # revealed: Unknown
|
||||
```
|
||||
@@ -1,150 +0,0 @@
|
||||
# `LiteralString`
|
||||
|
||||
`LiteralString` represents a string that is either defined directly within the source code or is
|
||||
made up of such components.
|
||||
|
||||
Parts of the test cases defined here were adapted from [the specification's examples][1].
|
||||
|
||||
## Usages
|
||||
|
||||
### Valid places
|
||||
|
||||
It can be used anywhere a type is accepted:
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
x: LiteralString
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: LiteralString
|
||||
```
|
||||
|
||||
### Within `Literal`
|
||||
|
||||
`LiteralString` cannot be used within `Literal`:
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal, LiteralString
|
||||
|
||||
bad_union: Literal["hello", LiteralString] # error: [invalid-type-form]
|
||||
bad_nesting: Literal[LiteralString] # error: [invalid-type-form]
|
||||
```
|
||||
|
||||
### Parameterized
|
||||
|
||||
`LiteralString` cannot be parameterized.
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
a: LiteralString[str] # error: [invalid-type-form]
|
||||
b: LiteralString["foo"] # error: [invalid-type-form]
|
||||
```
|
||||
|
||||
### As a base class
|
||||
|
||||
Subclassing `LiteralString` leads to a runtime error.
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
class C(LiteralString): ... # error: [invalid-base]
|
||||
```
|
||||
|
||||
## Inference
|
||||
|
||||
### Common operations
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
foo: LiteralString = "foo"
|
||||
reveal_type(foo) # revealed: Literal["foo"]
|
||||
|
||||
bar: LiteralString = "bar"
|
||||
reveal_type(foo + bar) # revealed: Literal["foobar"]
|
||||
|
||||
baz: LiteralString = "baz"
|
||||
baz += foo
|
||||
reveal_type(baz) # revealed: Literal["bazfoo"]
|
||||
|
||||
qux = (foo, bar)
|
||||
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
||||
|
||||
# TODO: Infer "LiteralString"
|
||||
reveal_type(foo.join(qux)) # revealed: @Todo(Attribute access on `StringLiteral` types)
|
||||
|
||||
template: LiteralString = "{}, {}"
|
||||
reveal_type(template) # revealed: Literal["{}, {}"]
|
||||
# TODO: Infer `LiteralString`
|
||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(Attribute access on `StringLiteral` types)
|
||||
```
|
||||
|
||||
### Assignability
|
||||
|
||||
`Literal[""]` is assignable to `LiteralString`, and `LiteralString` is assignable to `str`, but not
|
||||
vice versa.
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal, LiteralString
|
||||
|
||||
def _(flag: bool):
|
||||
foo_1: Literal["foo"] = "foo"
|
||||
bar_1: LiteralString = foo_1 # fine
|
||||
|
||||
foo_2 = "foo" if flag else "bar"
|
||||
reveal_type(foo_2) # revealed: Literal["foo", "bar"]
|
||||
bar_2: LiteralString = foo_2 # fine
|
||||
|
||||
foo_3: LiteralString = "foo" * 1_000_000_000
|
||||
bar_3: str = foo_2 # fine
|
||||
|
||||
baz_1: str = str()
|
||||
qux_1: LiteralString = baz_1 # error: [invalid-assignment]
|
||||
|
||||
baz_2: LiteralString = "baz" * 1_000_000_000
|
||||
qux_2: Literal["qux"] = baz_2 # error: [invalid-assignment]
|
||||
|
||||
baz_3 = "foo" if flag else 1
|
||||
reveal_type(baz_3) # revealed: Literal["foo", 1]
|
||||
qux_3: LiteralString = baz_3 # error: [invalid-assignment]
|
||||
```
|
||||
|
||||
### Narrowing
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
lorem: LiteralString = "lorem" * 1_000_000_000
|
||||
|
||||
reveal_type(lorem) # revealed: LiteralString
|
||||
|
||||
if lorem == "ipsum":
|
||||
reveal_type(lorem) # revealed: Literal["ipsum"]
|
||||
|
||||
reveal_type(lorem) # revealed: LiteralString
|
||||
|
||||
if "" < lorem == "ipsum":
|
||||
reveal_type(lorem) # revealed: Literal["ipsum"]
|
||||
```
|
||||
|
||||
## `typing.LiteralString`
|
||||
|
||||
`typing.LiteralString` is only available in Python 3.11 and later:
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.11"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import LiteralString
|
||||
|
||||
x: LiteralString = "foo"
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: LiteralString
|
||||
```
|
||||
|
||||
[1]: https://typing.readthedocs.io/en/latest/spec/literal.html#literalstring
|
||||
@@ -1,75 +0,0 @@
|
||||
# NoReturn & Never
|
||||
|
||||
`NoReturn` is used to annotate the return type for functions that never return. `Never` is the
|
||||
bottom type, representing the empty set of Python objects. These two annotations can be used
|
||||
interchangeably.
|
||||
|
||||
## Function Return Type Annotation
|
||||
|
||||
```py
|
||||
from typing import NoReturn
|
||||
|
||||
def stop() -> NoReturn:
|
||||
raise RuntimeError("no way")
|
||||
|
||||
# revealed: Never
|
||||
reveal_type(stop())
|
||||
```
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing_extensions import NoReturn, Never, Any
|
||||
|
||||
# error: [invalid-type-form] "Type `typing.Never` expected no type parameter"
|
||||
x: Never[int]
|
||||
a1: NoReturn
|
||||
a2: Never
|
||||
b1: Any
|
||||
b2: int
|
||||
|
||||
def f():
|
||||
# revealed: Never
|
||||
reveal_type(a1)
|
||||
# revealed: Never
|
||||
reveal_type(a2)
|
||||
|
||||
# Never is assignable to all types.
|
||||
v1: int = a1
|
||||
v2: str = a1
|
||||
# Other types are not assignable to Never except for Never (and Any).
|
||||
v3: Never = b1
|
||||
v4: Never = a2
|
||||
v5: Any = b2
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Never`"
|
||||
v6: Never = 1
|
||||
```
|
||||
|
||||
## `typing.Never`
|
||||
|
||||
`typing.Never` is only available in Python 3.11 and later.
|
||||
|
||||
### Python 3.11
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.11"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import Never
|
||||
|
||||
reveal_type(Never) # revealed: typing.Never
|
||||
```
|
||||
|
||||
### Python 3.10
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
# error: [unresolved-import]
|
||||
from typing import Never
|
||||
```
|
||||
@@ -1,47 +0,0 @@
|
||||
# Optional
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Optional` is equivalent to a union of the given type with `None`.
|
||||
|
||||
```py
|
||||
from typing import Optional
|
||||
|
||||
a: Optional[int]
|
||||
a1: Optional[bool]
|
||||
a2: Optional[Optional[bool]]
|
||||
a3: Optional[None]
|
||||
|
||||
def f():
|
||||
# revealed: int | None
|
||||
reveal_type(a)
|
||||
# revealed: bool | None
|
||||
reveal_type(a1)
|
||||
# revealed: bool | None
|
||||
reveal_type(a2)
|
||||
# revealed: None
|
||||
reveal_type(a3)
|
||||
```
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import Optional
|
||||
|
||||
a: Optional[int] = 1
|
||||
a = None
|
||||
# error: [invalid-assignment] "Object of type `Literal[""]` is not assignable to `int | None`"
|
||||
a = ""
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import Optional
|
||||
|
||||
a: Optional[int]
|
||||
|
||||
def f():
|
||||
# revealed: int | None
|
||||
reveal_type(a)
|
||||
```
|
||||
@@ -1,18 +0,0 @@
|
||||
# Starred expression annotations
|
||||
|
||||
Type annotations for `*args` can be starred expressions themselves:
|
||||
|
||||
```py
|
||||
from typing_extensions import TypeVarTuple
|
||||
|
||||
Ts = TypeVarTuple("Ts")
|
||||
|
||||
def append_int(*args: *Ts) -> tuple[*Ts, int]:
|
||||
# TODO: tuple[*Ts]
|
||||
reveal_type(args) # revealed: tuple
|
||||
|
||||
return (*args, 1)
|
||||
|
||||
# TODO should be tuple[Literal[True], Literal["a"], int]
|
||||
reveal_type(append_int(True, "a")) # revealed: @Todo(full tuple[...] support)
|
||||
```
|
||||
@@ -1,127 +0,0 @@
|
||||
# Typing-module aliases to other stdlib classes
|
||||
|
||||
The `typing` module has various aliases to other stdlib classes. These are a legacy feature, but
|
||||
still need to be supported by a type checker.
|
||||
|
||||
## Correspondence
|
||||
|
||||
All of the following symbols can be mapped one-to-one with the actual type:
|
||||
|
||||
```py
|
||||
import typing
|
||||
|
||||
def f(
|
||||
list_bare: typing.List,
|
||||
list_parametrized: typing.List[int],
|
||||
dict_bare: typing.Dict,
|
||||
dict_parametrized: typing.Dict[int, str],
|
||||
set_bare: typing.Set,
|
||||
set_parametrized: typing.Set[int],
|
||||
frozen_set_bare: typing.FrozenSet,
|
||||
frozen_set_parametrized: typing.FrozenSet[str],
|
||||
chain_map_bare: typing.ChainMap,
|
||||
chain_map_parametrized: typing.ChainMap[int],
|
||||
counter_bare: typing.Counter,
|
||||
counter_parametrized: typing.Counter[int],
|
||||
default_dict_bare: typing.DefaultDict,
|
||||
default_dict_parametrized: typing.DefaultDict[str, int],
|
||||
deque_bare: typing.Deque,
|
||||
deque_parametrized: typing.Deque[str],
|
||||
ordered_dict_bare: typing.OrderedDict,
|
||||
ordered_dict_parametrized: typing.OrderedDict[int, str],
|
||||
):
|
||||
reveal_type(list_bare) # revealed: list
|
||||
reveal_type(list_parametrized) # revealed: list
|
||||
|
||||
reveal_type(dict_bare) # revealed: dict
|
||||
reveal_type(dict_parametrized) # revealed: dict
|
||||
|
||||
reveal_type(set_bare) # revealed: set
|
||||
reveal_type(set_parametrized) # revealed: set
|
||||
|
||||
reveal_type(frozen_set_bare) # revealed: frozenset
|
||||
reveal_type(frozen_set_parametrized) # revealed: frozenset
|
||||
|
||||
reveal_type(chain_map_bare) # revealed: ChainMap
|
||||
reveal_type(chain_map_parametrized) # revealed: ChainMap
|
||||
|
||||
reveal_type(counter_bare) # revealed: Counter
|
||||
reveal_type(counter_parametrized) # revealed: Counter
|
||||
|
||||
reveal_type(default_dict_bare) # revealed: defaultdict
|
||||
reveal_type(default_dict_parametrized) # revealed: defaultdict
|
||||
|
||||
reveal_type(deque_bare) # revealed: deque
|
||||
reveal_type(deque_parametrized) # revealed: deque
|
||||
|
||||
reveal_type(ordered_dict_bare) # revealed: OrderedDict
|
||||
reveal_type(ordered_dict_parametrized) # revealed: OrderedDict
|
||||
```
|
||||
|
||||
## Inheritance
|
||||
|
||||
The aliases can be inherited from. Some of these are still partially or wholly TODOs.
|
||||
|
||||
```py
|
||||
import typing
|
||||
|
||||
####################
|
||||
### Built-ins
|
||||
|
||||
class ListSubclass(typing.List): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[ListSubclass], Literal[list], Unknown, Literal[object]]
|
||||
reveal_type(ListSubclass.__mro__)
|
||||
|
||||
class DictSubclass(typing.Dict): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[DictSubclass], Literal[dict], Unknown, Literal[object]]
|
||||
reveal_type(DictSubclass.__mro__)
|
||||
|
||||
class SetSubclass(typing.Set): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[SetSubclass], Literal[set], Unknown, Literal[object]]
|
||||
reveal_type(SetSubclass.__mro__)
|
||||
|
||||
class FrozenSetSubclass(typing.FrozenSet): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[FrozenSetSubclass], Literal[frozenset], Unknown, Literal[object]]
|
||||
reveal_type(FrozenSetSubclass.__mro__)
|
||||
|
||||
####################
|
||||
### `collections`
|
||||
|
||||
class ChainMapSubclass(typing.ChainMap): ...
|
||||
|
||||
# TODO: Should be (ChainMapSubclass, ChainMap, MutableMapping, Mapping, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[ChainMapSubclass], Literal[ChainMap], Unknown, Literal[object]]
|
||||
reveal_type(ChainMapSubclass.__mro__)
|
||||
|
||||
class CounterSubclass(typing.Counter): ...
|
||||
|
||||
# TODO: Should be (CounterSubclass, Counter, dict, MutableMapping, Mapping, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[CounterSubclass], Literal[Counter], Unknown, Literal[object]]
|
||||
reveal_type(CounterSubclass.__mro__)
|
||||
|
||||
class DefaultDictSubclass(typing.DefaultDict): ...
|
||||
|
||||
# TODO: Should be (DefaultDictSubclass, defaultdict, dict, MutableMapping, Mapping, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[DefaultDictSubclass], Literal[defaultdict], Unknown, Literal[object]]
|
||||
reveal_type(DefaultDictSubclass.__mro__)
|
||||
|
||||
class DequeSubclass(typing.Deque): ...
|
||||
|
||||
# TODO: Should be (DequeSubclass, deque, MutableSequence, Sequence, Reversible, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Unknown, Literal[object]]
|
||||
reveal_type(DequeSubclass.__mro__)
|
||||
|
||||
class OrderedDictSubclass(typing.OrderedDict): ...
|
||||
|
||||
# TODO: Should be (OrderedDictSubclass, OrderedDict, dict, MutableMapping, Mapping, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[OrderedDictSubclass], Literal[OrderedDict], Unknown, Literal[object]]
|
||||
reveal_type(OrderedDictSubclass.__mro__)
|
||||
```
|
||||
@@ -1,175 +0,0 @@
|
||||
# String annotations
|
||||
|
||||
## Simple
|
||||
|
||||
```py
|
||||
def f(v: "int"):
|
||||
reveal_type(v) # revealed: int
|
||||
```
|
||||
|
||||
## Nested
|
||||
|
||||
```py
|
||||
def f(v: "'int'"):
|
||||
reveal_type(v) # revealed: int
|
||||
```
|
||||
|
||||
## Type expression
|
||||
|
||||
```py
|
||||
def f1(v: "int | str", w: "tuple[int, str]"):
|
||||
reveal_type(v) # revealed: int | str
|
||||
reveal_type(w) # revealed: tuple[int, str]
|
||||
```
|
||||
|
||||
## Partial
|
||||
|
||||
```py
|
||||
def f(v: tuple[int, "str"]):
|
||||
reveal_type(v) # revealed: tuple[int, str]
|
||||
```
|
||||
|
||||
## Deferred
|
||||
|
||||
```py
|
||||
def f(v: "Foo"):
|
||||
reveal_type(v) # revealed: Foo
|
||||
|
||||
class Foo: ...
|
||||
```
|
||||
|
||||
## Deferred (undefined)
|
||||
|
||||
```py
|
||||
# error: [unresolved-reference]
|
||||
def f(v: "Foo"):
|
||||
reveal_type(v) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Partial deferred
|
||||
|
||||
```py
|
||||
def f(v: int | "Foo"):
|
||||
reveal_type(v) # revealed: int | Foo
|
||||
|
||||
class Foo: ...
|
||||
```
|
||||
|
||||
## `typing.Literal`
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f1(v: Literal["Foo", "Bar"], w: 'Literal["Foo", "Bar"]'):
|
||||
reveal_type(v) # revealed: Literal["Foo", "Bar"]
|
||||
reveal_type(w) # revealed: Literal["Foo", "Bar"]
|
||||
|
||||
class Foo: ...
|
||||
```
|
||||
|
||||
## Various string kinds
|
||||
|
||||
```py
|
||||
def f1(
|
||||
# error: [raw-string-type-annotation] "Type expressions cannot use raw string literal"
|
||||
a: r"int",
|
||||
# error: [fstring-type-annotation] "Type expressions cannot use f-strings"
|
||||
b: f"int",
|
||||
# error: [byte-string-type-annotation] "Type expressions cannot use bytes literal"
|
||||
c: b"int",
|
||||
d: "int",
|
||||
# error: [implicit-concatenated-string-type-annotation] "Type expressions cannot span multiple string literals"
|
||||
e: "in" "t",
|
||||
# error: [escape-character-in-forward-annotation] "Type expressions cannot contain escape characters"
|
||||
f: "\N{LATIN SMALL LETTER I}nt",
|
||||
# error: [escape-character-in-forward-annotation] "Type expressions cannot contain escape characters"
|
||||
g: "\x69nt",
|
||||
h: """int""",
|
||||
# error: [byte-string-type-annotation] "Type expressions cannot use bytes literal"
|
||||
i: "b'int'",
|
||||
):
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: int
|
||||
reveal_type(e) # revealed: Unknown
|
||||
reveal_type(f) # revealed: Unknown
|
||||
reveal_type(g) # revealed: Unknown
|
||||
reveal_type(h) # revealed: int
|
||||
reveal_type(i) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Various string kinds in `typing.Literal`
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f(v: Literal["a", r"b", b"c", "d" "e", "\N{LATIN SMALL LETTER F}", "\x67", """h"""]):
|
||||
reveal_type(v) # revealed: Literal["a", "b", b"c", "de", "f", "g", "h"]
|
||||
```
|
||||
|
||||
## Class variables
|
||||
|
||||
```py
|
||||
MyType = int
|
||||
|
||||
class Aliases:
|
||||
MyType = str
|
||||
|
||||
forward: "MyType"
|
||||
not_forward: MyType
|
||||
|
||||
reveal_type(Aliases.forward) # revealed: str
|
||||
reveal_type(Aliases.not_forward) # revealed: str
|
||||
```
|
||||
|
||||
## Annotated assignment
|
||||
|
||||
```py
|
||||
a: "int" = 1
|
||||
b: "'int'" = 1
|
||||
c: "Foo"
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Foo`"
|
||||
d: "Foo" = 1
|
||||
|
||||
class Foo: ...
|
||||
|
||||
c = Foo()
|
||||
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Literal[1]
|
||||
reveal_type(c) # revealed: Foo
|
||||
reveal_type(d) # revealed: Foo
|
||||
```
|
||||
|
||||
## Parameter
|
||||
|
||||
TODO: Add tests once parameter inference is supported
|
||||
|
||||
## Invalid expressions
|
||||
|
||||
The expressions in these string annotations aren't valid expressions in this context, but we
shouldn't panic.
|
||||
|
||||
```py
|
||||
a: "1 or 2"
|
||||
b: "(x := 1)"
|
||||
c: "1 + 2"
|
||||
d: "lambda x: x"
|
||||
e: "x if True else y"
|
||||
f: "{'a': 1, 'b': 2}"
|
||||
g: "{1, 2}"
|
||||
h: "[i for i in range(5)]"
|
||||
i: "{i for i in range(5)}"
|
||||
j: "{i: i for i in range(5)}"
|
||||
k: "(i for i in range(5))"
|
||||
l: "await 1"
|
||||
# error: [invalid-syntax-in-forward-annotation]
|
||||
m: "yield 1"
|
||||
# error: [invalid-syntax-in-forward-annotation]
|
||||
n: "yield from 1"
|
||||
o: "1 < 2"
|
||||
p: "call()"
|
||||
r: "[1, 2]"
|
||||
s: "(1, 2)"
|
||||
```
|
||||
@@ -1,61 +0,0 @@
|
||||
# Union
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Union` can be used to construct union types, the same as the `|` operator.
|
||||
|
||||
```py
|
||||
from typing import Union
|
||||
|
||||
a: Union[int, str]
|
||||
a1: Union[int, bool]
|
||||
a2: Union[int, Union[float, str]]
|
||||
a3: Union[int, None]
|
||||
a4: Union[Union[float, str]]
|
||||
a5: Union[int]
|
||||
a6: Union[()]
|
||||
|
||||
def f():
|
||||
# revealed: int | str
|
||||
reveal_type(a)
|
||||
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
|
||||
# revealed: int
|
||||
reveal_type(a1)
|
||||
# revealed: int | float | str
|
||||
reveal_type(a2)
|
||||
# revealed: int | None
|
||||
reveal_type(a3)
|
||||
# revealed: float | str
|
||||
reveal_type(a4)
|
||||
# revealed: int
|
||||
reveal_type(a5)
|
||||
# revealed: Never
|
||||
reveal_type(a6)
|
||||
```
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import Union
|
||||
|
||||
a: Union[int, str]
|
||||
a = 1
|
||||
a = ""
|
||||
a1: Union[int, bool]
|
||||
a1 = 1
|
||||
a1 = True
|
||||
# error: [invalid-assignment] "Object of type `Literal[b""]` is not assignable to `int | str`"
|
||||
a = b""
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import Union
|
||||
|
||||
a: Union[int, str]
|
||||
|
||||
def f():
|
||||
# revealed: int | str
|
||||
reveal_type(a)
|
||||
```
|
||||
@@ -1,71 +0,0 @@
|
||||
# Unsupported special forms
|
||||
|
||||
## Not yet supported
|
||||
|
||||
Several special forms are currently unsupported by red-knot. However, we also don't emit
false-positive errors if you use one in an annotation:
|
||||
|
||||
```py
|
||||
from typing_extensions import Self, TypeVarTuple, Unpack, TypeGuard, TypeIs, Concatenate, ParamSpec, TypeAlias, Callable, TypeVar
|
||||
|
||||
P = ParamSpec("P")
|
||||
Ts = TypeVarTuple("Ts")
|
||||
R_co = TypeVar("R_co", covariant=True)
|
||||
|
||||
Alias: TypeAlias = int
|
||||
|
||||
def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
|
||||
# TODO: should understand the annotation
|
||||
reveal_type(args) # revealed: tuple
|
||||
|
||||
reveal_type(Alias) # revealed: @Todo(Unsupported or invalid type in a type expression)
|
||||
|
||||
def g() -> TypeGuard[int]: ...
|
||||
def h() -> TypeIs[int]: ...
|
||||
def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.kwargs) -> R_co:
|
||||
# TODO: should understand the annotation
|
||||
reveal_type(args) # revealed: tuple
|
||||
|
||||
# TODO: should understand the annotation
|
||||
reveal_type(kwargs) # revealed: dict
|
||||
|
||||
return callback(42, *args, **kwargs)
|
||||
|
||||
class Foo:
|
||||
def method(self, x: Self):
|
||||
reveal_type(x) # revealed: @Todo(Unsupported or invalid type in a type expression)
|
||||
```
|
||||
|
||||
## Inheritance
|
||||
|
||||
You can't inherit from most of these. `typing.Callable` is an exception.
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
from typing_extensions import Self, Unpack, TypeGuard, TypeIs, Concatenate
|
||||
|
||||
class A(Self): ... # error: [invalid-base]
|
||||
class B(Unpack): ... # error: [invalid-base]
|
||||
class C(TypeGuard): ... # error: [invalid-base]
|
||||
class D(TypeIs): ... # error: [invalid-base]
|
||||
class E(Concatenate): ... # error: [invalid-base]
|
||||
class F(Callable): ...
|
||||
|
||||
reveal_type(F.__mro__) # revealed: tuple[Literal[F], @Todo(Support for Callable as a base class), Literal[object]]
|
||||
```
|
||||
|
||||
## Subscriptability
|
||||
|
||||
Some of these are not subscriptable:
|
||||
|
||||
```py
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
X: TypeAlias[T] = int # error: [invalid-type-form]
|
||||
|
||||
class Foo[T]:
|
||||
# error: [invalid-type-form] "Special form `typing.Self` expected no type parameter"
|
||||
# error: [invalid-type-form] "Special form `typing.Self` expected no type parameter"
|
||||
def method(self: Self[int]) -> Self[int]:
|
||||
reveal_type(self) # revealed: Unknown
|
||||
```
|
||||
@@ -1,37 +0,0 @@
|
||||
# Unsupported type qualifiers
|
||||
|
||||
## Not yet supported
|
||||
|
||||
Several type qualifiers are currently unsupported by red-knot. However, we also don't emit
false-positive errors if you use one in an annotation:
|
||||
|
||||
```py
|
||||
from typing_extensions import Final, ClassVar, Required, NotRequired, ReadOnly, TypedDict
|
||||
|
||||
X: Final = 42
|
||||
Y: Final[int] = 42
|
||||
|
||||
class Foo:
|
||||
A: ClassVar[int] = 42
|
||||
|
||||
# TODO: `TypedDict` is actually valid as a base
|
||||
# error: [invalid-base]
|
||||
class Bar(TypedDict):
|
||||
x: Required[int]
|
||||
y: NotRequired[str]
|
||||
z: ReadOnly[bytes]
|
||||
```
|
||||
|
||||
## Inheritance
|
||||
|
||||
You can't inherit from a type qualifier.
|
||||
|
||||
```py
|
||||
from typing_extensions import Final, ClassVar, Required, NotRequired, ReadOnly
|
||||
|
||||
class A(Final): ... # error: [invalid-base]
|
||||
class B(ClassVar): ... # error: [invalid-base]
|
||||
class C(Required): ... # error: [invalid-base]
|
||||
class D(NotRequired): ... # error: [invalid-base]
|
||||
class E(ReadOnly): ... # error: [invalid-base]
|
||||
```
|
||||
@@ -1,131 +0,0 @@
|
||||
# Assignment with annotations
|
||||
|
||||
## Annotation only transparent to local inference
|
||||
|
||||
```py
|
||||
x = 1
|
||||
x: int
|
||||
y = x
|
||||
|
||||
reveal_type(y) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Violates own annotation
|
||||
|
||||
```py
|
||||
x: int = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `int`"
|
||||
```
|
||||
|
||||
## Violates previous annotation
|
||||
|
||||
```py
|
||||
x: int
|
||||
x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `int`"
|
||||
```
|
||||
|
||||
## Tuple annotations are understood
|
||||
|
||||
```py path=module.py
|
||||
from typing_extensions import Unpack
|
||||
|
||||
a: tuple[()] = ()
|
||||
b: tuple[int] = (42,)
|
||||
c: tuple[str, int] = ("42", 42)
|
||||
d: tuple[tuple[str, str], tuple[int, int]] = (("foo", "foo"), (42, 42))
|
||||
e: tuple[str, ...] = ()
|
||||
f: tuple[str, *tuple[int, ...], bytes] = ("42", b"42")
|
||||
g: tuple[str, Unpack[tuple[int, ...]], bytes] = ("42", b"42")
|
||||
h: tuple[list[int], list[int]] = ([], [])
|
||||
i: tuple[str | int, str | int] = (42, 42)
|
||||
j: tuple[str | int] = (42,)
|
||||
```
|
||||
|
||||
```py path=script.py
|
||||
from module import a, b, c, d, e, f, g, h, i, j
|
||||
|
||||
reveal_type(a) # revealed: tuple[()]
|
||||
reveal_type(b) # revealed: tuple[int]
|
||||
reveal_type(c) # revealed: tuple[str, int]
|
||||
reveal_type(d) # revealed: tuple[tuple[str, str], tuple[int, int]]
|
||||
|
||||
# TODO: homogeneous tuples, PEP-646 tuples
|
||||
reveal_type(e) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(f) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
# TODO: support more kinds of type expressions in annotations
|
||||
reveal_type(h) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
reveal_type(i) # revealed: tuple[str | int, str | int]
|
||||
reveal_type(j) # revealed: tuple[str | int]
|
||||
```
|
||||
|
||||
## Incorrect tuple assignments are reported
|
||||
|
||||
```py
|
||||
# error: [invalid-assignment] "Object of type `tuple[Literal[1], Literal[2]]` is not assignable to `tuple[()]`"
|
||||
a: tuple[()] = (1, 2)
|
||||
|
||||
# error: [invalid-assignment] "Object of type `tuple[Literal["foo"]]` is not assignable to `tuple[int]`"
|
||||
b: tuple[int] = ("foo",)
|
||||
|
||||
# error: [invalid-assignment] "Object of type `tuple[list, Literal["foo"]]` is not assignable to `tuple[str | int, str]`"
|
||||
c: tuple[str | int, str] = ([], "foo")
|
||||
```
|
||||
|
||||
## PEP-604 annotations are supported
|
||||
|
||||
```py
|
||||
def foo(v: str | int | None, w: str | str | None, x: str | str):
|
||||
reveal_type(v) # revealed: str | int | None
|
||||
reveal_type(w) # revealed: str | None
|
||||
reveal_type(x) # revealed: str
|
||||
```
|
||||
|
||||
## Attribute expressions in type annotations are understood
|
||||
|
||||
```py
|
||||
import builtins
|
||||
|
||||
int = "foo"
|
||||
a: builtins.int = 42
|
||||
|
||||
# error: [invalid-assignment] "Object of type `Literal["bar"]` is not assignable to `int`"
|
||||
b: builtins.int = "bar"
|
||||
|
||||
c: builtins.tuple[builtins.tuple[builtins.int, builtins.int], builtins.int] = ((42, 42), 42)
|
||||
|
||||
# error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `tuple[tuple[int, int], int]`"
|
||||
c: builtins.tuple[builtins.tuple[builtins.int, builtins.int], builtins.int] = "foo"
|
||||
```
|
||||
|
||||
## Future annotations are deferred
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
x: Foo
|
||||
|
||||
class Foo: ...
|
||||
|
||||
x = Foo()
|
||||
reveal_type(x) # revealed: Foo
|
||||
```
|
||||
|
||||
## Annotations in stub files are deferred
|
||||
|
||||
```pyi path=main.pyi
|
||||
x: Foo
|
||||
|
||||
class Foo: ...
|
||||
|
||||
x = Foo()
|
||||
reveal_type(x) # revealed: Foo
|
||||
```
|
||||
|
||||
## Annotated assignments in stub files are inferred correctly
|
||||
|
||||
```pyi path=main.pyi
|
||||
x: int = 1
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
@@ -1,164 +0,0 @@
|
||||
# Augmented assignment
|
||||
|
||||
## Basic
|
||||
|
||||
```py
|
||||
x = 3
|
||||
x -= 1
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
|
||||
x = 1.0
|
||||
x /= 2
|
||||
reveal_type(x) # revealed: float
|
||||
```
|
||||
|
||||
## Dunder methods
|
||||
|
||||
```py
|
||||
class C:
|
||||
def __isub__(self, other: int) -> str:
|
||||
return "Hello, world!"
|
||||
|
||||
x = C()
|
||||
x -= 1
|
||||
reveal_type(x) # revealed: str
|
||||
|
||||
class C:
|
||||
def __iadd__(self, other: str) -> float:
|
||||
return 1.0
|
||||
|
||||
x = C()
|
||||
x += "Hello"
|
||||
reveal_type(x) # revealed: float
|
||||
```
|
||||
|
||||
## Unsupported types
|
||||
|
||||
```py
|
||||
class C:
|
||||
def __isub__(self, other: str) -> int:
|
||||
return 42
|
||||
|
||||
x = C()
|
||||
# error: [invalid-argument-type]
|
||||
x -= 1
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
## Method union
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Foo:
|
||||
if flag:
|
||||
def __iadd__(self, other: int) -> str:
|
||||
return "Hello, world!"
|
||||
else:
|
||||
def __iadd__(self, other: int) -> int:
|
||||
return 42
|
||||
|
||||
f = Foo()
|
||||
f += 12
|
||||
|
||||
reveal_type(f) # revealed: str | int
|
||||
```
|
||||
|
||||
## Partially bound `__iadd__`
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Foo:
|
||||
if flag:
|
||||
def __iadd__(self, other: str) -> int:
|
||||
return 42
|
||||
|
||||
f = Foo()
|
||||
|
||||
# TODO: We should emit an `unsupported-operator` error here, possibly with the information
|
||||
# that `Foo.__iadd__` may be unbound as additional context.
|
||||
f += "Hello, world!"
|
||||
|
||||
reveal_type(f) # revealed: int | Unknown
|
||||
```
|
||||
|
||||
## Partially bound with `__add__`
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Foo:
|
||||
def __add__(self, other: str) -> str:
|
||||
return "Hello, world!"
|
||||
if flag:
|
||||
def __iadd__(self, other: str) -> int:
|
||||
return 42
|
||||
|
||||
f = Foo()
|
||||
f += "Hello, world!"
|
||||
|
||||
reveal_type(f) # revealed: int | str
|
||||
```
|
||||
|
||||
## Partially bound target union
|
||||
|
||||
```py
|
||||
def _(flag1: bool, flag2: bool):
|
||||
class Foo:
|
||||
def __add__(self, other: int) -> str:
|
||||
return "Hello, world!"
|
||||
if flag1:
|
||||
def __iadd__(self, other: int) -> int:
|
||||
return 42
|
||||
|
||||
if flag2:
|
||||
f = Foo()
|
||||
else:
|
||||
f = 42.0
|
||||
f += 12
|
||||
|
||||
reveal_type(f) # revealed: int | str | float
|
||||
```
|
||||
|
||||
## Target union
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Foo:
|
||||
def __iadd__(self, other: int) -> str:
|
||||
return "Hello, world!"
|
||||
|
||||
if flag:
|
||||
f = Foo()
|
||||
else:
|
||||
f = 42.0
|
||||
f += 12
|
||||
|
||||
reveal_type(f) # revealed: str | float
|
||||
```
|
||||
|
||||
## Partially bound target union with `__add__`
|
||||
|
||||
```py
|
||||
def f(flag: bool, flag2: bool):
|
||||
class Foo:
|
||||
def __add__(self, other: int) -> str:
|
||||
return "Hello, world!"
|
||||
if flag:
|
||||
def __iadd__(self, other: int) -> int:
|
||||
return 42
|
||||
|
||||
class Bar:
|
||||
def __add__(self, other: int) -> bytes:
|
||||
return b"Hello, world!"
|
||||
|
||||
def __iadd__(self, other: int) -> float:
|
||||
return 42.0
|
||||
|
||||
if flag2:
|
||||
f = Foo()
|
||||
else:
|
||||
f = Bar()
|
||||
f += 12
|
||||
|
||||
reveal_type(f) # revealed: int | str | float
|
||||
```
|
||||
@@ -1,9 +0,0 @@
|
||||
# Multi-target assignment
|
||||
|
||||
## Basic
|
||||
|
||||
```py
|
||||
x = y = 1
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
reveal_type(y) # revealed: Literal[1]
|
||||
```
|
||||
@@ -1,20 +0,0 @@
|
||||
# Unbound
|
||||
|
||||
## Unbound
|
||||
|
||||
```py
|
||||
x = foo # error: [unresolved-reference] "Name `foo` used when not defined"
|
||||
foo = 1
|
||||
|
||||
# No `unresolved-reference` diagnostic is reported for `x`. This is
|
||||
# desirable because we would get a lot of cascading errors even though there
|
||||
# is only one root cause (the unbound variable `foo`).
|
||||
|
||||
# revealed: Unknown
|
||||
reveal_type(x)
|
||||
```
|
||||
|
||||
Note: in this particular example, one could argue that the most likely error would be a wrong order
|
||||
of the `x`/`foo` definitions, and so it could be desirable to infer `Literal[1]` for the type of
|
||||
`x`. On the other hand, there might be a variable `fob` a little higher up in this file, and the
|
||||
actual error might have been just a typo. Inferring `Unknown` thus seems like the safest option.
|
||||
@@ -1,17 +0,0 @@
|
||||
# Walrus operator
|
||||
|
||||
## Basic
|
||||
|
||||
```py
|
||||
x = (y := 1) + 1
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
reveal_type(y) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Walrus self-addition
|
||||
|
||||
```py
|
||||
x = 0
|
||||
(x := x + 1)
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
@@ -1,223 +0,0 @@
|
||||
# Attributes
|
||||
|
||||
Tests for attribute access on various kinds of types.
|
||||
|
||||
## Union of attributes
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
else:
|
||||
class C1:
|
||||
x = 2
|
||||
|
||||
class C2:
|
||||
if flag:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(C1.x) # revealed: Literal[1, 2]
|
||||
reveal_type(C2.x) # revealed: Literal[3, 4]
|
||||
```
|
||||
|
||||
## Inherited attributes
|
||||
|
||||
```py
|
||||
class A:
|
||||
X = "foo"
|
||||
|
||||
class B(A): ...
|
||||
class C(B): ...
|
||||
|
||||
reveal_type(C.X) # revealed: Literal["foo"]
|
||||
```
|
||||
|
||||
## Inherited attributes (multiple inheritance)
|
||||
|
||||
```py
|
||||
class O: ...
|
||||
|
||||
class F(O):
|
||||
X = 56
|
||||
|
||||
class E(O):
|
||||
X = 42
|
||||
|
||||
class D(O): ...
|
||||
class C(D, F): ...
|
||||
class B(E, D): ...
|
||||
class A(B, C): ...
|
||||
|
||||
# revealed: tuple[Literal[A], Literal[B], Literal[E], Literal[C], Literal[D], Literal[F], Literal[O], Literal[object]]
|
||||
reveal_type(A.__mro__)
|
||||
|
||||
# `E` is earlier in the MRO than `F`, so we should use the type of `E.X`
|
||||
reveal_type(A.X) # revealed: Literal[42]
|
||||
```
|
||||
|
||||
## Unions with possibly unbound paths
|
||||
|
||||
### Definite boundness within a class
|
||||
|
||||
In this example, the `x` attribute is not defined in the `C2` element of the union:
|
||||
|
||||
```py
|
||||
def _(flag1: bool, flag2: bool):
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
class C2: ...
|
||||
|
||||
class C3:
|
||||
x = 3
|
||||
|
||||
C = C1 if flag1 else C2 if flag2 else C3
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[1, 3]
|
||||
```
|
||||
|
||||
### Possibly-unbound within a class
|
||||
|
||||
We raise the same diagnostic if the attribute is possibly-unbound in at least one element of the
|
||||
union:
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag1: bool, flag2: bool):
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
class C2:
|
||||
if flag:
|
||||
x = 2
|
||||
|
||||
class C3:
|
||||
x = 3
|
||||
|
||||
C = C1 if flag1 else C2 if flag2 else C3
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
## Unions with all paths unbound
|
||||
|
||||
If the symbol is unbound in all elements of the union, we detect that:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class C1: ...
|
||||
class C2: ...
|
||||
C = C1 if flag else C2
|
||||
|
||||
# error: [unresolved-attribute] "Type `Literal[C1, C2]` has no attribute `x`"
|
||||
reveal_type(C.x) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Objects of all types have a `__class__` method
|
||||
|
||||
```py
|
||||
import typing_extensions
|
||||
|
||||
reveal_type(typing_extensions.__class__) # revealed: Literal[ModuleType]
|
||||
|
||||
a = 42
|
||||
reveal_type(a.__class__) # revealed: Literal[int]
|
||||
|
||||
b = "42"
|
||||
reveal_type(b.__class__) # revealed: Literal[str]
|
||||
|
||||
c = b"42"
|
||||
reveal_type(c.__class__) # revealed: Literal[bytes]
|
||||
|
||||
d = True
|
||||
reveal_type(d.__class__) # revealed: Literal[bool]
|
||||
|
||||
e = (42, 42)
|
||||
reveal_type(e.__class__) # revealed: Literal[tuple]
|
||||
|
||||
def f(a: int, b: typing_extensions.LiteralString, c: int | str, d: type[str]):
|
||||
reveal_type(a.__class__) # revealed: type[int]
|
||||
reveal_type(b.__class__) # revealed: Literal[str]
|
||||
reveal_type(c.__class__) # revealed: type[int] | type[str]
|
||||
|
||||
# `type[type]`, a.k.a., either the class `type` or some subclass of `type`.
|
||||
# It would be incorrect to infer `Literal[type]` here,
|
||||
# as `d` could be some subclass of `str` with a custom metaclass.
|
||||
# All we know is that the metaclass must be a (non-strict) subclass of `type`.
|
||||
reveal_type(d.__class__) # revealed: type[type]
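# (Illustrative addition, not part of the original test.) A concrete case, using only
# standard Python semantics, of a subclass of `str` with a custom metaclass:
#
#     class Meta(type): ...
#     class MyStr(str, metaclass=Meta): ...
#
# `MyStr` is a valid argument for `d: type[str]`, and `MyStr.__class__` is `Meta`,
# a subclass of `type`, which is why `type[type]` is the sound inference here.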
|
||||
|
||||
reveal_type(f.__class__) # revealed: Literal[FunctionType]
|
||||
|
||||
class Foo: ...
|
||||
|
||||
reveal_type(Foo.__class__) # revealed: Literal[type]
|
||||
```
|
||||
|
||||
## Function-literal attributes
|
||||
|
||||
Most attribute accesses on function-literal types are delegated to `types.FunctionType`, since all
|
||||
functions are instances of that class:
|
||||
|
||||
```py path=a.py
|
||||
def f(): ...
|
||||
|
||||
reveal_type(f.__defaults__) # revealed: @Todo(instance attributes)
|
||||
reveal_type(f.__kwdefaults__) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
Some attributes are special-cased, however:
|
||||
|
||||
```py path=b.py
|
||||
def f(): ...
|
||||
|
||||
reveal_type(f.__get__) # revealed: @Todo(`__get__` method on functions)
|
||||
reveal_type(f.__call__) # revealed: @Todo(`__call__` method on functions)
|
||||
```
|
||||
|
||||
## Int-literal attributes
|
||||
|
||||
Most attribute accesses on int-literal types are delegated to `builtins.int`, since all literal
|
||||
integers are instances of that class:
|
||||
|
||||
```py path=a.py
|
||||
reveal_type((2).bit_length) # revealed: @Todo(instance attributes)
|
||||
reveal_type((2).denominator) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
Some attributes are special-cased, however:
|
||||
|
||||
```py path=b.py
|
||||
reveal_type((2).numerator) # revealed: Literal[2]
|
||||
reveal_type((2).real) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Literal `bool` attributes
|
||||
|
||||
Most attribute accesses on bool-literal types are delegated to `builtins.bool`, since all literal
|
||||
bools are instances of that class:
|
||||
|
||||
```py path=a.py
|
||||
reveal_type(True.__and__) # revealed: @Todo(instance attributes)
|
||||
reveal_type(False.__or__) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
Some attributes are special-cased, however:
|
||||
|
||||
```py path=b.py
|
||||
reveal_type(True.numerator) # revealed: Literal[1]
|
||||
reveal_type(False.real) # revealed: Literal[0]
|
||||
```
|
||||
|
||||
## Bytes-literal attributes
|
||||
|
||||
All attribute access on literal `bytes` types is currently delegated to `builtins.bytes`:
|
||||
|
||||
```py
|
||||
reveal_type(b"foo".join) # revealed: @Todo(instance attributes)
|
||||
reveal_type(b"foo".endswith) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
@@ -1,95 +0,0 @@
|
||||
# Binary operations on booleans
|
||||
|
||||
## Basic Arithmetic
|
||||
|
||||
We try to be precise, and all operations except division will result in a `Literal` type.
|
||||
|
||||
```py
|
||||
a = True
|
||||
b = False
|
||||
|
||||
reveal_type(a + a) # revealed: Literal[2]
|
||||
reveal_type(a + b) # revealed: Literal[1]
|
||||
reveal_type(b + a) # revealed: Literal[1]
|
||||
reveal_type(b + b) # revealed: Literal[0]
|
||||
|
||||
reveal_type(a - a) # revealed: Literal[0]
|
||||
reveal_type(a - b) # revealed: Literal[1]
|
||||
reveal_type(b - a) # revealed: Literal[-1]
|
||||
reveal_type(b - b) # revealed: Literal[0]
|
||||
|
||||
reveal_type(a * a) # revealed: Literal[1]
|
||||
reveal_type(a * b) # revealed: Literal[0]
|
||||
reveal_type(b * a) # revealed: Literal[0]
|
||||
reveal_type(b * b) # revealed: Literal[0]
|
||||
|
||||
reveal_type(a % a) # revealed: Literal[0]
|
||||
reveal_type(b % a) # revealed: Literal[0]
|
||||
|
||||
reveal_type(a // a) # revealed: Literal[1]
|
||||
reveal_type(b // a) # revealed: Literal[0]
|
||||
|
||||
reveal_type(a**a) # revealed: Literal[1]
|
||||
reveal_type(a**b) # revealed: Literal[1]
|
||||
reveal_type(b**a) # revealed: Literal[0]
|
||||
reveal_type(b**b) # revealed: Literal[1]
|
||||
|
||||
# Division
|
||||
reveal_type(a / a) # revealed: float
|
||||
reveal_type(b / a) # revealed: float
|
||||
b / b # error: [division-by-zero] "Cannot divide object of type `Literal[False]` by zero"
|
||||
a / b # error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
||||
|
||||
# bitwise OR
|
||||
reveal_type(a | a) # revealed: Literal[True]
|
||||
reveal_type(a | b) # revealed: Literal[True]
|
||||
reveal_type(b | a) # revealed: Literal[True]
|
||||
reveal_type(b | b) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
## Arithmetic with a variable
|
||||
|
||||
```py
|
||||
a = True
|
||||
b = False
|
||||
|
||||
def lhs_is_int(x: int):
|
||||
reveal_type(x + a) # revealed: int
|
||||
reveal_type(x - a) # revealed: int
|
||||
reveal_type(x * a) # revealed: int
|
||||
reveal_type(x // a) # revealed: int
|
||||
reveal_type(x / a) # revealed: float
|
||||
reveal_type(x % a) # revealed: int
|
||||
|
||||
def rhs_is_int(x: int):
|
||||
reveal_type(a + x) # revealed: int
|
||||
reveal_type(a - x) # revealed: int
|
||||
reveal_type(a * x) # revealed: int
|
||||
reveal_type(a // x) # revealed: int
|
||||
reveal_type(a / x) # revealed: float
|
||||
reveal_type(a % x) # revealed: int
|
||||
|
||||
def lhs_is_bool(x: bool):
|
||||
reveal_type(x + a) # revealed: int
|
||||
reveal_type(x - a) # revealed: int
|
||||
reveal_type(x * a) # revealed: int
|
||||
reveal_type(x // a) # revealed: int
|
||||
reveal_type(x / a) # revealed: float
|
||||
reveal_type(x % a) # revealed: int
|
||||
|
||||
def rhs_is_bool(x: bool):
|
||||
reveal_type(a + x) # revealed: int
|
||||
reveal_type(a - x) # revealed: int
|
||||
reveal_type(a * x) # revealed: int
|
||||
reveal_type(a // x) # revealed: int
|
||||
reveal_type(a / x) # revealed: float
|
||||
reveal_type(a % x) # revealed: int
|
||||
|
||||
def both_are_bool(x: bool, y: bool):
|
||||
reveal_type(x + y) # revealed: int
|
||||
reveal_type(x - y) # revealed: int
|
||||
reveal_type(x * y) # revealed: int
|
||||
reveal_type(x // y) # revealed: int
|
||||
reveal_type(x / y) # revealed: float
|
||||
reveal_type(x % y) # revealed: int
|
||||
```
|
||||
@@ -1,27 +0,0 @@
|
||||
# Binary operations on classes
|
||||
|
||||
## Union of two classes
|
||||
|
||||
Unioning two classes via the `|` operator is only available in Python 3.10 and later.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
reveal_type(A | B) # revealed: UnionType
|
||||
```
|
||||
|
||||
## Union of two classes (prior to 3.10)
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
# error: "Operator `|` is unsupported between objects of type `Literal[A]` and `Literal[B]`"
|
||||
reveal_type(A | B) # revealed: Unknown
|
||||
```
|
||||
@@ -1,371 +0,0 @@
|
||||
# Custom binary operations
|
||||
|
||||
## Class instances
|
||||
|
||||
```py
|
||||
class Yes:
|
||||
def __add__(self, other) -> Literal["+"]:
|
||||
return "+"
|
||||
|
||||
def __sub__(self, other) -> Literal["-"]:
|
||||
return "-"
|
||||
|
||||
def __mul__(self, other) -> Literal["*"]:
|
||||
return "*"
|
||||
|
||||
def __matmul__(self, other) -> Literal["@"]:
|
||||
return "@"
|
||||
|
||||
def __truediv__(self, other) -> Literal["/"]:
|
||||
return "/"
|
||||
|
||||
def __mod__(self, other) -> Literal["%"]:
|
||||
return "%"
|
||||
|
||||
def __pow__(self, other) -> Literal["**"]:
|
||||
return "**"
|
||||
|
||||
def __lshift__(self, other) -> Literal["<<"]:
|
||||
return "<<"
|
||||
|
||||
def __rshift__(self, other) -> Literal[">>"]:
|
||||
return ">>"
|
||||
|
||||
def __or__(self, other) -> Literal["|"]:
|
||||
return "|"
|
||||
|
||||
def __xor__(self, other) -> Literal["^"]:
|
||||
return "^"
|
||||
|
||||
def __and__(self, other) -> Literal["&"]:
|
||||
return "&"
|
||||
|
||||
def __floordiv__(self, other) -> Literal["//"]:
|
||||
return "//"
|
||||
|
||||
class Sub(Yes): ...
|
||||
class No: ...
|
||||
|
||||
# Yes implements all of the dunder methods.
|
||||
reveal_type(Yes() + Yes()) # revealed: Literal["+"]
|
||||
reveal_type(Yes() - Yes()) # revealed: Literal["-"]
|
||||
reveal_type(Yes() * Yes()) # revealed: Literal["*"]
|
||||
reveal_type(Yes() @ Yes()) # revealed: Literal["@"]
|
||||
reveal_type(Yes() / Yes()) # revealed: Literal["/"]
|
||||
reveal_type(Yes() % Yes()) # revealed: Literal["%"]
|
||||
reveal_type(Yes() ** Yes()) # revealed: Literal["**"]
|
||||
reveal_type(Yes() << Yes()) # revealed: Literal["<<"]
|
||||
reveal_type(Yes() >> Yes()) # revealed: Literal[">>"]
|
||||
reveal_type(Yes() | Yes()) # revealed: Literal["|"]
|
||||
reveal_type(Yes() ^ Yes()) # revealed: Literal["^"]
|
||||
reveal_type(Yes() & Yes()) # revealed: Literal["&"]
|
||||
reveal_type(Yes() // Yes()) # revealed: Literal["//"]
|
||||
|
||||
# Sub inherits Yes's implementation of the dunder methods.
|
||||
reveal_type(Sub() + Sub()) # revealed: Literal["+"]
|
||||
reveal_type(Sub() - Sub()) # revealed: Literal["-"]
|
||||
reveal_type(Sub() * Sub()) # revealed: Literal["*"]
|
||||
reveal_type(Sub() @ Sub()) # revealed: Literal["@"]
|
||||
reveal_type(Sub() / Sub()) # revealed: Literal["/"]
|
||||
reveal_type(Sub() % Sub()) # revealed: Literal["%"]
|
||||
reveal_type(Sub() ** Sub()) # revealed: Literal["**"]
|
||||
reveal_type(Sub() << Sub()) # revealed: Literal["<<"]
|
||||
reveal_type(Sub() >> Sub()) # revealed: Literal[">>"]
|
||||
reveal_type(Sub() | Sub()) # revealed: Literal["|"]
|
||||
reveal_type(Sub() ^ Sub()) # revealed: Literal["^"]
|
||||
reveal_type(Sub() & Sub()) # revealed: Literal["&"]
|
||||
reveal_type(Sub() // Sub()) # revealed: Literal["//"]
|
||||
|
||||
# No does not implement any of the dunder methods.
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() + No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() - No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `*` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() * No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `@` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() @ No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `/` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() / No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `%` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() % No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `**` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() ** No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `<<` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() << No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>>` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() >> No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `|` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() | No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `^` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() ^ No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `&` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() & No()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `//` is unsupported between objects of type `No` and `No`"
|
||||
reveal_type(No() // No()) # revealed: Unknown
|
||||
|
||||
# Yes does not implement any of the reflected dunder methods.
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() + Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() - Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `*` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() * Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `@` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() @ Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `/` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() / Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `%` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() % Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `**` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() ** Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `<<` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() << Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>>` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() >> Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `|` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() | Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `^` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() ^ Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `&` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() & Yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `//` is unsupported between objects of type `No` and `Yes`"
|
||||
reveal_type(No() // Yes()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Subclass reflections override superclass dunders
|
||||
|
||||
```py
|
||||
class Yes:
|
||||
def __add__(self, other) -> Literal["+"]:
|
||||
return "+"
|
||||
|
||||
def __sub__(self, other) -> Literal["-"]:
|
||||
return "-"
|
||||
|
||||
def __mul__(self, other) -> Literal["*"]:
|
||||
return "*"
|
||||
|
||||
def __matmul__(self, other) -> Literal["@"]:
|
||||
return "@"
|
||||
|
||||
def __truediv__(self, other) -> Literal["/"]:
|
||||
return "/"
|
||||
|
||||
def __mod__(self, other) -> Literal["%"]:
|
||||
return "%"
|
||||
|
||||
def __pow__(self, other) -> Literal["**"]:
|
||||
return "**"
|
||||
|
||||
def __lshift__(self, other) -> Literal["<<"]:
|
||||
return "<<"
|
||||
|
||||
def __rshift__(self, other) -> Literal[">>"]:
|
||||
return ">>"
|
||||
|
||||
def __or__(self, other) -> Literal["|"]:
|
||||
return "|"
|
||||
|
||||
def __xor__(self, other) -> Literal["^"]:
|
||||
return "^"
|
||||
|
||||
def __and__(self, other) -> Literal["&"]:
|
||||
return "&"
|
||||
|
||||
def __floordiv__(self, other) -> Literal["//"]:
|
||||
return "//"
|
||||
|
||||
class Sub(Yes):
|
||||
def __radd__(self, other) -> Literal["r+"]:
|
||||
return "r+"
|
||||
|
||||
def __rsub__(self, other) -> Literal["r-"]:
|
||||
return "r-"
|
||||
|
||||
def __rmul__(self, other) -> Literal["r*"]:
|
||||
return "r*"
|
||||
|
||||
def __rmatmul__(self, other) -> Literal["r@"]:
|
||||
return "r@"
|
||||
|
||||
def __rtruediv__(self, other) -> Literal["r/"]:
|
||||
return "r/"
|
||||
|
||||
def __rmod__(self, other) -> Literal["r%"]:
|
||||
return "r%"
|
||||
|
||||
def __rpow__(self, other) -> Literal["r**"]:
|
||||
return "r**"
|
||||
|
||||
def __rlshift__(self, other) -> Literal["r<<"]:
|
||||
return "r<<"
|
||||
|
||||
def __rrshift__(self, other) -> Literal["r>>"]:
|
||||
return "r>>"
|
||||
|
||||
def __ror__(self, other) -> Literal["r|"]:
|
||||
return "r|"
|
||||
|
||||
def __rxor__(self, other) -> Literal["r^"]:
|
||||
return "r^"
|
||||
|
||||
def __rand__(self, other) -> Literal["r&"]:
|
||||
return "r&"
|
||||
|
||||
def __rfloordiv__(self, other) -> Literal["r//"]:
|
||||
return "r//"
|
||||
|
||||
class No:
|
||||
def __radd__(self, other) -> Literal["r+"]:
|
||||
return "r+"
|
||||
|
||||
def __rsub__(self, other) -> Literal["r-"]:
|
||||
return "r-"
|
||||
|
||||
def __rmul__(self, other) -> Literal["r*"]:
|
||||
return "r*"
|
||||
|
||||
def __rmatmul__(self, other) -> Literal["r@"]:
|
||||
return "r@"
|
||||
|
||||
def __rtruediv__(self, other) -> Literal["r/"]:
|
||||
return "r/"
|
||||
|
||||
def __rmod__(self, other) -> Literal["r%"]:
|
||||
return "r%"
|
||||
|
||||
def __rpow__(self, other) -> Literal["r**"]:
|
||||
return "r**"
|
||||
|
||||
def __rlshift__(self, other) -> Literal["r<<"]:
|
||||
return "r<<"
|
||||
|
||||
def __rrshift__(self, other) -> Literal["r>>"]:
|
||||
return "r>>"
|
||||
|
||||
def __ror__(self, other) -> Literal["r|"]:
|
||||
return "r|"
|
||||
|
||||
def __rxor__(self, other) -> Literal["r^"]:
|
||||
return "r^"
|
||||
|
||||
def __rand__(self, other) -> Literal["r&"]:
|
||||
return "r&"
|
||||
|
||||
def __rfloordiv__(self, other) -> Literal["r//"]:
|
||||
return "r//"
|
||||
|
||||
# Subclass reflected dunder methods take precedence over the superclass's regular dunders.
|
||||
reveal_type(Yes() + Sub()) # revealed: Literal["r+"]
|
||||
reveal_type(Yes() - Sub()) # revealed: Literal["r-"]
|
||||
reveal_type(Yes() * Sub()) # revealed: Literal["r*"]
|
||||
reveal_type(Yes() @ Sub()) # revealed: Literal["r@"]
|
||||
reveal_type(Yes() / Sub()) # revealed: Literal["r/"]
|
||||
reveal_type(Yes() % Sub()) # revealed: Literal["r%"]
|
||||
reveal_type(Yes() ** Sub()) # revealed: Literal["r**"]
|
||||
reveal_type(Yes() << Sub()) # revealed: Literal["r<<"]
|
||||
reveal_type(Yes() >> Sub()) # revealed: Literal["r>>"]
|
||||
reveal_type(Yes() | Sub()) # revealed: Literal["r|"]
|
||||
reveal_type(Yes() ^ Sub()) # revealed: Literal["r^"]
|
||||
reveal_type(Yes() & Sub()) # revealed: Literal["r&"]
|
||||
reveal_type(Yes() // Sub()) # revealed: Literal["r//"]
|
||||
|
||||
# But for an unrelated class, the superclass regular dunders are used.
|
||||
reveal_type(Yes() + No()) # revealed: Literal["+"]
|
||||
reveal_type(Yes() - No()) # revealed: Literal["-"]
|
||||
reveal_type(Yes() * No()) # revealed: Literal["*"]
|
||||
reveal_type(Yes() @ No()) # revealed: Literal["@"]
|
||||
reveal_type(Yes() / No()) # revealed: Literal["/"]
|
||||
reveal_type(Yes() % No()) # revealed: Literal["%"]
|
||||
reveal_type(Yes() ** No()) # revealed: Literal["**"]
|
||||
reveal_type(Yes() << No()) # revealed: Literal["<<"]
|
||||
reveal_type(Yes() >> No()) # revealed: Literal[">>"]
|
||||
reveal_type(Yes() | No()) # revealed: Literal["|"]
|
||||
reveal_type(Yes() ^ No()) # revealed: Literal["^"]
|
||||
reveal_type(Yes() & No()) # revealed: Literal["&"]
|
||||
reveal_type(Yes() // No()) # revealed: Literal["//"]
|
||||
```
|
||||
|
||||
## Classes
|
||||
|
||||
Dunder methods defined in a class are available to instances of that class, but not to the class
|
||||
itself. (For these operators to work on the class itself, they would have to be defined on the
|
||||
class's type, i.e. its metaclass, which is `type` by default.)
|
||||
|
||||
```py
|
||||
class Yes:
|
||||
def __add__(self, other) -> Literal["+"]:
|
||||
return "+"
|
||||
|
||||
class Sub(Yes): ...
|
||||
class No: ...
|
||||
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[Yes]` and `Literal[Yes]`"
|
||||
reveal_type(Yes + Yes) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[Sub]` and `Literal[Sub]`"
|
||||
reveal_type(Sub + Sub) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[No]` and `Literal[No]`"
|
||||
reveal_type(No + No) # revealed: Unknown
|
||||
```
|
||||
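At runtime, the way to make these operators work on the class object itself is indeed to define the
dunder on the metaclass. The following is a small runtime sketch (not one of the original fixtures;
`Meta`, `WithMeta`, and `Plain` are illustrative names):

```py
class Meta(type):
    def __add__(cls, other):
        return "+"

class WithMeta(metaclass=Meta): ...

print(WithMeta + WithMeta)  # "+": the metaclass provides `__add__` for the class object

class Plain:
    def __add__(self, other):
        return "instance +"

try:
    Plain + Plain
except TypeError as err:
    # `Plain.__add__` only serves instances; the class object's own type is plain `type`.
    print(err)
```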
|
||||
## Subclass
|
||||
|
||||
```py
|
||||
class Yes:
|
||||
def __add__(self, other) -> Literal["+"]:
|
||||
return "+"
|
||||
|
||||
class Sub(Yes): ...
|
||||
class No: ...
|
||||
|
||||
def yes() -> type[Yes]:
|
||||
return Yes
|
||||
|
||||
def sub() -> type[Sub]:
|
||||
return Sub
|
||||
|
||||
def no() -> type[No]:
|
||||
return No
|
||||
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `type[Yes]` and `type[Yes]`"
|
||||
reveal_type(yes() + yes()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `type[Sub]` and `type[Sub]`"
|
||||
reveal_type(sub() + sub()) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `type[No]` and `type[No]`"
|
||||
reveal_type(no() + no()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Function literals
|
||||
|
||||
```py
|
||||
def f():
|
||||
pass
|
||||
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f + f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f - f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `*` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f * f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `@` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f @ f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `/` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f / f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `%` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f % f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `**` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f**f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `<<` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f << f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>>` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f >> f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `|` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f | f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `^` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f ^ f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `&` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f & f) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `//` is unsupported between objects of type `Literal[f]` and `Literal[f]`"
|
||||
reveal_type(f // f) # revealed: Unknown
|
||||
```
|
||||
@@ -1,427 +0,0 @@
|
||||
# Binary operations on instances
|
||||
|
||||
Binary operations in Python are implemented by means of magic double-underscore methods.
|
||||
|
||||
For references, see:
|
||||
|
||||
- <https://snarky.ca/unravelling-binary-arithmetic-operations-in-python/>
|
||||
- <https://docs.python.org/3/reference/datamodel.html#emulating-numeric-types>
|
||||
|
||||
## Operations
|
||||
|
||||
We support inference for all of Python's binary operators: `+`, `-`, `*`, `@`, `/`, `//`, `%`, `**`,
|
||||
`<<`, `>>`, `&`, `^`, and `|`.
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __sub__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __mul__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __matmul__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __truediv__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __floordiv__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __mod__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __pow__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __lshift__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rshift__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __and__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __xor__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __or__(self, other) -> A:
|
||||
return self
|
||||
|
||||
class B: ...
|
||||
|
||||
reveal_type(A() + B()) # revealed: A
|
||||
reveal_type(A() - B()) # revealed: A
|
||||
reveal_type(A() * B()) # revealed: A
|
||||
reveal_type(A() @ B()) # revealed: A
|
||||
reveal_type(A() / B()) # revealed: A
|
||||
reveal_type(A() // B()) # revealed: A
|
||||
reveal_type(A() % B()) # revealed: A
|
||||
reveal_type(A() ** B()) # revealed: A
|
||||
reveal_type(A() << B()) # revealed: A
|
||||
reveal_type(A() >> B()) # revealed: A
|
||||
reveal_type(A() & B()) # revealed: A
|
||||
reveal_type(A() ^ B()) # revealed: A
|
||||
reveal_type(A() | B()) # revealed: A
|
||||
```
|
||||
|
||||
## Reflected
|
||||
|
||||
We also support inference for reflected operations:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __radd__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rsub__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rmul__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rmatmul__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rtruediv__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rfloordiv__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rmod__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rpow__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rlshift__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rrshift__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rand__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __rxor__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __ror__(self, other) -> A:
|
||||
return self
|
||||
|
||||
class B: ...
|
||||
|
||||
reveal_type(B() + A()) # revealed: A
|
||||
reveal_type(B() - A()) # revealed: A
|
||||
reveal_type(B() * A()) # revealed: A
|
||||
reveal_type(B() @ A()) # revealed: A
|
||||
reveal_type(B() / A()) # revealed: A
|
||||
reveal_type(B() // A()) # revealed: A
|
||||
reveal_type(B() % A()) # revealed: A
|
||||
reveal_type(B() ** A()) # revealed: A
|
||||
reveal_type(B() << A()) # revealed: A
|
||||
reveal_type(B() >> A()) # revealed: A
|
||||
reveal_type(B() & A()) # revealed: A
|
||||
reveal_type(B() ^ A()) # revealed: A
|
||||
reveal_type(B() | A()) # revealed: A
|
||||
```
|
||||
|
||||
## Returning a different type
|
||||
|
||||
The magic methods aren't required to return the type of `self`:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other) -> int:
|
||||
return 1
|
||||
|
||||
def __rsub__(self, other) -> int:
|
||||
return 1
|
||||
|
||||
class B: ...
|
||||
|
||||
reveal_type(A() + B()) # revealed: int
|
||||
reveal_type(B() - A()) # revealed: int
|
||||
```
|
||||
|
||||
## Non-reflected precedence in general
|
||||
|
||||
In general, if the left-hand side defines `__add__` and the right-hand side defines `__radd__` and
|
||||
the right-hand side is not a subtype of the left-hand side, `lhs.__add__` will take precedence:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other: B) -> int:
|
||||
return 42
|
||||
|
||||
class B:
|
||||
def __radd__(self, other: A) -> str:
|
||||
return "foo"
|
||||
|
||||
reveal_type(A() + B()) # revealed: int
|
||||
|
||||
# Edge case: C is a subtype of itself, *but* if the two sides are of *equal* types,
|
||||
# the lhs *still* takes precedence
|
||||
class C:
|
||||
def __add__(self, other: C) -> int:
|
||||
return 42
|
||||
|
||||
def __radd__(self, other: C) -> str:
|
||||
return "foo"
|
||||
|
||||
reveal_type(C() + C()) # revealed: int
|
||||
```
|
||||
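The same precedence is observable at runtime: the left-hand dunder is called first, and the
right-hand side's reflected dunder is only consulted if the first call signals failure by returning
`NotImplemented`. A minimal runtime sketch (illustrative names, not part of the original fixtures):

```py
class Left:
    def __add__(self, other):
        print("Left.__add__ tried first")
        return NotImplemented

class Right:
    def __radd__(self, other):
        return "handled by Right.__radd__"

# Prints "Left.__add__ tried first", then "handled by Right.__radd__".
print(Left() + Right())
```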
|
||||
## Reflected precedence for subtypes (in some cases)
|
||||
|
||||
If the right-hand operand is a subtype of the left-hand operand and has a different implementation
|
||||
of the reflected method, the reflected method on the right-hand operand takes precedence.
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other) -> str:
|
||||
return "foo"
|
||||
|
||||
def __radd__(self, other) -> str:
|
||||
return "foo"
|
||||
|
||||
class MyString(str): ...
|
||||
|
||||
class B(A):
|
||||
def __radd__(self, other) -> MyString:
|
||||
return MyString()
|
||||
|
||||
reveal_type(A() + B()) # revealed: MyString
|
||||
|
||||
# N.B. Still a subtype of `A`, even though `A` does not appear directly in the class's `__bases__`
|
||||
class C(B): ...
|
||||
|
||||
reveal_type(A() + C()) # revealed: MyString
|
||||
```
|
||||
|
||||
## Reflected precedence 2
|
||||
|
||||
If the right-hand operand is a subtype of the left-hand operand, but does not override the reflected
|
||||
method, the left-hand operand's non-reflected method still takes precedence:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other) -> str:
|
||||
return "foo"
|
||||
|
||||
def __radd__(self, other) -> int:
|
||||
return 42
|
||||
|
||||
class B(A): ...
|
||||
|
||||
reveal_type(A() + B()) # revealed: str
|
||||
```
|
||||
|
||||
## Only reflected supported
|
||||
|
||||
For example, at runtime, `(1).__add__(1.2)` is `NotImplemented`, but `(1.2).__radd__(1) == 2.2`,
|
||||
meaning that `1 + 1.2` succeeds at runtime (producing `2.2`). The runtime tries the second one only
|
||||
if the first one returns `NotImplemented` to signal failure.
|
||||
|
||||
Typeshed and other stubs annotate dunder-method calls that would return `NotImplemented` as being
|
||||
"illegal" calls. `int.__add__` is annotated as only "accepting" `int`s, even though it
|
||||
strictly speaking "accepts" any other object without raising an exception -- it will simply return
|
||||
`NotImplemented`, allowing the runtime to try the `__radd__` method of the right-hand operand as
|
||||
well.
|
||||
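A quick runtime illustration of the concrete claim above (a sketch, not one of the original
assertions):

```py
# The unreflected dunder signals failure with `NotImplemented`, and the
# interpreter then falls back to the reflected dunder on the right-hand side.
print((1).__add__(1.2))   # NotImplemented
print((1.2).__radd__(1))  # 2.2
print(1 + 1.2)            # 2.2, because the fallback made the operation succeed
```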
|
||||
```py
|
||||
class A:
|
||||
def __sub__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
class B:
|
||||
def __rsub__(self, other: A) -> B:
|
||||
return B()
|
||||
|
||||
# TODO: this should be `B` (the return annotation of `B.__rsub__`),
|
||||
# because `A.__sub__` is annotated as only accepting `A`,
|
||||
# but `B.__rsub__` will accept `A`.
|
||||
reveal_type(A() - B()) # revealed: A
|
||||
```
|
||||
|
||||
## Callable instances as dunders
|
||||
|
||||
Believe it or not, this is supported at runtime:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __call__(self, other) -> int:
|
||||
return 42
|
||||
|
||||
class B:
|
||||
__add__ = A()
|
||||
|
||||
reveal_type(B() + B()) # revealed: int
|
||||
```
|
||||
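A runtime sketch of why this works (illustrative names, not one of the original fixtures): the
implicit special-method lookup finds the callable instance on the class and calls it, passing the
right-hand operand.

```py
class Adder:
    def __call__(self, other):
        return 42

class UsesCallableDunder:
    # Any callable object works as a dunder, as long as it lives on the class.
    __add__ = Adder()

print(UsesCallableDunder() + UsesCallableDunder())  # 42
```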
|
||||
## Integration test: numbers from typeshed
|
||||
|
||||
```py
|
||||
reveal_type(3j + 3.14) # revealed: complex
|
||||
reveal_type(4.2 + 42) # revealed: float
|
||||
reveal_type(3j + 3) # revealed: complex
|
||||
|
||||
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
|
||||
reveal_type(3.14 + 3j) # revealed: float
|
||||
|
||||
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
|
||||
reveal_type(42 + 4.2) # revealed: int
|
||||
|
||||
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
|
||||
reveal_type(3 + 3j) # revealed: int
|
||||
|
||||
def _(x: bool, y: int):
|
||||
reveal_type(x + y) # revealed: int
|
||||
reveal_type(4.2 + x) # revealed: float
|
||||
|
||||
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
|
||||
reveal_type(y + 4.12) # revealed: int
|
||||
```
|
||||
|
||||
## With literal types
|
||||
|
||||
When we have a literal type for one operand, we fall back to the handling for its corresponding
instance-level supertype (e.g. `int` for `Literal[1]`).
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other) -> A:
|
||||
return self
|
||||
|
||||
def __radd__(self, other) -> A:
|
||||
return self
|
||||
|
||||
reveal_type(A() + 1) # revealed: A
|
||||
# TODO should be `A` since `int.__add__` doesn't support `A` instances
|
||||
reveal_type(1 + A()) # revealed: int
|
||||
|
||||
reveal_type(A() + "foo") # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type("foo" + A()) # revealed: @Todo(return type)
|
||||
|
||||
reveal_type(A() + b"foo") # revealed: A
|
||||
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
|
||||
reveal_type(b"foo" + A()) # revealed: bytes
|
||||
|
||||
reveal_type(A() + ()) # revealed: A
|
||||
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
|
||||
reveal_type(() + A()) # revealed: @Todo(return type)
|
||||
|
||||
literal_string_instance = "foo" * 1_000_000_000
|
||||
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
|
||||
reveal_type(literal_string_instance) # revealed: LiteralString
|
||||
|
||||
reveal_type(A() + literal_string_instance) # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Operations involving instances of classes inheriting from `Any`
|
||||
|
||||
`Any` and `Unknown` represent a set of possible runtime objects, wherein the bounds of the set are
|
||||
unknown. Whether the left-hand operand's dunder or the right-hand operand's reflected dunder is used depends
|
||||
on whether the right-hand operand is an instance of a class that is a subclass of the left-hand
|
||||
operand's class and overrides the reflected dunder. In the following example, because of the
|
||||
unknowable nature of `Any`/`Unknown`, we must consider both possibilities: `Any`/`Unknown` might
|
||||
resolve to an unknown third class that inherits from `X` and overrides `__radd__`; but it also might
|
||||
not. Thus, the correct answer here for the `reveal_type` is `int | Unknown`.
|
||||
|
||||
```py
|
||||
from does_not_exist import Foo # error: [unresolved-import]
|
||||
|
||||
reveal_type(Foo) # revealed: Unknown
|
||||
|
||||
class X:
|
||||
def __add__(self, other: object) -> int:
|
||||
return 42
|
||||
|
||||
class Y(Foo): ...
|
||||
|
||||
# TODO: Should be `int | Unknown`; see above discussion.
|
||||
reveal_type(X() + Y()) # revealed: int
|
||||
```
|
||||
|
||||
## Unsupported
|
||||
|
||||
### Dunder as instance attribute
|
||||
|
||||
The magic method must exist on the class, not just on the instance:
|
||||
|
||||
```py
|
||||
def add_impl(self, other) -> int:
|
||||
return 1
|
||||
|
||||
class A:
|
||||
def __init__(self):
|
||||
self.__add__ = add_impl
|
||||
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `A` and `A`"
|
||||
# revealed: Unknown
|
||||
reveal_type(A() + A())
|
||||
```
|
||||
|
||||
### Missing dunder
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
|
||||
# error: [unsupported-operator]
|
||||
# revealed: Unknown
|
||||
reveal_type(A() + A())
|
||||
```
|
||||
|
||||
### Wrong position
|
||||
|
||||
A regular dunder method doesn't help when its class is on the right-hand side, and a reflected dunder doesn't help when its class is on the left-hand side:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __add__(self, other) -> int: ...
|
||||
|
||||
class B:
|
||||
def __radd__(self, other) -> int: ...
|
||||
|
||||
class C: ...
|
||||
|
||||
# error: [unsupported-operator]
|
||||
# revealed: Unknown
|
||||
reveal_type(C() + A())
|
||||
|
||||
# error: [unsupported-operator]
|
||||
# revealed: Unknown
|
||||
reveal_type(B() + C())
|
||||
```
|
||||
|
||||
### Reflected dunder is not tried between two objects of the same type
|
||||
|
||||
For the specific case where the left-hand operand is the exact same type as the right-hand operand,
|
||||
the reflected dunder of the right-hand operand is not tried; the runtime short-circuits after trying
|
||||
the unreflected dunder of the left-hand operand. For context, see
|
||||
[this mailing list discussion](https://mail.python.org/archives/list/python-dev@python.org/thread/7NZUCODEAPQFMRFXYRMGJXDSIS3WJYIV/).
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
def __radd__(self, other: Foo) -> Foo:
|
||||
return self
|
||||
|
||||
# error: [unsupported-operator]
|
||||
# revealed: Unknown
|
||||
reveal_type(Foo() + Foo())
|
||||
```
|
||||
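The runtime behaviour matches: with only a reflected dunder defined, an operation between two
objects of the same class fails. A small runtime sketch (`Spam` is an illustrative name, not part of
the original fixtures):

```py
class Spam:
    def __radd__(self, other):
        return "Spam.__radd__"

try:
    Spam() + Spam()
except TypeError as err:
    # Both operands have exactly the same type, so `__radd__` is never consulted.
    print(err)  # unsupported operand type(s) for +: 'Spam' and 'Spam'
```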
|
||||
### Wrong type
|
||||
|
||||
TODO: check signature and error if `other` is the wrong type
|
||||
@@ -1,103 +0,0 @@
|
||||
# Binary operations on integers
|
||||
|
||||
## Basic Arithmetic
|
||||
|
||||
```py
|
||||
reveal_type(2 + 1) # revealed: Literal[3]
|
||||
reveal_type(3 - 4) # revealed: Literal[-1]
|
||||
reveal_type(3 * -1) # revealed: Literal[-3]
|
||||
reveal_type(-3 // 3) # revealed: Literal[-1]
|
||||
reveal_type(-3 / 3) # revealed: float
|
||||
reveal_type(5 % 3) # revealed: Literal[2]
|
||||
|
||||
# TODO: We don't currently verify that the actual parameter to int.__add__ matches the declared
|
||||
# formal parameter type.
|
||||
reveal_type(2 + "f") # revealed: int
|
||||
|
||||
def lhs(x: int):
|
||||
reveal_type(x + 1) # revealed: int
|
||||
reveal_type(x - 4) # revealed: int
|
||||
reveal_type(x * -1) # revealed: int
|
||||
reveal_type(x // 3) # revealed: int
|
||||
reveal_type(x / 3) # revealed: float
|
||||
reveal_type(x % 3) # revealed: int
|
||||
|
||||
def rhs(x: int):
|
||||
reveal_type(2 + x) # revealed: int
|
||||
reveal_type(3 - x) # revealed: int
|
||||
reveal_type(3 * x) # revealed: int
|
||||
reveal_type(-3 // x) # revealed: int
|
||||
reveal_type(-3 / x) # revealed: float
|
||||
reveal_type(5 % x) # revealed: int
|
||||
|
||||
def both(x: int):
|
||||
reveal_type(x + x) # revealed: int
|
||||
reveal_type(x - x) # revealed: int
|
||||
reveal_type(x * x) # revealed: int
|
||||
reveal_type(x // x) # revealed: int
|
||||
reveal_type(x / x) # revealed: float
|
||||
reveal_type(x % x) # revealed: int
|
||||
```
|
||||
|
||||
## Power
|
||||
|
||||
For `**`, if the result fits within the integer-literal representation, we infer a `Literal` type.
Otherwise the outcome is `int`.
|
||||
|
||||
```py
|
||||
largest_u32 = 4_294_967_295
|
||||
reveal_type(2**2) # revealed: Literal[4]
|
||||
reveal_type(1 ** (largest_u32 + 1)) # revealed: int
|
||||
reveal_type(2**largest_u32) # revealed: int
|
||||
|
||||
def variable(x: int):
|
||||
reveal_type(x**2) # revealed: @Todo(return type)
|
||||
reveal_type(2**x) # revealed: @Todo(return type)
|
||||
reveal_type(x**x) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Division by Zero
|
||||
|
||||
This error is really outside the current Python type system, because e.g. `int.__truediv__` and
|
||||
friends are not annotated to indicate that it's an error, and we don't even have a facility to
|
||||
permit such an annotation. So arguably divide-by-zero should be a lint error rather than a type
|
||||
checker error. But we choose to go ahead and error in the cases that are very likely to be an error:
|
||||
dividing something typed as `int` or `float` by something known to be `Literal[0]`.
|
||||
|
||||
This isn't _definitely_ an error, because the object typed as `int` or `float` could be an instance
|
||||
of a custom subclass which overrides division behavior to handle zero without error. But if this
|
||||
unusual case occurs, the error can be avoided by explicitly typing the dividend as that safe custom
|
||||
subclass; we only emit the error if the LHS type is exactly `int` or `float`, not if its a subclass.
|
||||
|
||||
```py
|
||||
a = 1 / 0 # error: "Cannot divide object of type `Literal[1]` by zero"
|
||||
reveal_type(a) # revealed: float
|
||||
|
||||
b = 2 // 0 # error: "Cannot floor divide object of type `Literal[2]` by zero"
|
||||
reveal_type(b) # revealed: int
|
||||
|
||||
c = 3 % 0 # error: "Cannot reduce object of type `Literal[3]` modulo zero"
|
||||
reveal_type(c) # revealed: int
|
||||
|
||||
# error: "Cannot divide object of type `int` by zero"
|
||||
# revealed: float
|
||||
reveal_type(int() / 0)
|
||||
|
||||
# error: "Cannot divide object of type `Literal[1]` by zero"
|
||||
# revealed: float
|
||||
reveal_type(1 / False)
|
||||
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
||||
True / False
|
||||
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
||||
bool(1) / False
|
||||
|
||||
# error: "Cannot divide object of type `float` by zero"
|
||||
# revealed: float
|
||||
reveal_type(1.0 / 0)
|
||||
|
||||
class MyInt(int): ...
|
||||
|
||||
# No error for a subclass of int
|
||||
# revealed: float
|
||||
reveal_type(MyInt(3) / 0)
|
||||
```
|
||||
@@ -1,67 +0,0 @@
|
||||
# Short-Circuit Evaluation
|
||||
|
||||
## Not all operands of a boolean expression are evaluated
|
||||
|
||||
In `or` expressions, if the left-hand side is truthy, the right-hand side is not evaluated.
|
||||
Similarly, in `and` expressions, if the left-hand side is falsy, the right-hand side is not
|
||||
evaluated.
|
||||
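As a plain runtime illustration of the short-circuiting itself (a sketch, separate from the
assertions below):

```py
def rhs() -> bool:
    print("right-hand side evaluated")
    return True

True or rhs()    # prints nothing: the right-hand side is skipped
False and rhs()  # prints nothing either
False or rhs()   # prints "right-hand side evaluated"
```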
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag or (x := 1):
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
if flag and (x := 1):
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## First expression is always evaluated
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if (x := 1) or flag:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
if (x := 1) and flag:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Statically known truthiness
|
||||
|
||||
```py
|
||||
if True or (x := 1):
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(x) # revealed: Unknown
|
||||
|
||||
if True and (x := 1):
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Later expressions can always use variables from earlier expressions
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
flag or (x := 1) or reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
# error: [unresolved-reference]
|
||||
flag or reveal_type(y) or (y := 1) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Nested expressions
|
||||
|
||||
```py
|
||||
def _(flag1: bool, flag2: bool):
|
||||
if flag1 or ((x := 1) and flag2):
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
if ((y := 1) and flag1) or flag2:
|
||||
reveal_type(y) # revealed: Literal[1]
|
||||
|
||||
# error: [possibly-unresolved-reference]
|
||||
if (flag1 and (z := 1)) or reveal_type(z): # revealed: Literal[1]
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(z) # revealed: Literal[1]
|
||||
```
|
||||
@@ -1,101 +0,0 @@
|
||||
# Callable instance
|
||||
|
||||
## Dunder call
|
||||
|
||||
```py
|
||||
class Multiplier:
|
||||
def __init__(self, factor: float):
|
||||
self.factor = factor
|
||||
|
||||
def __call__(self, number: float) -> float:
|
||||
return number * self.factor
|
||||
|
||||
a = Multiplier(2.0)(3.0)
|
||||
reveal_type(a) # revealed: float
|
||||
|
||||
class Unit: ...
|
||||
|
||||
b = Unit()(3.0) # error: "Object of type `Unit` is not callable"
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Possibly unbound `__call__` method
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class PossiblyNotCallable:
|
||||
if flag:
|
||||
def __call__(self) -> int: ...
|
||||
|
||||
a = PossiblyNotCallable()
|
||||
result = a() # error: "Object of type `PossiblyNotCallable` is not callable (possibly unbound `__call__` method)"
|
||||
reveal_type(result) # revealed: int
|
||||
```
|
||||
|
||||
## Possibly unbound callable
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
class PossiblyUnbound:
|
||||
def __call__(self) -> int: ...
|
||||
|
||||
# error: [possibly-unresolved-reference]
|
||||
a = PossiblyUnbound()
|
||||
reveal_type(a()) # revealed: int
|
||||
```
|
||||
|
||||
## Non-callable `__call__`
|
||||
|
||||
```py
|
||||
class NonCallable:
|
||||
__call__ = 1
|
||||
|
||||
a = NonCallable()
|
||||
# error: "Object of type `NonCallable` is not callable"
|
||||
reveal_type(a()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Possibly non-callable `__call__`
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class NonCallable:
|
||||
if flag:
|
||||
__call__ = 1
|
||||
else:
|
||||
def __call__(self) -> int: ...
|
||||
|
||||
a = NonCallable()
|
||||
# error: "Object of type `Literal[1] | Literal[__call__]` is not callable (due to union element `Literal[1]`)"
|
||||
reveal_type(a()) # revealed: Unknown | int
|
||||
```
|
||||
|
||||
## Call binding errors
|
||||
|
||||
### Wrong argument type
|
||||
|
||||
```py
|
||||
class C:
|
||||
def __call__(self, x: int) -> int:
|
||||
return 1
|
||||
|
||||
c = C()
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 2 (`x`) of function `__call__`; expected type `int`"
|
||||
reveal_type(c("foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Wrong argument type on `self`
|
||||
|
||||
```py
|
||||
class C:
|
||||
# TODO this definition should also be an error; `C` must be assignable to the type of `self`
|
||||
def __call__(self: int) -> int:
|
||||
return 1
|
||||
|
||||
c = C()
|
||||
|
||||
# error: 13 [invalid-argument-type] "Object of type `C` cannot be assigned to parameter 1 (`self`) of function `__call__`; expected type `int`"
|
||||
reveal_type(c()) # revealed: int
|
||||
```
|
||||
@@ -1,7 +0,0 @@
|
||||
# Constructor
|
||||
|
||||
```py
|
||||
class Foo: ...
|
||||
|
||||
reveal_type(Foo()) # revealed: Foo
|
||||
```
|
||||
@@ -1,323 +0,0 @@
|
||||
# Call expression
|
||||
|
||||
## Simple
|
||||
|
||||
```py
|
||||
def get_int() -> int:
|
||||
return 42
|
||||
|
||||
reveal_type(get_int()) # revealed: int
|
||||
```
|
||||
|
||||
## Async
|
||||
|
||||
```py
|
||||
async def get_int_async() -> int:
|
||||
return 42
|
||||
|
||||
# TODO: we don't yet support `types.CoroutineType`, should be generic `Coroutine[Any, Any, int]`
|
||||
reveal_type(get_int_async()) # revealed: @Todo(generic types.CoroutineType)
|
||||
```
|
||||
|
||||
## Generic
|
||||
|
||||
```py
|
||||
def get_int[T]() -> int:
|
||||
return 42
|
||||
|
||||
reveal_type(get_int()) # revealed: int
|
||||
```
|
||||
|
||||
## Decorated
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
def foo() -> int:
|
||||
return 42
|
||||
|
||||
def decorator(func) -> Callable[[], int]:
|
||||
return foo
|
||||
|
||||
@decorator
|
||||
def bar() -> str:
|
||||
return "bar"
|
||||
|
||||
# TODO: should reveal `int`, as the decorator replaces `bar` with `foo`
|
||||
reveal_type(bar()) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Invalid callable
|
||||
|
||||
```py
|
||||
nonsense = 123
|
||||
x = nonsense() # error: "Object of type `Literal[123]` is not callable"
|
||||
```
|
||||
|
||||
## Potentially unbound function
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
def foo() -> int:
|
||||
return 42
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(foo()) # revealed: int
|
||||
```
|
||||
|
||||
## Wrong argument type
|
||||
|
||||
### Positional argument, positional-or-keyword parameter
|
||||
|
||||
```py
|
||||
def f(x: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 1 (`x`) of function `f`; expected type `int`"
|
||||
reveal_type(f("foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Positional argument, positional-only parameter
|
||||
|
||||
```py
|
||||
def f(x: int, /) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 1 (`x`) of function `f`; expected type `int`"
|
||||
reveal_type(f("foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Positional argument, variadic parameter
|
||||
|
||||
```py
|
||||
def f(*args: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter `*args` of function `f`; expected type `int`"
|
||||
reveal_type(f("foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Keyword argument, positional-or-keyword parameter
|
||||
|
||||
```py
|
||||
def f(x: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter `x` of function `f`; expected type `int`"
|
||||
reveal_type(f(x="foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Keyword argument, keyword-only parameter
|
||||
|
||||
```py
|
||||
def f(*, x: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter `x` of function `f`; expected type `int`"
|
||||
reveal_type(f(x="foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Keyword argument, keywords parameter
|
||||
|
||||
```py
|
||||
def f(**kwargs: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter `**kwargs` of function `f`; expected type `int`"
|
||||
reveal_type(f(x="foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Correctly match keyword out-of-order
|
||||
|
||||
```py
|
||||
def f(x: int = 1, y: str = "foo") -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal[2]` cannot be assigned to parameter `y` of function `f`; expected type `str`"
|
||||
# error: 20 [invalid-argument-type] "Object of type `Literal["bar"]` cannot be assigned to parameter `x` of function `f`; expected type `int`"
|
||||
reveal_type(f(y=2, x="bar")) # revealed: int
|
||||
```
|
||||
|
||||
## Too many positional arguments
|
||||
|
||||
### One too many
|
||||
|
||||
```py
|
||||
def f() -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [too-many-positional-arguments] "Too many positional arguments to function `f`: expected 0, got 1"
|
||||
reveal_type(f("foo")) # revealed: int
|
||||
```
|
||||
|
||||
### Two too many
|
||||
|
||||
```py
|
||||
def f() -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [too-many-positional-arguments] "Too many positional arguments to function `f`: expected 0, got 2"
|
||||
reveal_type(f("foo", "bar")) # revealed: int
|
||||
```
|
||||
|
||||
### No too-many-positional error when a variadic parameter is present
|
||||
|
||||
```py
|
||||
def f(*args: int) -> int:
|
||||
return 1
|
||||
|
||||
reveal_type(f(1, 2, 3)) # revealed: int
|
||||
```
|
||||
|
||||
## Missing arguments
|
||||
|
||||
### No defaults or variadic
|
||||
|
||||
```py
|
||||
def f(x: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 13 [missing-argument] "No argument provided for required parameter `x` of function `f`"
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
### With default
|
||||
|
||||
```py
|
||||
def f(x: int, y: str = "foo") -> int:
|
||||
return 1
|
||||
|
||||
# error: 13 [missing-argument] "No argument provided for required parameter `x` of function `f`"
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
### Defaulted argument is not required
|
||||
|
||||
```py
|
||||
def f(x: int = 1) -> int:
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
### With variadic
|
||||
|
||||
```py
|
||||
def f(x: int, *y: str) -> int:
|
||||
return 1
|
||||
|
||||
# error: 13 [missing-argument] "No argument provided for required parameter `x` of function `f`"
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
### Variadic argument is not required
|
||||
|
||||
```py
|
||||
def f(*args: int) -> int:
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
### Keywords argument is not required
|
||||
|
||||
```py
|
||||
def f(**kwargs: int) -> int:
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
### Multiple
|
||||
|
||||
```py
|
||||
def f(x: int, y: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 13 [missing-argument] "No arguments provided for required parameters `x`, `y` of function `f`"
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
## Unknown argument
|
||||
|
||||
```py
|
||||
def f(x: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 20 [unknown-argument] "Argument `y` does not match any known parameter of function `f`"
|
||||
reveal_type(f(x=1, y=2)) # revealed: int
|
||||
```
|
||||
|
||||
## Parameter already assigned
|
||||
|
||||
```py
|
||||
def f(x: int) -> int:
|
||||
return 1
|
||||
|
||||
# error: 18 [parameter-already-assigned] "Multiple values provided for parameter `x` of function `f`"
|
||||
reveal_type(f(1, x=2)) # revealed: int
|
||||
```
|
||||
|
||||
## Special functions
|
||||
|
||||
Some functions require special handling in type inference. Here, we make sure that we still emit
|
||||
proper diagnostics in case of missing or superfluous arguments.
|
||||
|
||||
### `reveal_type`
|
||||
|
||||
```py
|
||||
from typing_extensions import reveal_type
|
||||
|
||||
# error: [missing-argument] "No argument provided for required parameter `obj` of function `reveal_type`"
|
||||
reveal_type() # revealed: Unknown
|
||||
|
||||
# error: [too-many-positional-arguments] "Too many positional arguments to function `reveal_type`: expected 1, got 2"
|
||||
reveal_type(1, 2) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
### `static_assert`
|
||||
|
||||
```py
|
||||
from knot_extensions import static_assert
|
||||
|
||||
# error: [missing-argument] "No argument provided for required parameter `condition` of function `static_assert`"
|
||||
# error: [static-assert-error]
|
||||
static_assert()
|
||||
|
||||
# error: [too-many-positional-arguments] "Too many positional arguments to function `static_assert`: expected 2, got 3"
|
||||
static_assert(True, 2, 3)
|
||||
```
|
||||
|
||||
### `len`
|
||||
|
||||
```py
|
||||
# error: [missing-argument] "No argument provided for required parameter `obj` of function `len`"
|
||||
len()
|
||||
|
||||
# error: [too-many-positional-arguments] "Too many positional arguments to function `len`: expected 1, got 2"
|
||||
len([], 1)
|
||||
```
|
||||
|
||||
### Type API predicates
|
||||
|
||||
```py
|
||||
from knot_extensions import is_subtype_of, is_fully_static
|
||||
|
||||
# error: [missing-argument]
|
||||
is_subtype_of()
|
||||
|
||||
# error: [missing-argument]
|
||||
is_subtype_of(int)
|
||||
|
||||
# error: [too-many-positional-arguments]
|
||||
is_subtype_of(int, int, int)
|
||||
|
||||
# error: [too-many-positional-arguments]
|
||||
is_subtype_of(int, int, int, int)
|
||||
|
||||
# error: [missing-argument]
|
||||
is_fully_static()
|
||||
|
||||
# error: [too-many-positional-arguments]
|
||||
is_fully_static(int, int)
|
||||
```
|
||||
@@ -1,44 +0,0 @@
|
||||
# Invalid signatures
|
||||
|
||||
## Multiple arguments with the same name
|
||||
|
||||
We always map a keyword argument to the first parameter of that name.
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax] "Duplicate parameter "x""
|
||||
def f(x: int, x: str) -> int:
|
||||
return 1
|
||||
|
||||
# error: 13 [missing-argument] "No argument provided for required parameter `x` of function `f`"
|
||||
# error: 18 [parameter-already-assigned] "Multiple values provided for parameter `x` of function `f`"
|
||||
reveal_type(f(1, x=2)) # revealed: int
|
||||
```
|
||||
|
||||
## Positional after non-positional
|
||||
|
||||
When parameter kinds are given in an invalid order, we emit a diagnostic and implicitly reorder them
|
||||
to the valid order:
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax] "Parameter cannot follow var-keyword parameter"
|
||||
def f(**kw: int, x: str) -> int:
|
||||
return 1
|
||||
|
||||
# error: 15 [invalid-argument-type] "Object of type `Literal[1]` cannot be assigned to parameter 1 (`x`) of function `f`; expected type `str`"
|
||||
reveal_type(f(1)) # revealed: int
|
||||
```
|
||||
|
||||
## Non-defaulted after defaulted
|
||||
|
||||
We emit a syntax diagnostic for this, but it doesn't cause any problems for binding.
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax] "Parameter without a default cannot follow a parameter with a default"
|
||||
def f(x: int = 1, y: str) -> int:
|
||||
return 1
|
||||
|
||||
reveal_type(f(y="foo")) # revealed: int
|
||||
# error: [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 1 (`x`) of function `f`; expected type `int`"
|
||||
# error: [missing-argument] "No argument provided for required parameter `y` of function `f`"
|
||||
reveal_type(f("foo")) # revealed: int
|
||||
```
|
||||
@@ -1,77 +0,0 @@
|
||||
# Unions in calls
|
||||
|
||||
## Union of return types
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
def f() -> int:
|
||||
return 1
|
||||
else:
|
||||
def f() -> str:
|
||||
return "foo"
|
||||
reveal_type(f()) # revealed: int | str
|
||||
```
|
||||
|
||||
## Calling with an unknown union
|
||||
|
||||
```py
|
||||
from nonexistent import f # error: [unresolved-import] "Cannot resolve import `nonexistent`"
|
||||
|
||||
def coinflip() -> bool:
|
||||
return True
|
||||
|
||||
if coinflip():
|
||||
def f() -> int:
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: Unknown | int
|
||||
```
|
||||
|
||||
## Non-callable elements in a union
|
||||
|
||||
Calling a union with a non-callable element should emit a diagnostic.
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
f = 1
|
||||
else:
|
||||
def f() -> int:
|
||||
return 1
|
||||
x = f() # error: "Object of type `Literal[1] | Literal[f]` is not callable (due to union element `Literal[1]`)"
|
||||
reveal_type(x) # revealed: Unknown | int
|
||||
```
|
||||
|
||||
## Multiple non-callable elements in a union
|
||||
|
||||
Calling a union with multiple non-callable elements should mention all of them in the diagnostic.
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
if flag:
|
||||
f = 1
|
||||
elif flag2:
|
||||
f = "foo"
|
||||
else:
|
||||
def f() -> int:
|
||||
return 1
|
||||
# error: "Object of type `Literal[1, "foo"] | Literal[f]` is not callable (due to union elements Literal[1], Literal["foo"])"
|
||||
# revealed: Unknown | int
|
||||
reveal_type(f())
|
||||
```
|
||||
|
||||
## All non-callable union elements
|
||||
|
||||
Calling a union with no callable elements can emit a simpler diagnostic.
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
f = 1
|
||||
else:
|
||||
f = "foo"
|
||||
|
||||
x = f() # error: "Object of type `Literal[1, "foo"]` is not callable"
|
||||
reveal_type(x) # revealed: Unknown
|
||||
```
|
||||
@@ -1,43 +0,0 @@
|
||||
# Comparison: Byte literals
|
||||
|
||||
These tests assert that we infer precise `Literal` types for comparisons between objects inferred as
|
||||
having `Literal` bytes types:
|
||||
|
||||
```py
|
||||
reveal_type(b"abc" == b"abc") # revealed: Literal[True]
|
||||
reveal_type(b"abc" == b"ab") # revealed: Literal[False]
|
||||
|
||||
reveal_type(b"abc" != b"abc") # revealed: Literal[False]
|
||||
reveal_type(b"abc" != b"ab") # revealed: Literal[True]
|
||||
|
||||
reveal_type(b"abc" < b"abd") # revealed: Literal[True]
|
||||
reveal_type(b"abc" < b"abb") # revealed: Literal[False]
|
||||
|
||||
reveal_type(b"abc" <= b"abc") # revealed: Literal[True]
|
||||
reveal_type(b"abc" <= b"abb") # revealed: Literal[False]
|
||||
|
||||
reveal_type(b"abc" > b"abd") # revealed: Literal[False]
|
||||
reveal_type(b"abc" > b"abb") # revealed: Literal[True]
|
||||
|
||||
reveal_type(b"abc" >= b"abc") # revealed: Literal[True]
|
||||
reveal_type(b"abc" >= b"abd") # revealed: Literal[False]
|
||||
|
||||
reveal_type(b"" in b"") # revealed: Literal[True]
|
||||
reveal_type(b"" in b"abc") # revealed: Literal[True]
|
||||
reveal_type(b"abc" in b"") # revealed: Literal[False]
|
||||
reveal_type(b"ab" in b"abc") # revealed: Literal[True]
|
||||
reveal_type(b"abc" in b"abc") # revealed: Literal[True]
|
||||
reveal_type(b"d" in b"abc") # revealed: Literal[False]
|
||||
reveal_type(b"ac" in b"abc") # revealed: Literal[False]
|
||||
reveal_type(b"\x81\x82" in b"\x80\x81\x82") # revealed: Literal[True]
|
||||
reveal_type(b"\x82\x83" in b"\x80\x81\x82") # revealed: Literal[False]
|
||||
|
||||
reveal_type(b"ab" not in b"abc") # revealed: Literal[False]
|
||||
reveal_type(b"ac" not in b"abc") # revealed: Literal[True]
|
||||
|
||||
reveal_type(b"abc" is b"abc") # revealed: bool
|
||||
reveal_type(b"abc" is b"ab") # revealed: Literal[False]
|
||||
|
||||
reveal_type(b"abc" is not b"abc") # revealed: bool
|
||||
reveal_type(b"abc" is not b"ab") # revealed: Literal[True]
|
||||
```
|
||||
@@ -1,33 +0,0 @@
|
||||
# Identity tests
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
|
||||
def _(a1: A, a2: A, o: object):
|
||||
n1 = None
|
||||
n2 = None
|
||||
|
||||
reveal_type(a1 is a1) # revealed: bool
|
||||
reveal_type(a1 is a2) # revealed: bool
|
||||
|
||||
reveal_type(n1 is n1) # revealed: Literal[True]
|
||||
reveal_type(n1 is n2) # revealed: Literal[True]
|
||||
|
||||
reveal_type(a1 is n1) # revealed: Literal[False]
|
||||
reveal_type(n1 is a1) # revealed: Literal[False]
|
||||
|
||||
reveal_type(a1 is o) # revealed: bool
|
||||
reveal_type(n1 is o) # revealed: bool
|
||||
|
||||
reveal_type(a1 is not a1) # revealed: bool
|
||||
reveal_type(a1 is not a2) # revealed: bool
|
||||
|
||||
reveal_type(n1 is not n1) # revealed: Literal[False]
|
||||
reveal_type(n1 is not n2) # revealed: Literal[False]
|
||||
|
||||
reveal_type(a1 is not n1) # revealed: Literal[True]
|
||||
reveal_type(n1 is not a1) # revealed: Literal[True]
|
||||
|
||||
reveal_type(a1 is not o) # revealed: bool
|
||||
reveal_type(n1 is not o) # revealed: bool
|
||||
```
|
||||
@@ -1,160 +0,0 @@
|
||||
# Comparison: Membership Test
|
||||
|
||||
In Python, the term "membership test operators" refers to the operators `in` and `not in`. To
|
||||
customize their behavior, classes can implement one of the special methods `__contains__`,
|
||||
`__iter__`, or `__getitem__`.
|
||||
|
||||
For references, see:
|
||||
|
||||
- <https://docs.python.org/3/reference/expressions.html#membership-test-details>
|
||||
- <https://docs.python.org/3/reference/datamodel.html#object.__contains__>
|
||||
- <https://snarky.ca/unravelling-membership-testing/>
|
||||
|
||||
## Implements `__contains__`
|
||||
|
||||
Classes can support membership tests by implementing the `__contains__` method:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __contains__(self, item: str) -> bool:
|
||||
return True
|
||||
|
||||
reveal_type("hello" in A()) # revealed: bool
|
||||
reveal_type("hello" not in A()) # revealed: bool
|
||||
# TODO: should emit diagnostic, need to check arg type, will fail
|
||||
reveal_type(42 in A()) # revealed: bool
|
||||
reveal_type(42 not in A()) # revealed: bool
|
||||
```
|
||||
|
||||
## Implements `__iter__`
|
||||
|
||||
Classes that don't implement `__contains__`, but do implement `__iter__`, also support containment
|
||||
checks; the needle will be sought in their iterated items:
|
||||
|
||||
```py
|
||||
class StringIterator:
|
||||
def __next__(self) -> str:
|
||||
return "foo"
|
||||
|
||||
class A:
|
||||
def __iter__(self) -> StringIterator:
|
||||
return StringIterator()
|
||||
|
||||
reveal_type("hello" in A()) # revealed: bool
|
||||
reveal_type("hello" not in A()) # revealed: bool
|
||||
reveal_type(42 in A()) # revealed: bool
|
||||
reveal_type(42 not in A()) # revealed: bool
|
||||
```
|
||||
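A runtime sketch of the `__iter__` fallback (illustrative, not one of the original assertions):

```py
class Letters:
    def __iter__(self):
        return iter(["a", "b", "c"])

print("b" in Letters())  # True: Python iterates and compares each item to the needle
print("z" in Letters())  # False: the iterator is exhausted without a match
```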
|
||||
## Implements `__getitem__`
|
||||
|
||||
The final fallback is to implement `__getitem__` for integer keys. Python will call `__getitem__`
|
||||
with `0`, `1`, `2`... until either the needle is found (leading the membership test to evaluate to
|
||||
`True`) or `__getitem__` raises `IndexError` (the raised exception is swallowed, but results in the
|
||||
membership test evaluating to `False`).
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __getitem__(self, key: int) -> str:
|
||||
return "foo"
|
||||
|
||||
reveal_type("hello" in A()) # revealed: bool
|
||||
reveal_type("hello" not in A()) # revealed: bool
|
||||
reveal_type(42 in A()) # revealed: bool
|
||||
reveal_type(42 not in A()) # revealed: bool
|
||||
```
|
||||
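The probing behaviour is easy to observe at runtime (illustrative sketch; `Probe` is a hypothetical
name):

```py
class Probe:
    def __getitem__(self, index: int) -> str:
        print(f"__getitem__({index})")
        if index >= 2:
            raise IndexError
        return "ab"[index]

print("b" in Probe())  # probes indices 0 and 1, then evaluates to True
print("z" in Probe())  # probes 0, 1, 2; the IndexError is swallowed and the result is False
```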
|
||||
## Wrong Return Type
|
||||
|
||||
Python coerces the results of containment checks to `bool`, even if `__contains__` returns a
|
||||
non-bool:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __contains__(self, item: str) -> str:
|
||||
return "foo"
|
||||
|
||||
reveal_type("hello" in A()) # revealed: bool
|
||||
reveal_type("hello" not in A()) # revealed: bool
|
||||
```
|
||||
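The coercion is observable at runtime (illustrative sketch):

```py
class Haystack:
    def __contains__(self, item):
        return "a non-empty string"  # truthy, but not a bool

result = "x" in Haystack()
print(result, type(result))  # True <class 'bool'>
```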
|
||||
## Literal Result for `in` and `not in`
|
||||
|
||||
`__contains__` with a literal return type may result in a `BooleanLiteral` outcome.
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class AlwaysTrue:
|
||||
def __contains__(self, item: int) -> Literal[1]:
|
||||
return 1
|
||||
|
||||
class AlwaysFalse:
|
||||
def __contains__(self, item: int) -> Literal[""]:
|
||||
return ""
|
||||
|
||||
reveal_type(42 in AlwaysTrue()) # revealed: Literal[True]
|
||||
reveal_type(42 not in AlwaysTrue()) # revealed: Literal[False]
|
||||
|
||||
reveal_type(42 in AlwaysFalse()) # revealed: Literal[False]
|
||||
reveal_type(42 not in AlwaysFalse()) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
## No Fallback for `__contains__`
|
||||
|
||||
If `__contains__` is implemented, checking membership of a type it doesn't accept is an error; it
|
||||
doesn't result in a fallback to `__iter__` or `__getitem__`:
|
||||
|
||||
```py
|
||||
class CheckContains: ...
|
||||
class CheckIter: ...
|
||||
class CheckGetItem: ...
|
||||
|
||||
class CheckIterIterator:
|
||||
def __next__(self) -> CheckIter:
|
||||
return CheckIter()
|
||||
|
||||
class A:
|
||||
def __contains__(self, item: CheckContains) -> bool:
|
||||
return True
|
||||
|
||||
def __iter__(self) -> CheckIterIterator:
|
||||
return CheckIterIterator()
|
||||
|
||||
def __getitem__(self, key: int) -> CheckGetItem:
|
||||
return CheckGetItem()
|
||||
|
||||
reveal_type(CheckContains() in A()) # revealed: bool
|
||||
|
||||
# TODO: should emit diagnostic, need to check arg type,
|
||||
# should not fall back to __iter__ or __getitem__
|
||||
reveal_type(CheckIter() in A()) # revealed: bool
|
||||
reveal_type(CheckGetItem() in A()) # revealed: bool
|
||||
|
||||
class B:
|
||||
def __iter__(self) -> CheckIterIterator:
|
||||
return CheckIterIterator()
|
||||
|
||||
def __getitem__(self, key: int) -> CheckGetItem:
|
||||
return CheckGetItem()
|
||||
|
||||
reveal_type(CheckIter() in B()) # revealed: bool
|
||||
# Always use `__iter__`, regardless of iterated type; there's no NotImplemented
|
||||
# in this case, so there's no fallback to `__getitem__`
|
||||
reveal_type(CheckGetItem() in B()) # revealed: bool
|
||||
```
|
||||
|
||||
## Invalid Old-Style Iteration
|
||||
|
||||
If `__getitem__` is implemented but does not accept integer arguments, then the membership test is
|
||||
not supported and should trigger a diagnostic.
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __getitem__(self, key: str) -> str:
|
||||
return "foo"
|
||||
|
||||
# TODO should emit a diagnostic
|
||||
reveal_type(42 in A()) # revealed: bool
|
||||
reveal_type("hello" in A()) # revealed: bool
|
||||
```
|
||||
@@ -1,320 +0,0 @@
|
||||
# Comparison: Rich Comparison
|
||||
|
||||
Rich comparison operations (`==`, `!=`, `<`, `<=`, `>`, `>=`) in Python are implemented through
|
||||
double-underscore methods that allow customization of comparison behavior.
|
||||
|
||||
For references, see:
|
||||
|
||||
- <https://docs.python.org/3/reference/datamodel.html#object.__lt__>
|
||||
- <https://snarky.ca/unravelling-rich-comparison-operators/>
|
||||
|
||||
## Rich Comparison Dunder Implementations For Same Class
|
||||
|
||||
Classes can support rich comparison by implementing dunder methods like `__eq__`, `__ne__`, etc. The
|
||||
most common case involves implementing these methods for the same type:
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, other: A) -> int:
|
||||
return 42
|
||||
|
||||
def __ne__(self, other: A) -> float:
|
||||
return 42.0
|
||||
|
||||
def __lt__(self, other: A) -> str:
|
||||
return "42"
|
||||
|
||||
def __le__(self, other: A) -> bytes:
|
||||
return b"42"
|
||||
|
||||
def __gt__(self, other: A) -> list:
|
||||
return [42]
|
||||
|
||||
def __ge__(self, other: A) -> set:
|
||||
return {42}
|
||||
|
||||
reveal_type(A() == A()) # revealed: int
|
||||
reveal_type(A() != A()) # revealed: float
|
||||
reveal_type(A() < A()) # revealed: str
|
||||
reveal_type(A() <= A()) # revealed: bytes
|
||||
reveal_type(A() > A()) # revealed: list
|
||||
reveal_type(A() >= A()) # revealed: set
|
||||
```
|
||||
|
||||
## Rich Comparison Dunder Implementations for Other Class
|
||||
|
||||
In some cases, classes may implement rich comparison dunder methods for comparisons with a different
|
||||
type:
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, other: B) -> int:
|
||||
return 42
|
||||
|
||||
def __ne__(self, other: B) -> float:
|
||||
return 42.0
|
||||
|
||||
def __lt__(self, other: B) -> str:
|
||||
return "42"
|
||||
|
||||
def __le__(self, other: B) -> bytes:
|
||||
return b"42"
|
||||
|
||||
def __gt__(self, other: B) -> list:
|
||||
return [42]
|
||||
|
||||
def __ge__(self, other: B) -> set:
|
||||
return {42}
|
||||
|
||||
class B: ...
|
||||
|
||||
reveal_type(A() == B()) # revealed: int
|
||||
reveal_type(A() != B()) # revealed: float
|
||||
reveal_type(A() < B()) # revealed: str
|
||||
reveal_type(A() <= B()) # revealed: bytes
|
||||
reveal_type(A() > B()) # revealed: list
|
||||
reveal_type(A() >= B()) # revealed: set
|
||||
```
|
||||
|
||||
## Reflected Comparisons
|
||||
|
||||
Fallback to the right-hand side’s comparison methods occurs when the left-hand side does not define
|
||||
them. Note: class `B` has its own `__eq__` and `__ne__` methods to override those of `object`, but
|
||||
these methods will be ignored here because they require a mismatched operand type.
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, other: B) -> int:
|
||||
return 42
|
||||
|
||||
def __ne__(self, other: B) -> float:
|
||||
return 42.0
|
||||
|
||||
def __lt__(self, other: B) -> str:
|
||||
return "42"
|
||||
|
||||
def __le__(self, other: B) -> bytes:
|
||||
return b"42"
|
||||
|
||||
def __gt__(self, other: B) -> list:
|
||||
return [42]
|
||||
|
||||
def __ge__(self, other: B) -> set:
|
||||
return {42}
|
||||
|
||||
class B:
|
||||
# To override builtins.object.__eq__ and builtins.object.__ne__
|
||||
# TODO these should emit an invalid override diagnostic
|
||||
def __eq__(self, other: str) -> B:
|
||||
return B()
|
||||
|
||||
def __ne__(self, other: str) -> B:
|
||||
return B()
|
||||
|
||||
# TODO: should be `int` and `float`.
|
||||
# Need to check arg type and fall back to `rhs.__eq__` and `rhs.__ne__`.
|
||||
#
|
||||
# Because `object.__eq__` and `object.__ne__` accept `object` in typeshed,
|
||||
# this can only happen with an invalid override of these methods,
|
||||
# but we still support it.
|
||||
reveal_type(B() == A()) # revealed: B
|
||||
reveal_type(B() != A()) # revealed: B
|
||||
|
||||
reveal_type(B() < A()) # revealed: list
|
||||
reveal_type(B() <= A()) # revealed: set
|
||||
|
||||
reveal_type(B() > A()) # revealed: str
|
||||
reveal_type(B() >= A()) # revealed: bytes
|
||||
|
||||
class C:
|
||||
def __gt__(self, other: C) -> int:
|
||||
return 42
|
||||
|
||||
def __ge__(self, other: C) -> float:
|
||||
return 42.0
|
||||
|
||||
reveal_type(C() < C()) # revealed: int
|
||||
reveal_type(C() <= C()) # revealed: float
|
||||
```
|
||||
|
||||
## Reflected Comparisons with Subclasses
|
||||
|
||||
When subclasses override comparison methods, these overridden methods take precedence over those in
|
||||
the parent class. Class `B` inherits from `A` and redefines comparison methods to return types other
|
||||
than `A`.
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
def __ne__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
def __lt__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
def __le__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
def __gt__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
def __ge__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
class B(A):
|
||||
def __eq__(self, other: A) -> int:
|
||||
return 42
|
||||
|
||||
def __ne__(self, other: A) -> float:
|
||||
return 42.0
|
||||
|
||||
def __lt__(self, other: A) -> str:
|
||||
return "42"
|
||||
|
||||
def __le__(self, other: A) -> bytes:
|
||||
return b"42"
|
||||
|
||||
def __gt__(self, other: A) -> list:
|
||||
return [42]
|
||||
|
||||
def __ge__(self, other: A) -> set:
|
||||
return {42}
|
||||
|
||||
reveal_type(A() == B()) # revealed: int
|
||||
reveal_type(A() != B()) # revealed: float
|
||||
|
||||
reveal_type(A() < B()) # revealed: list
|
||||
reveal_type(A() <= B()) # revealed: set
|
||||
|
||||
reveal_type(A() > B()) # revealed: str
|
||||
reveal_type(A() >= B()) # revealed: bytes
|
||||
```
|
||||
|
||||
## Reflected Comparisons with Subclass But Falls Back to LHS
|
||||
|
||||
In the case of a subclass, the right-hand side has priority. However, if the overridden dunder
|
||||
method has an mismatched type to operand, the comparison will fall back to the left-hand side.
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __lt__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
def __gt__(self, other: A) -> A:
|
||||
return A()
|
||||
|
||||
class B(A):
|
||||
def __lt__(self, other: int) -> B:
|
||||
return B()
|
||||
|
||||
def __gt__(self, other: int) -> B:
|
||||
return B()
|
||||
|
||||
# TODO: should be `A`, need to check argument type and fall back to LHS method
|
||||
reveal_type(A() < B()) # revealed: B
|
||||
reveal_type(A() > B()) # revealed: B
|
||||
```
|
||||
|
||||
## Operations involving instances of classes inheriting from `Any`
|
||||
|
||||
`Any` and `Unknown` represent a set of possible runtime objects, wherein the bounds of the set are
|
||||
unknown. Whether the left-hand operand's dunder or the right-hand operand's reflected dunder depends
|
||||
on whether the right-hand operand is an instance of a class that is a subclass of the left-hand
|
||||
operand's class and overrides the reflected dunder. In the following example, because of the
|
||||
unknowable nature of `Any`/`Unknown`, we must consider both possibilities: `Any`/`Unknown` might
|
||||
resolve to an unknown third class that inherits from `X` and overrides `__gt__`; but it also might
|
||||
not. Thus, the correct answer here for the `reveal_type` is `int | Unknown`.
|
||||
|
||||
(This test is referenced from `mdtest/binary/instances.md`)
|
||||
|
||||
```py
|
||||
from does_not_exist import Foo # error: [unresolved-import]
|
||||
|
||||
reveal_type(Foo) # revealed: Unknown
|
||||
|
||||
class X:
|
||||
def __lt__(self, other: object) -> int:
|
||||
return 42
|
||||
|
||||
class Y(Foo): ...
|
||||
|
||||
# TODO: Should be `int | Unknown`; see above discussion.
|
||||
reveal_type(X() < Y()) # revealed: int
|
||||
```
|
||||
|
||||
## Equality and Inequality Fallback
|
||||
|
||||
This test confirms that `==` and `!=` comparisons default to identity comparisons (`is`, `is not`)
|
||||
when argument types do not match the method signature.
|
||||
|
||||
Please refer to the [docs](https://docs.python.org/3/reference/datamodel.html#object.__eq__)
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
# TODO both these overrides should emit invalid-override diagnostic
|
||||
def __eq__(self, other: int) -> A:
|
||||
return A()
|
||||
|
||||
def __ne__(self, other: int) -> A:
|
||||
return A()
|
||||
|
||||
# TODO: it should be `bool`, need to check arg type and fall back to `is` and `is not`
|
||||
reveal_type(A() == A()) # revealed: A
|
||||
reveal_type(A() != A()) # revealed: A
|
||||
```
|
||||
|
||||
## Object Comparisons with Typeshed
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
|
||||
reveal_type(A() == object()) # revealed: bool
|
||||
reveal_type(A() != object()) # revealed: bool
|
||||
reveal_type(object() == A()) # revealed: bool
|
||||
reveal_type(object() != A()) # revealed: bool
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `object`"
|
||||
# revealed: Unknown
|
||||
reveal_type(A() < object())
|
||||
```
|
||||
|
||||
## Numbers Comparison with typeshed
|
||||
|
||||
```py
|
||||
reveal_type(1 == 1.0) # revealed: bool
|
||||
reveal_type(1 != 1.0) # revealed: bool
|
||||
reveal_type(1 < 1.0) # revealed: bool
|
||||
reveal_type(1 <= 1.0) # revealed: bool
|
||||
reveal_type(1 > 1.0) # revealed: bool
|
||||
reveal_type(1 >= 1.0) # revealed: bool
|
||||
|
||||
reveal_type(1 == 2j) # revealed: bool
|
||||
reveal_type(1 != 2j) # revealed: bool
|
||||
|
||||
# TODO: should be Unknown and emit diagnostic,
|
||||
# need to check arg type and should be failed
|
||||
reveal_type(1 < 2j) # revealed: bool
|
||||
reveal_type(1 <= 2j) # revealed: bool
|
||||
reveal_type(1 > 2j) # revealed: bool
|
||||
reveal_type(1 >= 2j) # revealed: bool
|
||||
|
||||
def f(x: bool, y: int):
|
||||
reveal_type(x < y) # revealed: bool
|
||||
reveal_type(y < x) # revealed: bool
|
||||
reveal_type(4.2 < x) # revealed: bool
|
||||
reveal_type(x < 4.2) # revealed: bool
|
||||
```
|
||||
@@ -1,27 +0,0 @@
|
||||
# Comparison: Integers
|
||||
|
||||
## Integer literals
|
||||
|
||||
```py
|
||||
reveal_type(1 == 1 == True) # revealed: Literal[True]
|
||||
reveal_type(1 == 1 == 2 == 4) # revealed: Literal[False]
|
||||
reveal_type(False < True <= 2 < 3 != 6) # revealed: Literal[True]
|
||||
reveal_type(1 < 1) # revealed: Literal[False]
|
||||
reveal_type(1 > 1) # revealed: Literal[False]
|
||||
reveal_type(1 is 1) # revealed: bool
|
||||
reveal_type(1 is not 1) # revealed: bool
|
||||
reveal_type(1 is 2) # revealed: Literal[False]
|
||||
reveal_type(1 is not 7) # revealed: Literal[True]
|
||||
# TODO: should be Unknown, and emit diagnostic, once we check call argument types
|
||||
reveal_type(1 <= "" and 0 < 1) # revealed: bool
|
||||
```
|
||||
|
||||
## Integer instance
|
||||
|
||||
```py
|
||||
# TODO: implement lookup of `__eq__` on typeshed `int` stub.
|
||||
def _(a: int, b: int):
|
||||
reveal_type(1 == a) # revealed: bool
|
||||
reveal_type(9 < a) # revealed: bool
|
||||
reveal_type(a < b) # revealed: bool
|
||||
```
|
||||
@@ -1,141 +0,0 @@
|
||||
# Comparison: Intersections
|
||||
|
||||
## Positive contributions
|
||||
|
||||
If we have an intersection type `A & B` and we get a definitive true/false answer for one of the
|
||||
types, we can infer that the result for the intersection type is also true/false:
|
||||
|
||||
```py
|
||||
class Base: ...
|
||||
|
||||
class Child1(Base):
|
||||
def __eq__(self, other) -> Literal[True]:
|
||||
return True
|
||||
|
||||
class Child2(Base): ...
|
||||
|
||||
def _(x: Base):
|
||||
c1 = Child1()
|
||||
|
||||
# Create an intersection type through narrowing:
|
||||
if isinstance(x, Child1):
|
||||
if isinstance(x, Child2):
|
||||
reveal_type(x) # revealed: Child1 & Child2
|
||||
|
||||
reveal_type(x == 1) # revealed: Literal[True]
|
||||
|
||||
# Other comparison operators fall back to the base type:
|
||||
reveal_type(x > 1) # revealed: bool
|
||||
reveal_type(x is c1) # revealed: bool
|
||||
```
|
||||
|
||||
## Negative contributions
|
||||
|
||||
Negative contributions to the intersection type only allow simplifications in a few special cases
|
||||
(equality and identity comparisons).
|
||||
|
||||
### Equality comparisons
|
||||
|
||||
#### Literal strings
|
||||
|
||||
```py
|
||||
x = "x" * 1_000_000_000
|
||||
y = "y" * 1_000_000_000
|
||||
reveal_type(x) # revealed: LiteralString
|
||||
|
||||
if x != "abc":
|
||||
reveal_type(x) # revealed: LiteralString & ~Literal["abc"]
|
||||
|
||||
reveal_type(x == "abc") # revealed: Literal[False]
|
||||
reveal_type("abc" == x) # revealed: Literal[False]
|
||||
reveal_type(x == "something else") # revealed: bool
|
||||
reveal_type("something else" == x) # revealed: bool
|
||||
|
||||
reveal_type(x != "abc") # revealed: Literal[True]
|
||||
reveal_type("abc" != x) # revealed: Literal[True]
|
||||
reveal_type(x != "something else") # revealed: bool
|
||||
reveal_type("something else" != x) # revealed: bool
|
||||
|
||||
reveal_type(x == y) # revealed: bool
|
||||
reveal_type(y == x) # revealed: bool
|
||||
reveal_type(x != y) # revealed: bool
|
||||
reveal_type(y != x) # revealed: bool
|
||||
|
||||
reveal_type(x >= "abc") # revealed: bool
|
||||
reveal_type("abc" >= x) # revealed: bool
|
||||
|
||||
reveal_type(x in "abc") # revealed: bool
|
||||
reveal_type("abc" in x) # revealed: bool
|
||||
```
|
||||
|
||||
#### Integers
|
||||
|
||||
```py
|
||||
def _(x: int):
|
||||
if x != 1:
|
||||
reveal_type(x) # revealed: int & ~Literal[1]
|
||||
|
||||
reveal_type(x != 1) # revealed: Literal[True]
|
||||
reveal_type(x != 2) # revealed: bool
|
||||
|
||||
reveal_type(x == 1) # revealed: Literal[False]
|
||||
reveal_type(x == 2) # revealed: bool
|
||||
```
|
||||
|
||||
### Identity comparisons
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
|
||||
def _(o: object):
|
||||
a = A()
|
||||
n = None
|
||||
|
||||
if o is not None:
|
||||
reveal_type(o) # revealed: object & ~None
|
||||
|
||||
reveal_type(o is n) # revealed: Literal[False]
|
||||
reveal_type(o is not n) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
## Diagnostics
|
||||
|
||||
### Unsupported operators for positive contributions
|
||||
|
||||
Raise an error if any of the positive contributions to the intersection type are unsupported for the
|
||||
given operator:
|
||||
|
||||
```py
|
||||
class Container:
|
||||
def __contains__(self, x) -> bool: ...
|
||||
|
||||
class NonContainer: ...
|
||||
|
||||
def _(x: object):
|
||||
if isinstance(x, Container):
|
||||
if isinstance(x, NonContainer):
|
||||
reveal_type(x) # revealed: Container & NonContainer
|
||||
|
||||
# error: [unsupported-operator] "Operator `in` is not supported for types `int` and `NonContainer`"
|
||||
reveal_type(2 in x) # revealed: bool
|
||||
```
|
||||
|
||||
### Unsupported operators for negative contributions
|
||||
|
||||
Do *not* raise an error if any of the negative contributions to the intersection type are
|
||||
unsupported for the given operator:
|
||||
|
||||
```py
|
||||
class Container:
|
||||
def __contains__(self, x) -> bool: ...
|
||||
|
||||
class NonContainer: ...
|
||||
|
||||
def _(x: object):
|
||||
if isinstance(x, Container):
|
||||
if not isinstance(x, NonContainer):
|
||||
reveal_type(x) # revealed: Container & ~NonContainer
|
||||
|
||||
# No error here!
|
||||
reveal_type(2 in x) # revealed: bool
|
||||
```
|
||||
@@ -1,41 +0,0 @@
|
||||
# Comparison: Non boolean returns
|
||||
|
||||
Walking through examples:
|
||||
|
||||
- `a = A() < B() < C()`
|
||||
|
||||
1. `A() < B() and B() < C()` - split in N comparison
|
||||
1. `A()` and `B()` - evaluate outcome types
|
||||
1. `bool` and `bool` - evaluate truthiness
|
||||
1. `A | B` - union of "first true" types
|
||||
|
||||
- `b = 0 < 1 < A() < 3`
|
||||
|
||||
1. `0 < 1 and 1 < A() and A() < 3` - split in N comparison
|
||||
1. `True` and `bool` and `A` - evaluate outcome types
|
||||
1. `True` and `bool` and `bool` - evaluate truthiness
|
||||
1. `bool | A` - union of "true" types
|
||||
|
||||
- `c = 10 < 0 < A() < B() < C()` short-circuit to False
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __lt__(self, other) -> A: ...
|
||||
|
||||
class B:
|
||||
def __lt__(self, other) -> B: ...
|
||||
|
||||
class C:
|
||||
def __lt__(self, other) -> C: ...
|
||||
|
||||
x = A() < B() < C()
|
||||
reveal_type(x) # revealed: A & ~AlwaysTruthy | B
|
||||
|
||||
y = 0 < 1 < A() < 3
|
||||
reveal_type(y) # revealed: Literal[False] | A
|
||||
|
||||
z = 10 < 0 < A() < B() < C()
|
||||
reveal_type(z) # revealed: Literal[False]
|
||||
```
|
||||
@@ -1,19 +0,0 @@
|
||||
# Comparison: Strings
|
||||
|
||||
## String literals
|
||||
|
||||
```py
|
||||
def _(x: str):
|
||||
reveal_type("abc" == "abc") # revealed: Literal[True]
|
||||
reveal_type("ab_cd" <= "ab_ce") # revealed: Literal[True]
|
||||
reveal_type("abc" in "ab cd") # revealed: Literal[False]
|
||||
reveal_type("" not in "hello") # revealed: Literal[False]
|
||||
reveal_type("--" is "--") # revealed: bool
|
||||
reveal_type("A" is "B") # revealed: Literal[False]
|
||||
reveal_type("--" is not "--") # revealed: bool
|
||||
reveal_type("A" is not "B") # revealed: Literal[True]
|
||||
reveal_type(x < "...") # revealed: bool
|
||||
|
||||
# ensure we're not comparing the interned salsa symbols, which compare by order of declaration.
|
||||
reveal_type("ab" < "ab_cd") # revealed: Literal[True]
|
||||
```
|
||||
@@ -1,324 +0,0 @@
|
||||
# Comparison: Tuples
|
||||
|
||||
## Heterogeneous
|
||||
|
||||
For tuples like `tuple[int, str, Literal[1]]`
|
||||
|
||||
### Value Comparisons
|
||||
|
||||
"Value Comparisons" refers to the operators: `==`, `!=`, `<`, `<=`, `>`, `>=`
|
||||
|
||||
#### Results without Ambiguity
|
||||
|
||||
Cases where the result can be definitively inferred as a `BooleanLiteral`.
|
||||
|
||||
```py
|
||||
a = (1, "test", (3, 13), True)
|
||||
b = (1, "test", (3, 14), False)
|
||||
|
||||
reveal_type(a == a) # revealed: Literal[True]
|
||||
reveal_type(a != a) # revealed: Literal[False]
|
||||
reveal_type(a < a) # revealed: Literal[False]
|
||||
reveal_type(a <= a) # revealed: Literal[True]
|
||||
reveal_type(a > a) # revealed: Literal[False]
|
||||
reveal_type(a >= a) # revealed: Literal[True]
|
||||
|
||||
reveal_type(a == b) # revealed: Literal[False]
|
||||
reveal_type(a != b) # revealed: Literal[True]
|
||||
reveal_type(a < b) # revealed: Literal[True]
|
||||
reveal_type(a <= b) # revealed: Literal[True]
|
||||
reveal_type(a > b) # revealed: Literal[False]
|
||||
reveal_type(a >= b) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
Even when tuples have different lengths, comparisons should be handled appropriately.
|
||||
|
||||
```py path=different_length.py
|
||||
a = (1, 2, 3)
|
||||
b = (1, 2, 3, 4)
|
||||
|
||||
reveal_type(a == b) # revealed: Literal[False]
|
||||
reveal_type(a != b) # revealed: Literal[True]
|
||||
reveal_type(a < b) # revealed: Literal[True]
|
||||
reveal_type(a <= b) # revealed: Literal[True]
|
||||
reveal_type(a > b) # revealed: Literal[False]
|
||||
reveal_type(a >= b) # revealed: Literal[False]
|
||||
|
||||
c = ("a", "b", "c", "d")
|
||||
d = ("a", "b", "c")
|
||||
|
||||
reveal_type(c == d) # revealed: Literal[False]
|
||||
reveal_type(c != d) # revealed: Literal[True]
|
||||
reveal_type(c < d) # revealed: Literal[False]
|
||||
reveal_type(c <= d) # revealed: Literal[False]
|
||||
reveal_type(c > d) # revealed: Literal[True]
|
||||
reveal_type(c >= d) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
#### Results with Ambiguity
|
||||
|
||||
```py
|
||||
def _(x: bool, y: int):
|
||||
a = (x,)
|
||||
b = (y,)
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: bool
|
||||
reveal_type(a <= a) # revealed: bool
|
||||
reveal_type(a > a) # revealed: bool
|
||||
reveal_type(a >= a) # revealed: bool
|
||||
|
||||
reveal_type(a == b) # revealed: bool
|
||||
reveal_type(a != b) # revealed: bool
|
||||
reveal_type(a < b) # revealed: bool
|
||||
reveal_type(a <= b) # revealed: bool
|
||||
reveal_type(a > b) # revealed: bool
|
||||
reveal_type(a >= b) # revealed: bool
|
||||
```
|
||||
|
||||
#### Comparison Unsupported
|
||||
|
||||
If two tuples contain types that do not support comparison, the result may be `Unknown`. However,
|
||||
`==` and `!=` are exceptions and can still provide definite results.
|
||||
|
||||
```py
|
||||
a = (1, 2)
|
||||
b = (1, "hello")
|
||||
|
||||
# TODO: should be Literal[False], once we implement (in)equality for mismatched literals
|
||||
reveal_type(a == b) # revealed: bool
|
||||
|
||||
# TODO: should be Literal[True], once we implement (in)equality for mismatched literals
|
||||
reveal_type(a != b) # revealed: bool
|
||||
|
||||
# TODO: should be Unknown and add more informative diagnostics
|
||||
reveal_type(a < b) # revealed: bool
|
||||
reveal_type(a <= b) # revealed: bool
|
||||
reveal_type(a > b) # revealed: bool
|
||||
reveal_type(a >= b) # revealed: bool
|
||||
```
|
||||
|
||||
However, if the lexicographic comparison completes without reaching a point where str and int are
|
||||
compared, Python will still produce a result based on the prior elements.
|
||||
|
||||
```py path=short_circuit.py
|
||||
a = (1, 2)
|
||||
b = (999999, "hello")
|
||||
|
||||
reveal_type(a == b) # revealed: Literal[False]
|
||||
reveal_type(a != b) # revealed: Literal[True]
|
||||
reveal_type(a < b) # revealed: Literal[True]
|
||||
reveal_type(a <= b) # revealed: Literal[True]
|
||||
reveal_type(a > b) # revealed: Literal[False]
|
||||
reveal_type(a >= b) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
#### Matryoshka Tuples
|
||||
|
||||
```py
|
||||
a = (1, True, "Hello")
|
||||
b = (a, a, a)
|
||||
c = (b, b, b)
|
||||
|
||||
reveal_type(c == c) # revealed: Literal[True]
|
||||
reveal_type(c != c) # revealed: Literal[False]
|
||||
reveal_type(c < c) # revealed: Literal[False]
|
||||
reveal_type(c <= c) # revealed: Literal[True]
|
||||
reveal_type(c > c) # revealed: Literal[False]
|
||||
reveal_type(c >= c) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
#### Non Boolean Rich Comparisons
|
||||
|
||||
Rich comparison methods defined in a class affect tuple comparisons as well. Proper type inference
|
||||
should be possible even in cases where these methods return non-boolean types.
|
||||
|
||||
Note: Tuples use lexicographic comparisons. If the `==` result for all paired elements in the tuple
|
||||
is True, the comparison then considers the tuple’s length. Regardless of the return type of the
|
||||
dunder methods, the final result can still be a boolean value.
|
||||
|
||||
(+cpython: For tuples, `==` and `!=` always produce boolean results, regardless of the return type
|
||||
of the dunder methods.)
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, o: object) -> str:
|
||||
return "hello"
|
||||
|
||||
def __ne__(self, o: object) -> bytes:
|
||||
return b"world"
|
||||
|
||||
def __lt__(self, o: A) -> float:
|
||||
return 3.14
|
||||
|
||||
def __le__(self, o: A) -> complex:
|
||||
return complex(0.5, -0.5)
|
||||
|
||||
def __gt__(self, o: A) -> tuple:
|
||||
return (1, 2, 3)
|
||||
|
||||
def __ge__(self, o: A) -> list:
|
||||
return [1, 2, 3]
|
||||
|
||||
a = (A(), A())
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: float | Literal[False]
|
||||
reveal_type(a <= a) # revealed: complex | Literal[True]
|
||||
reveal_type(a > a) # revealed: tuple | Literal[False]
|
||||
reveal_type(a >= a) # revealed: list | Literal[True]
|
||||
|
||||
# If lexicographic comparison is finished before comparing A()
|
||||
b = ("1_foo", A())
|
||||
c = ("2_bar", A())
|
||||
|
||||
reveal_type(b == c) # revealed: Literal[False]
|
||||
reveal_type(b != c) # revealed: Literal[True]
|
||||
reveal_type(b < c) # revealed: Literal[True]
|
||||
reveal_type(b <= c) # revealed: Literal[True]
|
||||
reveal_type(b > c) # revealed: Literal[False]
|
||||
reveal_type(b >= c) # revealed: Literal[False]
|
||||
|
||||
class B:
|
||||
def __lt__(self, o: B) -> set:
|
||||
return set()
|
||||
|
||||
reveal_type((A(), B()) < (A(), B())) # revealed: float | set | Literal[False]
|
||||
```
|
||||
|
||||
#### Special Handling of Eq and NotEq in Lexicographic Comparisons
|
||||
|
||||
> Example: `(<int instance>, "foo") == (<int instance>, "bar")`
|
||||
|
||||
`Eq` and `NotEq` have unique behavior compared to other operators in lexicographic comparisons.
|
||||
Specifically, for `Eq`, if any non-equal pair exists within the tuples being compared, we can
|
||||
immediately conclude that the tuples are not equal. Conversely, for `NotEq`, if any non-equal pair
|
||||
exists, we can determine that the tuples are unequal.
|
||||
|
||||
In contrast, with operators like `<` and `>`, the comparison must consider each pair of elements
|
||||
sequentially, and the final outcome might remain ambiguous until all pairs are compared.
|
||||
|
||||
```py
|
||||
def _(x: str, y: int):
|
||||
reveal_type("foo" == "bar") # revealed: Literal[False]
|
||||
reveal_type(("foo",) == ("bar",)) # revealed: Literal[False]
|
||||
reveal_type((4, "foo") == (4, "bar")) # revealed: Literal[False]
|
||||
reveal_type((y, "foo") == (y, "bar")) # revealed: Literal[False]
|
||||
|
||||
a = (x, y, "foo")
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: bool
|
||||
reveal_type(a <= a) # revealed: bool
|
||||
reveal_type(a > a) # revealed: bool
|
||||
reveal_type(a >= a) # revealed: bool
|
||||
|
||||
b = (x, y, "bar")
|
||||
|
||||
reveal_type(a == b) # revealed: Literal[False]
|
||||
reveal_type(a != b) # revealed: Literal[True]
|
||||
reveal_type(a < b) # revealed: bool
|
||||
reveal_type(a <= b) # revealed: bool
|
||||
reveal_type(a > b) # revealed: bool
|
||||
reveal_type(a >= b) # revealed: bool
|
||||
|
||||
c = (x, y, "foo", "different_length")
|
||||
|
||||
reveal_type(a == c) # revealed: Literal[False]
|
||||
reveal_type(a != c) # revealed: Literal[True]
|
||||
reveal_type(a < c) # revealed: bool
|
||||
reveal_type(a <= c) # revealed: bool
|
||||
reveal_type(a > c) # revealed: bool
|
||||
reveal_type(a >= c) # revealed: bool
|
||||
```
|
||||
|
||||
#### Error Propagation
|
||||
|
||||
Errors occurring within a tuple comparison should propagate outward. However, if the tuple
|
||||
comparison can clearly conclude before encountering an error, the error should not be raised.
|
||||
|
||||
```py
|
||||
def _(n: int, s: str):
|
||||
class A: ...
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`"
|
||||
A() < A()
|
||||
# error: [unsupported-operator] "Operator `<=` is not supported for types `A` and `A`"
|
||||
A() <= A()
|
||||
# error: [unsupported-operator] "Operator `>` is not supported for types `A` and `A`"
|
||||
A() > A()
|
||||
# error: [unsupported-operator] "Operator `>=` is not supported for types `A` and `A`"
|
||||
A() >= A()
|
||||
|
||||
a = (0, n, A())
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a < a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `<=` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a <= a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a > a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>=` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a >= a) # revealed: Unknown
|
||||
|
||||
# Comparison between `a` and `b` should only involve the first elements, `Literal[0]` and `Literal[99999]`,
|
||||
# and should terminate immediately.
|
||||
b = (99999, n, A())
|
||||
|
||||
reveal_type(a < b) # revealed: Literal[True]
|
||||
reveal_type(a <= b) # revealed: Literal[True]
|
||||
reveal_type(a > b) # revealed: Literal[False]
|
||||
reveal_type(a >= b) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
### Membership Test Comparisons
|
||||
|
||||
"Membership Test Comparisons" refers to the operators `in` and `not in`.
|
||||
|
||||
```py
|
||||
def _(n: int):
|
||||
a = (1, 2)
|
||||
b = ((3, 4), (1, 2))
|
||||
c = ((1, 2, 3), (4, 5, 6))
|
||||
d = ((n, n), (n, n))
|
||||
|
||||
reveal_type(a in b) # revealed: Literal[True]
|
||||
reveal_type(a not in b) # revealed: Literal[False]
|
||||
|
||||
reveal_type(a in c) # revealed: Literal[False]
|
||||
reveal_type(a not in c) # revealed: Literal[True]
|
||||
|
||||
reveal_type(a in d) # revealed: bool
|
||||
reveal_type(a not in d) # revealed: bool
|
||||
```
|
||||
|
||||
### Identity Comparisons
|
||||
|
||||
"Identity Comparisons" refers to `is` and `is not`.
|
||||
|
||||
```py
|
||||
a = (1, 2)
|
||||
b = ("a", "b")
|
||||
c = (1, 2, 3)
|
||||
|
||||
reveal_type(a is (1, 2)) # revealed: bool
|
||||
reveal_type(a is not (1, 2)) # revealed: bool
|
||||
|
||||
# TODO should be Literal[False] once we implement comparison of mismatched literal types
|
||||
reveal_type(a is b) # revealed: bool
|
||||
# TODO should be Literal[True] once we implement comparison of mismatched literal types
|
||||
reveal_type(a is not b) # revealed: bool
|
||||
|
||||
reveal_type(a is c) # revealed: Literal[False]
|
||||
reveal_type(a is not c) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
## Homogeneous
|
||||
|
||||
For tuples like `tuple[int, ...]`, `tuple[Any, ...]`
|
||||
|
||||
// TODO
|
||||
@@ -1,79 +0,0 @@
|
||||
# Comparison: Unions
|
||||
|
||||
## Union on one side of the comparison
|
||||
|
||||
Comparisons on union types need to consider all possible cases:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
one_or_two = 1 if flag else 2
|
||||
|
||||
reveal_type(one_or_two <= 2) # revealed: Literal[True]
|
||||
reveal_type(one_or_two <= 1) # revealed: bool
|
||||
reveal_type(one_or_two <= 0) # revealed: Literal[False]
|
||||
|
||||
reveal_type(2 >= one_or_two) # revealed: Literal[True]
|
||||
reveal_type(1 >= one_or_two) # revealed: bool
|
||||
reveal_type(0 >= one_or_two) # revealed: Literal[False]
|
||||
|
||||
reveal_type(one_or_two < 1) # revealed: Literal[False]
|
||||
reveal_type(one_or_two < 2) # revealed: bool
|
||||
reveal_type(one_or_two < 3) # revealed: Literal[True]
|
||||
|
||||
reveal_type(one_or_two > 0) # revealed: Literal[True]
|
||||
reveal_type(one_or_two > 1) # revealed: bool
|
||||
reveal_type(one_or_two > 2) # revealed: Literal[False]
|
||||
|
||||
reveal_type(one_or_two == 3) # revealed: Literal[False]
|
||||
reveal_type(one_or_two == 1) # revealed: bool
|
||||
|
||||
reveal_type(one_or_two != 3) # revealed: Literal[True]
|
||||
reveal_type(one_or_two != 1) # revealed: bool
|
||||
|
||||
a_or_ab = "a" if flag else "ab"
|
||||
|
||||
reveal_type(a_or_ab in "ab") # revealed: Literal[True]
|
||||
reveal_type("a" in a_or_ab) # revealed: Literal[True]
|
||||
|
||||
reveal_type("c" not in a_or_ab) # revealed: Literal[True]
|
||||
reveal_type("a" not in a_or_ab) # revealed: Literal[False]
|
||||
|
||||
reveal_type("b" in a_or_ab) # revealed: bool
|
||||
reveal_type("b" not in a_or_ab) # revealed: bool
|
||||
|
||||
one_or_none = 1 if flag else None
|
||||
|
||||
reveal_type(one_or_none is None) # revealed: bool
|
||||
reveal_type(one_or_none is not None) # revealed: bool
|
||||
```
|
||||
|
||||
## Union on both sides of the comparison
|
||||
|
||||
With unions on both sides, we need to consider the full cross product of options when building the
|
||||
resulting (union) type:
|
||||
|
||||
```py
|
||||
def _(flag_s: bool, flag_l: bool):
|
||||
small = 1 if flag_s else 2
|
||||
large = 2 if flag_l else 3
|
||||
|
||||
reveal_type(small <= large) # revealed: Literal[True]
|
||||
reveal_type(small >= large) # revealed: bool
|
||||
|
||||
reveal_type(small < large) # revealed: bool
|
||||
reveal_type(small > large) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
## Unsupported operations
|
||||
|
||||
Make sure we emit a diagnostic if *any* of the possible comparisons is unsupported. For now, we fall
|
||||
back to `bool` for the result type instead of trying to infer something more precise from the other
|
||||
(supported) variants:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
x = [1, 2] if flag else 1
|
||||
|
||||
result = 1 in x # error: "Operator `in` is not supported"
|
||||
reveal_type(result) # revealed: bool
|
||||
```
|
||||
@@ -1,38 +0,0 @@
|
||||
# Comparison: Unsupported operators
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag1: bool, flag2: bool):
|
||||
class A: ...
|
||||
a = 1 in 7 # error: "Operator `in` is not supported for types `Literal[1]` and `Literal[7]`"
|
||||
reveal_type(a) # revealed: bool
|
||||
|
||||
b = 0 not in 10 # error: "Operator `not in` is not supported for types `Literal[0]` and `Literal[10]`"
|
||||
reveal_type(b) # revealed: bool
|
||||
|
||||
# TODO: should error, once operand type check is implemented
|
||||
# ("Operator `<` is not supported for types `object` and `int`")
|
||||
c = object() < 5
|
||||
# TODO: should be Unknown, once operand type check is implemented
|
||||
reveal_type(c) # revealed: bool
|
||||
|
||||
# TODO: should error, once operand type check is implemented
|
||||
# ("Operator `<` is not supported for types `int` and `object`")
|
||||
d = 5 < object()
|
||||
# TODO: should be Unknown, once operand type check is implemented
|
||||
reveal_type(d) # revealed: bool
|
||||
|
||||
int_literal_or_str_literal = 1 if flag else "foo"
|
||||
# error: "Operator `in` is not supported for types `Literal[42]` and `Literal[1]`, in comparing `Literal[42]` with `Literal[1, "foo"]`"
|
||||
e = 42 in int_literal_or_str_literal
|
||||
reveal_type(e) # revealed: bool
|
||||
|
||||
# TODO: should error, need to check if __lt__ signature is valid for right operand
|
||||
# error may be "Operator `<` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`
|
||||
f = (1, 2) < (1, "hello")
|
||||
# TODO: should be Unknown, once operand type check is implemented
|
||||
reveal_type(f) # revealed: bool
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[bool, A]` with `tuple[bool, A]`"
|
||||
g = (flag1, A()) < (flag2, A())
|
||||
reveal_type(g) # revealed: Unknown
|
||||
```
|
||||
@@ -1,37 +0,0 @@
|
||||
# If expressions
|
||||
|
||||
## Simple if-expression
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
x = 1 if flag else 2
|
||||
reveal_type(x) # revealed: Literal[1, 2]
|
||||
```
|
||||
|
||||
## If-expression with walrus operator
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
y = 0
|
||||
z = 0
|
||||
x = (y := 1) if flag else (z := 2)
|
||||
reveal_type(x) # revealed: Literal[1, 2]
|
||||
reveal_type(y) # revealed: Literal[0, 1]
|
||||
reveal_type(z) # revealed: Literal[0, 2]
|
||||
```
|
||||
|
||||
## Nested if-expression
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
x = 1 if flag else 2 if flag2 else 3
|
||||
reveal_type(x) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
## None
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
x = 1 if flag else None
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
```
|
||||
@@ -1,149 +0,0 @@
|
||||
# If statements
|
||||
|
||||
## Simple if
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
y = 1
|
||||
y = 2
|
||||
|
||||
if flag:
|
||||
y = 3
|
||||
|
||||
reveal_type(y) # revealed: Literal[2, 3]
|
||||
```
|
||||
|
||||
## Simple if-elif-else
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
y = 1
|
||||
y = 2
|
||||
|
||||
if flag:
|
||||
y = 3
|
||||
elif flag2:
|
||||
y = 4
|
||||
else:
|
||||
r = y
|
||||
y = 5
|
||||
s = y
|
||||
x = y
|
||||
|
||||
reveal_type(x) # revealed: Literal[3, 4, 5]
|
||||
|
||||
# revealed: Literal[2]
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(r)
|
||||
|
||||
# revealed: Literal[5]
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(s)
|
||||
```
|
||||
|
||||
## Single symbol across if-elif-else
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
if flag:
|
||||
y = 1
|
||||
elif flag2:
|
||||
y = 2
|
||||
else:
|
||||
y = 3
|
||||
|
||||
reveal_type(y) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
## if-elif-else without else assignment
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
y = 0
|
||||
|
||||
if flag:
|
||||
y = 1
|
||||
elif flag2:
|
||||
y = 2
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(y) # revealed: Literal[0, 1, 2]
|
||||
```
|
||||
|
||||
## if-elif-else with intervening assignment
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
y = 0
|
||||
|
||||
if flag:
|
||||
y = 1
|
||||
z = 3
|
||||
elif flag2:
|
||||
y = 2
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(y) # revealed: Literal[0, 1, 2]
|
||||
```
|
||||
|
||||
## Nested if statement
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
y = 0
|
||||
|
||||
if flag:
|
||||
if flag2:
|
||||
y = 1
|
||||
|
||||
reveal_type(y) # revealed: Literal[0, 1]
|
||||
```
|
||||
|
||||
## if-elif without else
|
||||
|
||||
```py
|
||||
def _(flag: bool, flag2: bool):
|
||||
y = 1
|
||||
y = 2
|
||||
|
||||
if flag:
|
||||
y = 3
|
||||
elif flag2:
|
||||
y = 4
|
||||
|
||||
reveal_type(y) # revealed: Literal[2, 3, 4]
|
||||
```
|
||||
|
||||
## if-elif with assignment expressions in tests
|
||||
|
||||
```py
|
||||
def check(x: int) -> bool:
|
||||
return bool(x)
|
||||
|
||||
if check(x := 1):
|
||||
x = 2
|
||||
elif check(x := 3):
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[2, 3, 4]
|
||||
```
|
||||
|
||||
## constraints apply to later test expressions
|
||||
|
||||
```py
|
||||
def check(x) -> bool:
|
||||
return bool(x)
|
||||
|
||||
def _(flag: bool):
|
||||
x = 1 if flag else None
|
||||
y = 0
|
||||
|
||||
if x is None:
|
||||
pass
|
||||
elif check(y := x):
|
||||
pass
|
||||
|
||||
reveal_type(y) # revealed: Literal[0, 1]
|
||||
```
|
||||
@@ -1,45 +0,0 @@
|
||||
# Pattern matching
|
||||
|
||||
## With wildcard
|
||||
|
||||
```py
|
||||
def _(target: int):
|
||||
match target:
|
||||
case 1:
|
||||
y = 2
|
||||
case _:
|
||||
y = 3
|
||||
|
||||
reveal_type(y) # revealed: Literal[2, 3]
|
||||
```
|
||||
|
||||
## Without wildcard
|
||||
|
||||
```py
|
||||
def _(target: int):
|
||||
match target:
|
||||
case 1:
|
||||
y = 2
|
||||
case 2:
|
||||
y = 3
|
||||
|
||||
# revealed: Literal[2, 3]
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(y)
|
||||
```
|
||||
|
||||
## Basic match
|
||||
|
||||
```py
|
||||
def _(target: int):
|
||||
y = 1
|
||||
y = 2
|
||||
|
||||
match target:
|
||||
case 1:
|
||||
y = 3
|
||||
case 2:
|
||||
y = 4
|
||||
|
||||
reveal_type(y) # revealed: Literal[2, 3, 4]
|
||||
```
|
||||
@@ -1,75 +0,0 @@
|
||||
# Errors while declaring
|
||||
|
||||
## Violates previous assignment
|
||||
|
||||
```py
|
||||
x = 1
|
||||
x: str # error: [invalid-declaration] "Cannot declare type `str` for inferred type `Literal[1]`"
|
||||
```
|
||||
|
||||
## Incompatible declarations
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
x: str
|
||||
else:
|
||||
x: int
|
||||
|
||||
x = 1 # error: [conflicting-declarations] "Conflicting declared types for `x`: str, int"
|
||||
```
|
||||
|
||||
## Incompatible declarations for 2 (out of 3) types
|
||||
|
||||
```py
|
||||
def _(flag1: bool, flag2: bool):
|
||||
if flag1:
|
||||
x: str
|
||||
elif flag2:
|
||||
x: int
|
||||
|
||||
# Here, the declared type for `x` is `int | str | Unknown`.
|
||||
x = 1 # error: [conflicting-declarations] "Conflicting declared types for `x`: str, int"
|
||||
```
|
||||
|
||||
## Incompatible declarations with bad assignment
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
x: str
|
||||
else:
|
||||
x: int
|
||||
|
||||
# error: [conflicting-declarations]
|
||||
# error: [invalid-assignment]
|
||||
x = b"foo"
|
||||
```
|
||||
|
||||
## No errors
|
||||
|
||||
Currently, we avoid raising the conflicting-declarations for the following cases:
|
||||
|
||||
### Partial declarations
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
x: int
|
||||
|
||||
x = 1
|
||||
```
|
||||
|
||||
### Partial declarations in try-except
|
||||
|
||||
Refer to <https://github.com/astral-sh/ruff/issues/13966>
|
||||
|
||||
```py
|
||||
def _():
|
||||
try:
|
||||
x: int = 1
|
||||
except:
|
||||
x = 2
|
||||
|
||||
x = 3
|
||||
```
|
||||
@@ -1,172 +0,0 @@
|
||||
# Exception Handling
|
||||
|
||||
## Single Exception
|
||||
|
||||
```py
|
||||
import re
|
||||
|
||||
try:
|
||||
help()
|
||||
except NameError as e:
|
||||
reveal_type(e) # revealed: NameError
|
||||
except re.error as f:
|
||||
reveal_type(f) # revealed: error
|
||||
```
|
||||
|
||||
## Unknown type in except handler does not cause spurious diagnostic
|
||||
|
||||
```py
|
||||
from nonexistent_module import foo # error: [unresolved-import]
|
||||
|
||||
try:
|
||||
help()
|
||||
except foo as e:
|
||||
reveal_type(foo) # revealed: Unknown
|
||||
reveal_type(e) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Multiple Exceptions in a Tuple
|
||||
|
||||
```py
|
||||
EXCEPTIONS = (AttributeError, TypeError)
|
||||
|
||||
try:
|
||||
help()
|
||||
except (RuntimeError, OSError) as e:
|
||||
reveal_type(e) # revealed: RuntimeError | OSError
|
||||
except EXCEPTIONS as f:
|
||||
reveal_type(f) # revealed: AttributeError | TypeError
|
||||
```
|
||||
|
||||
## Dynamic exception types
|
||||
|
||||
```py
|
||||
def foo(
|
||||
x: type[AttributeError],
|
||||
y: tuple[type[OSError], type[RuntimeError]],
|
||||
z: tuple[type[BaseException], ...],
|
||||
):
|
||||
try:
|
||||
help()
|
||||
except x as e:
|
||||
reveal_type(e) # revealed: AttributeError
|
||||
except y as f:
|
||||
reveal_type(f) # revealed: OSError | RuntimeError
|
||||
except z as g:
|
||||
# TODO: should be `BaseException`
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
```
|
||||
|
||||
## Invalid exception handlers
|
||||
|
||||
```py
|
||||
try:
|
||||
pass
|
||||
# error: [invalid-exception-caught] "Cannot catch object of type `Literal[3]` in an exception handler (must be a `BaseException` subclass or a tuple of `BaseException` subclasses)"
|
||||
except 3 as e:
|
||||
reveal_type(e) # revealed: Unknown
|
||||
|
||||
try:
|
||||
pass
|
||||
# error: [invalid-exception-caught] "Cannot catch object of type `Literal["foo"]` in an exception handler (must be a `BaseException` subclass or a tuple of `BaseException` subclasses)"
|
||||
# error: [invalid-exception-caught] "Cannot catch object of type `Literal[b"bar"]` in an exception handler (must be a `BaseException` subclass or a tuple of `BaseException` subclasses)"
|
||||
except (ValueError, OSError, "foo", b"bar") as e:
|
||||
reveal_type(e) # revealed: ValueError | OSError | Unknown
|
||||
|
||||
def foo(
|
||||
x: type[str],
|
||||
y: tuple[type[OSError], type[RuntimeError], int],
|
||||
z: tuple[type[str], ...],
|
||||
):
|
||||
try:
|
||||
help()
|
||||
# error: [invalid-exception-caught]
|
||||
except x as e:
|
||||
reveal_type(e) # revealed: Unknown
|
||||
# error: [invalid-exception-caught]
|
||||
except y as f:
|
||||
reveal_type(f) # revealed: OSError | RuntimeError | Unknown
|
||||
except z as g:
|
||||
# TODO: should emit a diagnostic here:
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
```
|
||||
|
||||
## Object raised is not an exception
|
||||
|
||||
```py
|
||||
try:
|
||||
raise AttributeError() # fine
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise FloatingPointError # fine
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise 1 # error: [invalid-raise]
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise int # error: [invalid-raise]
|
||||
except:
|
||||
...
|
||||
|
||||
def _(e: Exception | type[Exception]):
|
||||
raise e # fine
|
||||
|
||||
def _(e: Exception | type[Exception] | None):
|
||||
raise e # error: [invalid-raise]
|
||||
```
|
||||
|
||||
## Exception cause is not an exception
|
||||
|
||||
```py
|
||||
try:
|
||||
raise EOFError() from GeneratorExit # fine
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise StopIteration from MemoryError() # fine
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise BufferError() from None # fine
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise ZeroDivisionError from False # error: [invalid-raise]
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise SystemExit from bool() # error: [invalid-raise]
|
||||
except:
|
||||
...
|
||||
|
||||
try:
|
||||
raise
|
||||
except KeyboardInterrupt as e: # fine
|
||||
reveal_type(e) # revealed: KeyboardInterrupt
|
||||
raise LookupError from e # fine
|
||||
|
||||
try:
|
||||
raise
|
||||
except int as e: # error: [invalid-exception-caught]
|
||||
reveal_type(e) # revealed: Unknown
|
||||
raise KeyError from e
|
||||
|
||||
def _(e: Exception | type[Exception]):
|
||||
raise ModuleNotFoundError from e # fine
|
||||
|
||||
def _(e: Exception | type[Exception] | None):
|
||||
raise IndexError from e # fine
|
||||
|
||||
def _(e: int | None):
|
||||
raise IndexError from e # error: [invalid-raise]
|
||||
```
|
||||
@@ -1,622 +0,0 @@
|
||||
# Control flow for exception handlers
|
||||
|
||||
These tests assert that we understand the possible "definition states" (which symbols might or might
|
||||
not be defined) in the various branches of a `try`/`except`/`else`/`finally` block.
|
||||
|
||||
For a full writeup on the semantics of exception handlers, see [this document][1].
|
||||
|
||||
The tests throughout this Markdown document use functions with names starting with `could_raise_*`
|
||||
to mark definitions that might or might not succeed (as the function could raise an exception). A
|
||||
type checker must assume that any arbitrary function call could raise an exception in Python; this
|
||||
is just a naming convention used in these tests for clarity, and to future-proof the tests against
|
||||
possible future improvements whereby certain statements or expressions could potentially be inferred
|
||||
as being incapable of causing an exception to be raised.
|
||||
|
||||
## A single bare `except`
|
||||
|
||||
Consider the following `try`/`except` block, with a single bare `except:`. There are different types
|
||||
for the variable `x` in the two branches of this block, and we can't determine which branch might
|
||||
have been taken from the perspective of code following this block. The inferred type after the
|
||||
block's conclusion is therefore the union of the type at the end of the `try` suite (`str`) and the
|
||||
type at the end of the `except` suite (`Literal[2]`).
|
||||
|
||||
*Within* the `except` suite, we must infer a union of all possible "definition states" we could have
|
||||
been in at any point during the `try` suite. This is because control flow could have jumped to the
|
||||
`except` suite without any of the `try`-suite definitions successfully completing, with only *some*
|
||||
of the `try`-suite definitions successfully completing, or indeed with *all* of them successfully
|
||||
completing. The type of `x` at the beginning of the `except` suite in this example is therefore
|
||||
`Literal[1] | str`, taking into account that we might have jumped to the `except` suite before the
|
||||
`x = could_raise_returns_str()` redefinition, but we *also* could have jumped to the `except` suite
|
||||
*after* that redefinition.
|
||||
|
||||
```py path=union_type_inferred.py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
except:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
|
||||
reveal_type(x) # revealed: str | Literal[2]
|
||||
```
|
||||
|
||||
If `x` has the same type at the end of both branches, however, the branches unify and `x` is not
|
||||
inferred as having a union type following the `try`/`except` block:
|
||||
|
||||
```py path=branches_unify_to_non_union_type.py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
x = could_raise_returns_str()
|
||||
except:
|
||||
x = could_raise_returns_str()
|
||||
|
||||
reveal_type(x) # revealed: str
|
||||
```
|
||||
|
||||
## A non-bare `except`
|
||||
|
||||
For simple `try`/`except` blocks, an `except TypeError:` handler has the same control flow semantics
|
||||
as an `except:` handler. An `except TypeError:` handler will not catch *all* exceptions: if this is
|
||||
the only handler, it opens up the possibility that an exception might occur that would not be
|
||||
handled. However, as described in [the document on exception-handling semantics][1], that would lead
|
||||
to termination of the scope. It's therefore irrelevant to consider this possibility when it comes to
|
||||
control-flow analysis.
|
||||
|
||||
```py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
except TypeError:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
|
||||
reveal_type(x) # revealed: str | Literal[2]
|
||||
```
|
||||
|
||||
## Multiple `except` branches
|
||||
|
||||
If the scope reaches the final `reveal_type` call in this example, either the `try`-block suite of
|
||||
statements was executed in its entirety, or exactly one `except` suite was executed in its entirety.
|
||||
The inferred type of `x` at this point is the union of the types at the end of the three suites:
|
||||
|
||||
- At the end of `try`, `type(x) == str`
|
||||
- At the end of `except TypeError`, `x == 2`
|
||||
- At the end of `except ValueError`, `x == 3`
|
||||
|
||||
```py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
except TypeError:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
except ValueError:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 3
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
|
||||
reveal_type(x) # revealed: str | Literal[2, 3]
|
||||
```
|
||||
|
||||
## Exception handlers with `else` branches (but no `finally`)
|
||||
|
||||
If we reach the `reveal_type` call at the end of this scope, either the `try` and `else` suites were
|
||||
both executed in their entireties, or the `except` suite was executed in its entirety. The type of
|
||||
`x` at this point is the union of the type at the end of the `else` suite and the type at the end of
|
||||
the `except` suite:
|
||||
|
||||
- At the end of `else`, `x == 3`
|
||||
- At the end of `except`, `x == 2`
|
||||
|
||||
```py path=single_except.py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
except TypeError:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
else:
|
||||
reveal_type(x) # revealed: str
|
||||
x = 3
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
|
||||
reveal_type(x) # revealed: Literal[2, 3]
|
||||
```
|
||||
|
||||
For a block that has multiple `except` branches and an `else` branch, the same principle applies. In
|
||||
order to reach the final `reveal_type` call, either exactly one of the `except` suites must have
|
||||
been executed in its entirety, or the `try` suite and the `else` suite must both have been executed
|
||||
in their entireties:
|
||||
|
||||
```py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
except TypeError:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
except ValueError:
|
||||
reveal_type(x) # revealed: Literal[1] | str
|
||||
x = 3
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
else:
|
||||
reveal_type(x) # revealed: str
|
||||
x = 4
|
||||
reveal_type(x) # revealed: Literal[4]
|
||||
|
||||
reveal_type(x) # revealed: Literal[2, 3, 4]
|
||||
```
|
||||
|
||||
## Exception handlers with `finally` branches (but no `except` branches)
|
||||
|
||||
A `finally` suite is *always* executed. As such, if we reach the `reveal_type` call at the end of
|
||||
this example, we know that `x` *must* have been reassigned to `2` during the `finally` suite. The
|
||||
type of `x` at the end of the example is therefore `Literal[2]`:
|
||||
|
||||
```py path=redef_in_finally.py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
finally:
|
||||
x = 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
If `x` was *not* redefined in the `finally` suite, however, things are somewhat more complicated. If
|
||||
we reach the final `reveal_type` call, unlike the state when we're visiting the `finally` suite, we
|
||||
know that the `try`-block suite ran to completion. This means that there are fewer possible states
|
||||
at this point than there were when we were inside the `finally` block.
|
||||
|
||||
(Our current model does *not* correctly infer the types *inside* `finally` suites, however; this is
|
||||
still a TODO item for us.)
|
||||
|
||||
```py path=no_redef_in_finally.py
|
||||
def could_raise_returns_str() -> str:
|
||||
return "foo"
|
||||
|
||||
x = 1
|
||||
|
||||
try:
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
x = could_raise_returns_str()
|
||||
reveal_type(x) # revealed: str
|
||||
finally:
|
||||
# TODO: should be Literal[1] | str
|
||||
reveal_type(x) # revealed: str
|
||||
|
||||
reveal_type(x) # revealed: str
|
||||
```

## Combining an `except` branch with a `finally` branch

As previously stated, we do not yet have accurate inference for types *inside* `finally` suites.
When we do, however, we will have to take account of the following possibilities inside `finally`
suites:

- The `try` suite could have run to completion
- Or we could have jumped from halfway through the `try` suite to an `except` suite, and the
  `except` suite ran to completion
- Or we could have jumped from halfway through the `try` suite straight to the `finally` suite due
  to an unhandled exception
- Or we could have jumped from halfway through the `try` suite to an `except` suite, only for an
  exception raised in the `except` suite to cause us to jump to the `finally` suite before the
  `except` suite ran to completion

```py path=redef_in_finally.py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_bool() -> bool:
    return True

x = 1

try:
    reveal_type(x)  # revealed: Literal[1]
    x = could_raise_returns_str()
    reveal_type(x)  # revealed: str
except TypeError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_bytes()
    reveal_type(x)  # revealed: bytes
    x = could_raise_returns_bool()
    reveal_type(x)  # revealed: bool
finally:
    # TODO: should be `Literal[1] | str | bytes | bool`
    reveal_type(x)  # revealed: str | bool
    x = 2
    reveal_type(x)  # revealed: Literal[2]

reveal_type(x)  # revealed: Literal[2]
```
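
The last of these four paths is the easiest to overlook: even when an exception raised in the
`try` suite *is* matched by an `except` clause, a second exception raised inside that `except`
suite jumps straight to the `finally` suite before the handler has run to completion. Here is a
minimal runtime sketch of that path (again illustration only, not one of the checked examples;
`demo` is an invented name):

```py
def demo() -> None:
    try:
        raise TypeError("first")
    except TypeError:
        print("handler started")
        # Raising here abandons the rest of the handler and jumps to `finally`.
        raise ValueError("second")
    finally:
        # Entered before the `except` suite ran to completion; once this suite
        # finishes, the ValueError propagates to the caller.
        print("the `finally` suite runs")
```

Calling `demo()` prints both messages, after which the `ValueError` (with the original
`TypeError` attached as its `__context__`) propagates out of the function.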

Now for an example without a redefinition in the `finally` suite. As before, there *should* be fewer
possibilities after completion of the `finally` suite than there were during the `finally` suite
itself. (In some control-flow possibilities, some exceptions were merely *suspended* during the
`finally` suite; these lead to the scope's termination following the conclusion of the `finally`
suite.)

```py path=no_redef_in_finally.py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_bool() -> bool:
    return True

x = 1

try:
    reveal_type(x)  # revealed: Literal[1]
    x = could_raise_returns_str()
    reveal_type(x)  # revealed: str
except TypeError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_bytes()
    reveal_type(x)  # revealed: bytes
    x = could_raise_returns_bool()
    reveal_type(x)  # revealed: bool
finally:
    # TODO: should be `Literal[1] | str | bytes | bool`
    reveal_type(x)  # revealed: str | bool

reveal_type(x)  # revealed: str | bool
```

An example with multiple `except` branches and a `finally` branch:

```py path=multiple_except_branches.py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_bool() -> bool:
    return True

def could_raise_returns_memoryview() -> memoryview:
    return memoryview(b"")

def could_raise_returns_float() -> float:
    return 3.14

x = 1

try:
    reveal_type(x)  # revealed: Literal[1]
    x = could_raise_returns_str()
    reveal_type(x)  # revealed: str
except TypeError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_bytes()
    reveal_type(x)  # revealed: bytes
    x = could_raise_returns_bool()
    reveal_type(x)  # revealed: bool
except ValueError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_memoryview()
    reveal_type(x)  # revealed: memoryview
    x = could_raise_returns_float()
    reveal_type(x)  # revealed: float
finally:
    # TODO: should be `Literal[1] | str | bytes | bool | memoryview | float`
    reveal_type(x)  # revealed: str | bool | float

reveal_type(x)  # revealed: str | bool | float
```

## Combining `except`, `else` and `finally` branches

If the exception handler has an `else` branch, we must also take into account the possibility that
control flow could have jumped to the `finally` suite from partway through the `else` suite due to
an exception raised *there*.

```py path=single_except_branch.py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_bool() -> bool:
    return True

def could_raise_returns_memoryview() -> memoryview:
    return memoryview(b"")

def could_raise_returns_float() -> float:
    return 3.14

x = 1

try:
    reveal_type(x)  # revealed: Literal[1]
    x = could_raise_returns_str()
    reveal_type(x)  # revealed: str
except TypeError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_bytes()
    reveal_type(x)  # revealed: bytes
    x = could_raise_returns_bool()
    reveal_type(x)  # revealed: bool
else:
    reveal_type(x)  # revealed: str
    x = could_raise_returns_memoryview()
    reveal_type(x)  # revealed: memoryview
    x = could_raise_returns_float()
    reveal_type(x)  # revealed: float
finally:
    # TODO: should be `Literal[1] | str | bytes | bool | memoryview | float`
    reveal_type(x)  # revealed: bool | float

reveal_type(x)  # revealed: bool | float
```

The same again, this time with multiple `except` branches:

```py path=multiple_except_branches.py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_bool() -> bool:
    return True

def could_raise_returns_memoryview() -> memoryview:
    return memoryview(b"")

def could_raise_returns_float() -> float:
    return 3.14

def could_raise_returns_range() -> range:
    return range(42)

def could_raise_returns_slice() -> slice:
    return slice(None)

x = 1

try:
    reveal_type(x)  # revealed: Literal[1]
    x = could_raise_returns_str()
    reveal_type(x)  # revealed: str
except TypeError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_bytes()
    reveal_type(x)  # revealed: bytes
    x = could_raise_returns_bool()
    reveal_type(x)  # revealed: bool
except ValueError:
    reveal_type(x)  # revealed: Literal[1] | str
    x = could_raise_returns_memoryview()
    reveal_type(x)  # revealed: memoryview
    x = could_raise_returns_float()
    reveal_type(x)  # revealed: float
else:
    reveal_type(x)  # revealed: str
    x = could_raise_returns_range()
    reveal_type(x)  # revealed: range
    x = could_raise_returns_slice()
    reveal_type(x)  # revealed: slice
finally:
    # TODO: should be `Literal[1] | str | bytes | bool | memoryview | float | range | slice`
    reveal_type(x)  # revealed: bool | float | slice

reveal_type(x)  # revealed: bool | float | slice
```

## Nested `try`/`except` blocks

It would take advanced analysis, which we are not yet capable of, to be able to determine that an
exception handler always suppresses all exceptions. This is partly because it is possible for
statements in `except`, `else` and `finally` suites to raise exceptions as well as statements in
`try` suites. This means that if an exception handler is nested inside the `try` statement of an
enclosing exception handler, it should (at least for now) be treated the same as any other node: as
a suite containing statements that could possibly raise exceptions, which would lead to control flow
jumping out of that suite prior to the suite running to completion.

```py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_bool() -> bool:
    return True

def could_raise_returns_memoryview() -> memoryview:
    return memoryview(b"")

def could_raise_returns_float() -> float:
    return 3.14

def could_raise_returns_range() -> range:
    return range(42)

def could_raise_returns_slice() -> slice:
    return slice(None)

def could_raise_returns_complex() -> complex:
    return 3j

def could_raise_returns_bytearray() -> bytearray:
    return bytearray()

class Foo: ...
class Bar: ...

def could_raise_returns_Foo() -> Foo:
    return Foo()

def could_raise_returns_Bar() -> Bar:
    return Bar()

x = 1

try:
    try:
        reveal_type(x)  # revealed: Literal[1]
        x = could_raise_returns_str()
        reveal_type(x)  # revealed: str
    except TypeError:
        reveal_type(x)  # revealed: Literal[1] | str
        x = could_raise_returns_bytes()
        reveal_type(x)  # revealed: bytes
        x = could_raise_returns_bool()
        reveal_type(x)  # revealed: bool
    except ValueError:
        reveal_type(x)  # revealed: Literal[1] | str
        x = could_raise_returns_memoryview()
        reveal_type(x)  # revealed: memoryview
        x = could_raise_returns_float()
        reveal_type(x)  # revealed: float
    else:
        reveal_type(x)  # revealed: str
        x = could_raise_returns_range()
        reveal_type(x)  # revealed: range
        x = could_raise_returns_slice()
        reveal_type(x)  # revealed: slice
    finally:
        # TODO: should be `Literal[1] | str | bytes | bool | memoryview | float | range | slice`
        reveal_type(x)  # revealed: bool | float | slice
        x = 2
        reveal_type(x)  # revealed: Literal[2]
    reveal_type(x)  # revealed: Literal[2]
except:
    reveal_type(x)  # revealed: Literal[1, 2] | str | bytes | bool | memoryview | float | range | slice
    x = could_raise_returns_complex()
    reveal_type(x)  # revealed: complex
    x = could_raise_returns_bytearray()
    reveal_type(x)  # revealed: bytearray
else:
    reveal_type(x)  # revealed: Literal[2]
    x = could_raise_returns_Foo()
    reveal_type(x)  # revealed: Foo
    x = could_raise_returns_Bar()
    reveal_type(x)  # revealed: Bar
finally:
    # TODO: should be `Literal[1, 2] | str | bytes | bool | memoryview | float | range | slice | complex | bytearray | Foo | Bar`
    reveal_type(x)  # revealed: bytearray | Bar

# Either one `except` branch or the `else`
# must have been taken and completed to get here:
reveal_type(x)  # revealed: bytearray | Bar
```

## Nested scopes inside `try` blocks

Shadowing a variable in an inner scope has no effect on type inference of the variable by that name
in the outer scope:

```py
def could_raise_returns_str() -> str:
    return "foo"

def could_raise_returns_bytes() -> bytes:
    return b"foo"

def could_raise_returns_range() -> range:
    return range(42)

def could_raise_returns_bytearray() -> bytearray:
    return bytearray()

def could_raise_returns_float() -> float:
    return 3.14

x = 1

try:

    def foo(param=could_raise_returns_str()):
        x = could_raise_returns_str()

        try:
            reveal_type(x)  # revealed: str
            x = could_raise_returns_bytes()
            reveal_type(x)  # revealed: bytes
        except:
            reveal_type(x)  # revealed: str | bytes
            x = could_raise_returns_bytearray()
            reveal_type(x)  # revealed: bytearray
            x = could_raise_returns_float()
            reveal_type(x)  # revealed: float
        finally:
            # TODO: should be `str | bytes | bytearray | float`
            reveal_type(x)  # revealed: bytes | float
        reveal_type(x)  # revealed: bytes | float
    x = foo
    reveal_type(x)  # revealed: Literal[foo]
except:
    reveal_type(x)  # revealed: Literal[1] | Literal[foo]

    class Bar:
        x = could_raise_returns_range()
        reveal_type(x)  # revealed: range

    x = Bar
    reveal_type(x)  # revealed: Literal[Bar]
finally:
    # TODO: should be `Literal[1] | Literal[foo] | Literal[Bar]`
    reveal_type(x)  # revealed: Literal[foo] | Literal[Bar]

reveal_type(x)  # revealed: Literal[foo] | Literal[Bar]
```

[1]: https://astral-sh.notion.site/Exception-handler-control-flow-11348797e1ca80bb8ce1e9aedbbe439d
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user