Compare commits

Comparing micha/erro...david/self

2 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 36ba9707ca |  |
|  | 4aba390838 |  |
.github/CODEOWNERS (5 changes, vendored)

@@ -13,10 +13,9 @@
# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser/red-knot etc.
/python/py-fuzzer/ @AlexWaygood
# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood

# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
.github/workflows/build-binaries.yml (8 changes, vendored)

@@ -40,7 +40,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -69,7 +68,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -111,7 +109,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -167,7 +164,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -220,7 +216,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -295,7 +290,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -360,7 +354,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -426,7 +419,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
.github/workflows/build-docker.yml (1 change, vendored)

@@ -36,7 +36,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false

- uses: docker/setup-buildx-action@v3
.github/workflows/ci.yaml (70 changes, vendored)

@@ -32,13 +32,10 @@ jobs:
# Flag that is raised when any code is changed
# This is superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
# Flag that is raised when any code that affects the fuzzer is changed
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- uses: tj-actions/changed-files@v45
id: changed
@@ -52,7 +49,7 @@ jobs:
- crates/ruff_text_size/**
- crates/ruff_python_ast/**
- crates/ruff_python_parser/**
- python/py-fuzzer/**
- scripts/fuzz-parser/**
- .github/workflows/ci.yaml

linter:
@@ -82,15 +79,9 @@ jobs:
- python/**
- .github/workflows/ci.yaml

fuzz:
- fuzz/Cargo.toml
- fuzz/Cargo.lock
- fuzz/fuzz_targets/**

code:
- "**/*"
- "!**/*.md"
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
- "!docs/**"
- "!assets/**"

@@ -100,8 +91,6 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- run: cargo fmt --all --check
@@ -114,8 +103,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: |
rustup component add clippy
@@ -134,8 +121,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -180,8 +165,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -209,8 +192,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
@@ -235,8 +216,6 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
@@ -264,8 +243,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -282,8 +259,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: SebRollen/toml-action@v1.2.0
id: msrv
with:
@@ -312,12 +287,10 @@ jobs:
name: "cargo fuzz build"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' }}
if: ${{ github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
@@ -344,9 +317,13 @@ jobs:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@v4
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- uses: actions/download-artifact@v4
name: Download Ruff binary to test
id: download-cached-binary
@@ -358,15 +335,7 @@
# Make executable, since artifact download doesn't preserve this
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff

(
uvx \
--python=${{ env.PYTHON_VERSION }} \
--from=./python/py-fuzzer \
fuzz \
--test-executable=${{ steps.download-cached-binary.outputs.download-path }}/ruff \
--bin=ruff \
0-500
)
python scripts/fuzz-parser/fuzz.py --bin ruff 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff

scripts:
name: "test scripts"
@@ -376,8 +345,6 @@ jobs:
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- uses: Swatinem/rust-cache@v2
@@ -402,8 +369,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -415,7 +380,7 @@ jobs:
name: ruff
path: target/debug

- uses: dawidd6/action-download-artifact@v7
- uses: dawidd6/action-download-artifact@v6
name: Download baseline Ruff binary
with:
name: ruff
@@ -514,8 +479,6 @@ jobs:
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@main
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -526,8 +489,6 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -553,8 +514,6 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -586,8 +545,6 @@ jobs:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.13"
@@ -599,7 +556,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@@ -628,8 +585,6 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Cache rust"
@@ -657,7 +612,6 @@ jobs:
- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
persist-credentials: false
repository: "astral-sh/ruff-lsp"

- uses: actions/setup-python@v5
@@ -693,8 +647,6 @@ jobs:
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
with:
persist-credentials: false

- name: "Install Rust toolchain"
run: rustup show
.github/workflows/daily_fuzz.yaml (19 changes, vendored)

@@ -32,9 +32,13 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@v4
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -45,16 +49,7 @@
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: |
(
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
$(shuf -i 0-9999999999999999999 -n 1000)
)
run: python scripts/fuzz-parser/fuzz.py --bin ruff $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff

create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
.github/workflows/pr-comment.yaml (4 changes, vendored)

@@ -17,7 +17,7 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v7
- uses: dawidd6/action-download-artifact@v6
name: Download pull request number
with:
name: pr-number
@@ -33,7 +33,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi

- uses: dawidd6/action-download-artifact@v7
- uses: dawidd6/action-download-artifact@v6
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
.github/workflows/publish-docs.yml (1 change, vendored)

@@ -26,7 +26,6 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
persist-credentials: true

- uses: actions/setup-python@v5
with:
.github/workflows/publish-playground.yml (4 changes, vendored)

@@ -25,8 +25,6 @@ jobs:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
@@ -49,7 +47,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.13.0
uses: cloudflare/wrangler-action@v3.12.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
.github/workflows/publish-pypi.yml (2 changes, vendored)

@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
- uses: actions/download-artifact@v4
with:
pattern: wheels-*
.github/workflows/publish-wasm.yml (2 changes, vendored)

@@ -30,8 +30,6 @@ jobs:
fail-fast: false
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@v0.4.0
.github/workflows/sync_typeshed.yaml (2 changes, vendored)

@@ -25,13 +25,11 @@ jobs:
name: Checkout Ruff
with:
path: ruff
persist-credentials: true
- uses: actions/checkout@v4
name: Checkout typeshed
with:
repository: python/typeshed
path: typeshed
persist-credentials: true
- name: Setup git
run: |
git config --global user.name typeshedbot

@@ -22,7 +22,7 @@ repos:
- id: validate-pyproject

- repo: https://github.com/executablebooks/mdformat
rev: 0.7.19
rev: 0.7.18
hooks:
- id: mdformat
additional_dependencies:
@@ -36,7 +36,7 @@ repos:
)$

- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.43.0
rev: v0.42.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -59,7 +59,7 @@ repos:
- black==24.10.0

- repo: https://github.com/crate-ci/typos
rev: v1.28.2
rev: v1.27.3
hooks:
- id: typos

@@ -73,7 +73,7 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.2
rev: v0.7.4
hooks:
- id: ruff-format
- id: ruff
@@ -83,25 +83,10 @@ repos:

# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.4.2
rev: v3.3.3
hooks:
- id: prettier
types: [yaml]

- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v0.8.0
hooks:
- id: zizmor
# `release.yml` is autogenerated by `dist`; security issues need to be fixed there
# (https://opensource.axo.dev/cargo-dist/)
exclude: .github/workflows/release.yml
# We could consider enabling the low-severity warnings, but they're noisy
args: [--min-severity=medium]

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.30.0
hooks:
- id: check-github-workflows

ci:
skip: [cargo-fmt, dev-generate-all]

@@ -192,7 +192,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.

See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.

### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))

Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
CHANGELOG.md (84 changes)

@@ -1,83 +1,5 @@
# Changelog

## 0.8.2

### Preview features

- \[`airflow`\] Avoid deprecated values (`AIR302`) ([#14582](https://github.com/astral-sh/ruff/pull/14582))
- \[`airflow`\] Extend removed names for `AIR302` ([#14734](https://github.com/astral-sh/ruff/pull/14734))
- \[`ruff`\] Extend `unnecessary-regular-expression` to non-literal strings (`RUF055`) ([#14679](https://github.com/astral-sh/ruff/pull/14679))
- \[`ruff`\] Implement `used-dummy-variable` (`RUF052`) ([#14611](https://github.com/astral-sh/ruff/pull/14611))
- \[`ruff`\] Implement `unnecessary-cast-to-int` (`RUF046`) ([#14697](https://github.com/astral-sh/ruff/pull/14697))

### Rule changes

- \[`airflow`\] Check `AIR001` from builtin or providers `operators` module ([#14631](https://github.com/astral-sh/ruff/pull/14631))
- \[`flake8-pytest-style`\] Remove `@` in `pytest.mark.parametrize` rule messages ([#14770](https://github.com/astral-sh/ruff/pull/14770))
- \[`pandas-vet`\] Skip rules if the `panda` module hasn't been seen ([#14671](https://github.com/astral-sh/ruff/pull/14671))
- \[`pylint`\] Fix false negatives for `ascii` and `sorted` in `len-as-condition` (`PLC1802`) ([#14692](https://github.com/astral-sh/ruff/pull/14692))
- \[`refurb`\] Guard `hashlib` imports and mark `hashlib-digest-hex` fix as safe (`FURB181`) ([#14694](https://github.com/astral-sh/ruff/pull/14694))

### Configuration

- \[`flake8-import-conventions`\] Improve syntax check for aliases supplied in configuration for `unconventional-import-alias` (`ICN001`) ([#14745](https://github.com/astral-sh/ruff/pull/14745))

### Bug fixes

- Revert: [pyflakes] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) (#14615) ([#14726](https://github.com/astral-sh/ruff/pull/14726))
- \[`pep8-naming`\] Avoid false positive for `class Bar(type(foo))` (`N804`) ([#14683](https://github.com/astral-sh/ruff/pull/14683))
- \[`pycodestyle`\] Handle f-strings properly for `invalid-escape-sequence` (`W605`) ([#14748](https://github.com/astral-sh/ruff/pull/14748))
- \[`pylint`\] Ignore `@overload` in `PLR0904` ([#14730](https://github.com/astral-sh/ruff/pull/14730))
- \[`refurb`\] Handle non-finite decimals in `verbose-decimal-constructor` (`FURB157`) ([#14596](https://github.com/astral-sh/ruff/pull/14596))
- \[`ruff`\] Avoid emitting `assignment-in-assert` when all references to the assigned variable are themselves inside `assert`s (`RUF018`) ([#14661](https://github.com/astral-sh/ruff/pull/14661))

### Documentation

- Improve docs for `flake8-use-pathlib` rules ([#14741](https://github.com/astral-sh/ruff/pull/14741))
- Improve error messages and docs for `flake8-comprehensions` rules ([#14729](https://github.com/astral-sh/ruff/pull/14729))
- \[`flake8-type-checking`\] Expands `TC006` docs to better explain itself ([#14749](https://github.com/astral-sh/ruff/pull/14749))

## 0.8.1

### Preview features

- Formatter: Avoid invalid syntax for format-spec with quotes for all Python versions ([#14625](https://github.com/astral-sh/ruff/pull/14625))
- Formatter: Consider quotes inside format-specs when choosing the quotes for an f-string ([#14493](https://github.com/astral-sh/ruff/pull/14493))
- Formatter: Do not consider f-strings with escaped newlines as multiline ([#14624](https://github.com/astral-sh/ruff/pull/14624))
- Formatter: Fix f-string formatting in assignment statement ([#14454](https://github.com/astral-sh/ruff/pull/14454))
- Formatter: Fix unnecessary space around power operator (`**`) in overlong f-string expressions ([#14489](https://github.com/astral-sh/ruff/pull/14489))
- \[`airflow`\] Avoid implicit `schedule` argument to `DAG` and `@dag` (`AIR301`) ([#14581](https://github.com/astral-sh/ruff/pull/14581))
- \[`flake8-builtins`\] Exempt private built-in modules (`A005`) ([#14505](https://github.com/astral-sh/ruff/pull/14505))
- \[`flake8-pytest-style`\] Fix `pytest.mark.parametrize` rules to check calls instead of decorators ([#14515](https://github.com/astral-sh/ruff/pull/14515))
- \[`flake8-type-checking`\] Implement `runtime-cast-value` (`TC006`) ([#14511](https://github.com/astral-sh/ruff/pull/14511))
- \[`flake8-type-checking`\] Implement `unquoted-type-alias` (`TC007`) and `quoted-type-alias` (`TC008`) ([#12927](https://github.com/astral-sh/ruff/pull/12927))
- \[`flake8-use-pathlib`\] Recommend `Path.iterdir()` over `os.listdir()` (`PTH208`) ([#14509](https://github.com/astral-sh/ruff/pull/14509))
- \[`pylint`\] Extend `invalid-envvar-default` to detect `os.environ.get` (`PLW1508`) ([#14512](https://github.com/astral-sh/ruff/pull/14512))
- \[`pylint`\] Implement `len-test` (`PLC1802`) ([#14309](https://github.com/astral-sh/ruff/pull/14309))
- \[`refurb`\] Fix bug where methods defined using lambdas were flagged by `FURB118` ([#14639](https://github.com/astral-sh/ruff/pull/14639))
- \[`ruff`\] Auto-add `r` prefix when string has no backslashes for `unraw-re-pattern` (`RUF039`) ([#14536](https://github.com/astral-sh/ruff/pull/14536))
- \[`ruff`\] Implement `invalid-assert-message-literal-argument` (`RUF040`) ([#14488](https://github.com/astral-sh/ruff/pull/14488))
- \[`ruff`\] Implement `unnecessary-nested-literal` (`RUF041`) ([#14323](https://github.com/astral-sh/ruff/pull/14323))
- \[`ruff`\] Implement `unnecessary-regular-expression` (`RUF055`) ([#14659](https://github.com/astral-sh/ruff/pull/14659))

### Rule changes

- Ignore more rules for stub files ([#14541](https://github.com/astral-sh/ruff/pull/14541))
- \[`pep8-naming`\] Eliminate false positives for single-letter names (`N811`, `N814`) ([#14584](https://github.com/astral-sh/ruff/pull/14584))
- \[`pyflakes`\] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) ([#14615](https://github.com/astral-sh/ruff/pull/14615))
- \[`ruff`\] Detect redirected-noqa in file-level comments (`RUF101`) ([#14635](https://github.com/astral-sh/ruff/pull/14635))
- \[`ruff`\] Mark fixes for `unsorted-dunder-all` and `unsorted-dunder-slots` as unsafe when there are complex comments in the sequence (`RUF022`, `RUF023`) ([#14560](https://github.com/astral-sh/ruff/pull/14560))

### Bug fixes

- Avoid fixing code to `None | None` for `redundant-none-literal` (`PYI061`) and `never-union` (`RUF020`) ([#14583](https://github.com/astral-sh/ruff/pull/14583), [#14589](https://github.com/astral-sh/ruff/pull/14589))
- \[`flake8-bugbear`\] Fix `mutable-contextvar-default` to resolve annotated function calls properly (`B039`) ([#14532](https://github.com/astral-sh/ruff/pull/14532))
- \[`flake8-pyi`, `ruff`\] Fix traversal of nested literals and unions (`PYI016`, `PYI051`, `PYI055`, `PYI062`, `RUF041`) ([#14641](https://github.com/astral-sh/ruff/pull/14641))
- \[`flake8-pyi`\] Avoid rewriting invalid type expressions in `unnecessary-type-union` (`PYI055`) ([#14660](https://github.com/astral-sh/ruff/pull/14660))
- \[`flake8-type-checking`\] Avoid syntax errors and type checking problem for quoted annotations autofix (`TC003`, `TC006`) ([#14634](https://github.com/astral-sh/ruff/pull/14634))
- \[`pylint`\] Do not wrap function calls in parentheses in the fix for unnecessary-dunder-call (`PLC2801`) ([#14601](https://github.com/astral-sh/ruff/pull/14601))
- \[`ruff`\] Handle `attrs`'s `auto_attribs` correctly (`RUF009`) ([#14520](https://github.com/astral-sh/ruff/pull/14520))

## 0.8.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.8.0) for a migration guide and overview of the changes!
@@ -294,7 +216,7 @@ The following fixes have been stabilized:
### Preview features

- Fix `E221` and `E222` to flag missing or extra whitespace around `==` operator ([#13890](https://github.com/astral-sh/ruff/pull/13890))
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
- Formatter: Join implicit concatenated strings when they fit on a line ([#13663](https://github.com/astral-sh/ruff/pull/13663))
- \[`pylint`\] Restrict `iteration-over-set` to only work on sets of literals (`PLC0208`) ([#13731](https://github.com/astral-sh/ruff/pull/13731))

@@ -1309,7 +1231,7 @@ To read more about this exciting milestone, check out our [blog post](https://as
### Preview features

- \[`pycodestyle`\] Ignore end-of-line comments when determining blank line rules ([#11342](https://github.com/astral-sh/ruff/pull/11342))
- \[`pylint`\] Detect `pathlib.Path.open` calls in `unspecified-encoding` (`PLW1514`) ([#11288](https://github.com/astral-sh/ruff/pull/11288))
- \[`pylint`\] Detect `pathlib.Path.open` calls in `unspecified-encoding` (`PLW1514`) ([#11288](https://github.com/astral-sh/ruff/pull/11288))
- \[`flake8-pyi`\] Implement `PYI059` (`generic-not-last-base-class`) ([#11233](https://github.com/astral-sh/ruff/pull/11233))
- \[`flake8-pyi`\] Implement `PYI062` (`duplicate-literal-member`) ([#11269](https://github.com/astral-sh/ruff/pull/11269))

@@ -1684,7 +1606,7 @@ To setup `ruff server` with your editor, refer to the [README.md](https://github
- \[`pycodestyle`\] Do not ignore lines before the first logical line in blank lines rules. ([#10382](https://github.com/astral-sh/ruff/pull/10382))
- \[`pycodestyle`\] Do not trigger `E225` and `E275` when the next token is a ')' ([#10315](https://github.com/astral-sh/ruff/pull/10315))
- \[`pylint`\] Avoid false-positive slot non-assignment for `__dict__` (`PLE0237`) ([#10348](https://github.com/astral-sh/ruff/pull/10348))
- Gate f-string struct size test for Rustc < 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))
- Gate f-string struct size test for Rustc \< 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))

### Documentation

@@ -139,7 +139,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).

1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
`#[derive(ViolationMetadata)]` to see examples.
`#[violation]` to see examples.

1. In that file, define a function that adds the violation to the diagnostic list as appropriate
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
@@ -863,7 +863,7 @@ each configuration file.

The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
(inclusive of the root itself). The module path can be thought of as "the path you would use to
import the module" (e.g., `import foo.bar.baz`).

Cargo.lock (316 changes, generated)
@@ -117,9 +117,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.94"
|
||||
version = "1.0.93"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
|
||||
checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
|
||||
|
||||
[[package]]
|
||||
name = "append-only-vec"
|
||||
@@ -276,13 +276,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.2"
|
||||
version = "1.0.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc"
|
||||
checksum = "d32a725bc159af97c3e629873bb9f88fb8cf8a4867175f76dc987815ea07c83b"
|
||||
dependencies = [
|
||||
"jobserver",
|
||||
"libc",
|
||||
"shlex",
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -353,9 +353,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.23"
|
||||
version = "4.5.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
|
||||
checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -363,9 +363,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.23"
|
||||
version = "4.5.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
|
||||
checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -413,14 +413,14 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.4"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
|
||||
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
|
||||
|
||||
[[package]]
|
||||
name = "clearscreen"
|
||||
@@ -693,7 +693,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim 0.10.0",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -704,7 +704,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -758,23 +758,23 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "dir-test"
|
||||
version = "0.4.1"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62c013fe825864f3e4593f36426c1fa7a74f5603f13ca8d1af7a990c1cd94a79"
|
||||
checksum = "b12781621d53fd9087021f5a338df5c57c04f84a6231c1f4726f45e2e333470b"
|
||||
dependencies = [
|
||||
"dir-test-macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dir-test-macros"
|
||||
version = "0.4.1"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d42f54d7b4a6bc2400fe5b338e35d1a335787585375322f49c5d5fe7b243da7e"
|
||||
checksum = "1340852f50b2285d01a7f598cc5d08b572669c3e09e614925175cc3c26787b91"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -826,7 +826,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1246,7 +1246,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1314,9 +1314,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.7.0"
|
||||
version = "2.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
|
||||
checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.15.2",
|
||||
@@ -1420,7 +1420,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1485,11 +1485,10 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.74"
|
||||
version = "0.3.72"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705"
|
||||
checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
@@ -1521,9 +1520,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.167"
|
||||
version = "0.2.164"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc"
|
||||
checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
@@ -1547,7 +1546,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a2ae40017ac09cd2c6a53504cb3c871c7f2b41466eac5bc66ba63f39073b467b"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1834,9 +1833,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
|
||||
|
||||
[[package]]
|
||||
name = "ordermap"
|
||||
version = "0.5.4"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f80a48eb68b6a7da9829b8b0429011708f775af80676a91063d023a66a656106"
|
||||
checksum = "31f2bd7b03bf2c767e1bb7b91505dbe022833776e60480275e6f2fb0db0c7503"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
]
|
||||
@@ -1911,9 +1910,9 @@ checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42"
|
||||
|
||||
[[package]]
|
||||
name = "pathdiff"
|
||||
version = "0.2.3"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
|
||||
checksum = "d61c5ce1153ab5b689d0c074c4e7fc613e942dfb7dd9eea5ab202d2ad91fe361"
|
||||
|
||||
[[package]]
|
||||
name = "peg"
|
||||
@@ -1944,11 +1943,10 @@ checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a"
|
||||
|
||||
[[package]]
|
||||
name = "pep440_rs"
|
||||
version = "0.7.3"
|
||||
version = "0.7.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c"
|
||||
checksum = "0922a442c78611fa8c5ed6065d2d898a820cf12fa90604217fdb2d01675efec7"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"serde",
|
||||
"unicode-width 0.2.0",
|
||||
"unscanny",
|
||||
@@ -1967,7 +1965,7 @@ dependencies = [
|
||||
"once_cell",
|
||||
"pep440_rs",
|
||||
"regex",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"thiserror 1.0.67",
|
||||
@@ -2014,7 +2012,7 @@ dependencies = [
|
||||
"pest_meta",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2161,7 +2159,7 @@ dependencies = [
|
||||
"newtype-uuid",
|
||||
"quick-xml",
|
||||
"strip-ansi-escapes",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -2174,26 +2172,6 @@ dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quickcheck"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6"
|
||||
dependencies = [
|
||||
"rand",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quickcheck_macros"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.37"
|
||||
@@ -2294,27 +2272,24 @@ dependencies = [
|
||||
"itertools 0.13.0",
|
||||
"memchr",
|
||||
"ordermap",
|
||||
"quickcheck",
|
||||
"quickcheck_macros",
|
||||
"red_knot_test",
|
||||
"red_knot_vendored",
|
||||
"ruff_db",
|
||||
"ruff_index",
|
||||
"ruff_macros",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_literal",
|
||||
"ruff_python_parser",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"static_assertions",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -2334,7 +2309,7 @@ dependencies = [
|
||||
"ruff_python_ast",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
@@ -2347,7 +2322,6 @@ name = "red_knot_test"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"camino",
|
||||
"colored",
|
||||
"memchr",
|
||||
"red_knot_python_semantic",
|
||||
@@ -2358,11 +2332,9 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"toml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2408,10 +2380,10 @@ dependencies = [
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"serde",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"toml",
|
||||
"tracing",
|
||||
]
|
||||
@@ -2517,7 +2489,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.8.2"
|
||||
version = "0.8.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2557,14 +2529,14 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"ruff_workspace",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"strum",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"tikv-jemallocator",
|
||||
"toml",
|
||||
"tracing",
|
||||
@@ -2588,7 +2560,7 @@ dependencies = [
|
||||
"ruff_python_formatter",
|
||||
"ruff_python_parser",
|
||||
"ruff_python_trivia",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tikv-jemallocator",
|
||||
@@ -2630,11 +2602,11 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"serde",
|
||||
"tempfile",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
@@ -2699,7 +2671,7 @@ dependencies = [
|
||||
"ruff_cache",
|
||||
"ruff_macros",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"static_assertions",
|
||||
@@ -2736,7 +2708,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.8.2"
|
||||
version = "0.8.0"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -2777,7 +2749,7 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -2786,7 +2758,7 @@ dependencies = [
|
||||
"strum",
|
||||
"strum_macros",
|
||||
"test-case",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"toml",
|
||||
"typed-arena",
|
||||
"unicode-normalization",
|
||||
@@ -2803,7 +2775,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"ruff_python_trivia",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2820,7 +2792,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"test-case",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -2839,7 +2811,7 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
]
|
||||
@@ -2885,14 +2857,14 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"similar",
|
||||
"smallvec",
|
||||
"static_assertions",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -2932,7 +2904,7 @@ dependencies = [
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"static_assertions",
|
||||
"unicode-ident",
|
||||
"unicode-normalization",
|
||||
@@ -2963,7 +2935,7 @@ dependencies = [
|
||||
"ruff_python_parser",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
]
|
||||
@@ -3021,11 +2993,11 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"ruff_workspace",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"thiserror 2.0.6",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
@@ -3051,7 +3023,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.8.2"
|
||||
version = "0.8.0"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -3102,7 +3074,7 @@ dependencies = [
|
||||
"ruff_python_semantic",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_source_file",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
"shellexpand",
|
||||
@@ -3129,9 +3101,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "2.1.0"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497"
|
||||
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
@@ -3148,9 +3120,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustls"
|
||||
version = "0.23.19"
|
||||
version = "0.23.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1"
|
||||
checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402"
|
||||
dependencies = [
|
||||
"log",
|
||||
"once_cell",
|
||||
@@ -3163,15 +3135,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustls-pki-types"
|
||||
version = "1.10.0"
|
||||
version = "1.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b"
|
||||
checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d"
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.102.8"
|
||||
version = "0.102.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9"
|
||||
checksum = "f9a6fccd794a42c2c105b513a2f62bc3fd8f3ba57a4593677ceb0bd035164d78"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"rustls-pki-types",
|
||||
@@ -3203,7 +3175,7 @@ dependencies = [
|
||||
"indexmap",
|
||||
"lazy_static",
|
||||
"parking_lot",
|
||||
"rustc-hash 2.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa-macro-rules",
|
||||
"salsa-macros",
|
||||
"smallvec",
|
||||
@@ -3223,7 +3195,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
@@ -3257,7 +3229,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde_derive_internals",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3306,7 +3278,7 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3317,7 +3289,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3340,7 +3312,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3381,7 +3353,7 @@ dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3413,17 +3385,11 @@ dependencies = [
|
||||
"dirs 5.0.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "similar"
|
||||
version = "2.6.0"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e"
|
||||
checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640"
|
||||
|
||||
[[package]]
|
||||
name = "siphasher"
|
||||
@@ -3495,7 +3461,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3506,20 +3472,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.109"
|
||||
version = "2.0.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.90"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31"
|
||||
checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3534,7 +3489,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3597,7 +3552,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3608,7 +3563,7 @@ checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
"test-case-core",
|
||||
]
|
||||
|
||||
@@ -3623,11 +3578,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "2.0.6"
|
||||
version = "2.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47"
|
||||
checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa"
|
||||
dependencies = [
|
||||
"thiserror-impl 2.0.6",
|
||||
"thiserror-impl 2.0.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3638,18 +3593,18 @@ checksum = "b607164372e89797d78b8e23a6d67d5d1038c1c65efd52e1389ef8b77caba2a6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "2.0.6"
|
||||
version = "2.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312"
|
||||
checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3753,9 +3708,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.41"
|
||||
version = "0.1.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
|
||||
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
@@ -3765,20 +3720,20 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-attributes"
|
||||
version = "0.1.28"
|
||||
version = "0.1.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
|
||||
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.33"
|
||||
version = "0.1.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
|
||||
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
@@ -3797,9 +3752,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-indicatif"
|
||||
version = "0.3.8"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74ba258e9de86447f75edf6455fded8e5242704c6fccffe7bf8d7fb6daef1180"
|
||||
checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d"
|
||||
dependencies = [
|
||||
"indicatif",
|
||||
"tracing",
|
||||
@@ -3820,9 +3775,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-subscriber"
|
||||
version = "0.3.19"
|
||||
version = "0.3.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
|
||||
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
|
||||
dependencies = [
|
||||
"matchers",
|
||||
"nu-ansi-term 0.46.0",
|
||||
@@ -3971,9 +3926,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
||||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "2.12.1"
|
||||
version = "2.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d"
|
||||
checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
|
||||
dependencies = [
|
||||
"base64 0.22.0",
|
||||
"flate2",
|
||||
@@ -4041,7 +3996,7 @@ checksum = "6b91f57fe13a38d0ce9e28a03463d8d3c2468ed03d75375110ec71d93b449a08"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4116,9 +4071,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.97"
|
||||
version = "0.2.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c"
|
||||
checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
@@ -4127,37 +4082,36 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.97"
|
||||
version = "0.2.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd"
|
||||
checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-futures"
|
||||
version = "0.4.47"
|
||||
version = "0.4.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9dfaf8f50e5f293737ee323940c7d8b08a66a95a419223d9f41610ca08b0833d"
|
||||
checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
"web-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.97"
|
||||
version = "0.2.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051"
|
||||
checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
@@ -4165,32 +4119,32 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.97"
|
||||
version = "0.2.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d"
|
||||
checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.97"
|
||||
version = "0.2.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49"
|
||||
checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-test"
|
||||
version = "0.3.47"
|
||||
version = "0.3.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d919bb60ebcecb9160afee6c71b43a58a4f0517a2de0054cd050d02cec08201"
|
||||
checksum = "d381749acb0943d357dcbd8f0b100640679883fcdeeef04def49daf8d33a5426"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"js-sys",
|
||||
"minicov",
|
||||
"once_cell",
|
||||
"scoped-tls",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
@@ -4199,20 +4153,20 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-test-macro"
|
||||
version = "0.3.47"
|
||||
version = "0.3.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "222ebde6ea87fbfa6bdd2e9f1fd8a91d60aee5db68792632176c4e16a74fc7d8"
|
||||
checksum = "c97b2ef2c8d627381e51c071c2ab328eac606d3f69dd82bcbca20a9e389d95f0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "web-sys"
|
||||
version = "0.3.74"
|
||||
version = "0.3.69"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a98bc3c33f0fe7e59ad7cd041b89034fa82a7c2d4365ca538dda6cdaf513863c"
|
||||
checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
@@ -4508,7 +4462,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
@@ -4529,7 +4483,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4549,7 +4503,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
@@ -4578,7 +4532,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
10 README.md

@@ -119,10 +119,6 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
# With uv.
uv add --dev ruff # to add ruff to your project
uv tool install ruff # to install ruff globally

# With pip.
pip install ruff

@@ -140,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.8.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.8.2/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.8.0/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.8.0/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -174,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.8.2
  rev: v0.8.0
  hooks:
    # Run the linter.
    - id: ruff

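    # Not part of the diff above: the same pre-commit repository also provides a
    # formatter hook that is commonly enabled alongside the linter, e.g.
    #   - id: ruff-format
    # (hook id as documented by ruff-pre-commit; shown here only as an example).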
36 clippy.toml

@@ -1,25 +1,21 @@
doc-valid-idents = [
    "..",
    "CodeQL",
    "FastAPI",
    "IPython",
    "LangChain",
    "LibCST",
    "McCabe",
    "NumPy",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
    "SNMPv1",
    "SNMPv2",
    "SNMPv3",
    "PyFlakes"
    "..",
    "CodeQL",
    "FastAPI",
    "IPython",
    "LangChain",
    "LibCST",
    "McCabe",
    "NumPy",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
]

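For context on the `doc-valid-idents` list above (not part of the diff): it feeds Clippy's `doc_markdown` lint with identifiers that may appear unquoted in doc comments. A minimal, hypothetical sketch of the effect:

```rust
/// Checks NumPy-style docstrings in SQLAlchemy models.
///
/// Because "NumPy" and "SQLAlchemy" are listed in `doc-valid-idents`,
/// `clippy::doc_markdown` does not ask for them to be wrapped in backticks here.
pub fn check_docstrings() {}
```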
@@ -103,7 +103,7 @@ called **once**.

## Profiling

Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv

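# Not part of the diff above: once `tracing.folded` exists, one common way to
# inspect it is to render a flamegraph with the `inferno` crate (assumes
# `cargo install inferno`; the output file name is illustrative):
cargo install inferno
inferno-flamegraph < tracing.folded > tracing-flamegraph.svg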
@@ -1,23 +1,25 @@
|
||||
#![allow(clippy::disallowed_names)]
|
||||
|
||||
use std::io::Write;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
|
||||
use red_knot_workspace::db::{Db, RootDatabase};
|
||||
use red_knot_workspace::watch::{directory_watcher, ChangeEvent, WorkspaceWatcher};
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
|
||||
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::files::{system_path_to_file, File, FileError};
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::testing::{setup_logging, setup_logging_with_filter};
|
||||
use ruff_db::Upcast;
|
||||
|
||||
struct TestCase {
|
||||
db: RootDatabase,
|
||||
watcher: Option<WorkspaceWatcher>,
|
||||
changes_receiver: crossbeam::channel::Receiver<Vec<ChangeEvent>>,
|
||||
changes_receiver: crossbeam::channel::Receiver<Vec<watch::ChangeEvent>>,
|
||||
/// The temporary directory that contains the test files.
|
||||
/// We need to hold on to it in the test case or the temp files get deleted.
|
||||
_temp_dir: tempfile::TempDir,
|
||||
@@ -38,36 +40,12 @@ impl TestCase {
|
||||
&self.db
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn stop_watch<M>(&mut self, matcher: M) -> Vec<ChangeEvent>
|
||||
where
|
||||
M: MatchEvent,
|
||||
{
|
||||
// track_caller is unstable for lambdas -> That's why this is a fn
|
||||
#[track_caller]
|
||||
fn panic_with_formatted_events(events: Vec<ChangeEvent>) -> Vec<ChangeEvent> {
|
||||
panic!(
|
||||
"Didn't observe expected change:\n{}",
|
||||
events
|
||||
.into_iter()
|
||||
.map(|event| format!(" - {event:?}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
)
|
||||
}
|
||||
|
||||
self.try_stop_watch(matcher, Duration::from_secs(10))
|
||||
.unwrap_or_else(panic_with_formatted_events)
|
||||
fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
|
||||
self.try_stop_watch(Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
}
|
||||
|
||||
fn try_stop_watch<M>(
|
||||
&mut self,
|
||||
mut matcher: M,
|
||||
timeout: Duration,
|
||||
) -> Result<Vec<ChangeEvent>, Vec<ChangeEvent>>
|
||||
where
|
||||
M: MatchEvent,
|
||||
{
|
||||
fn try_stop_watch(&mut self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
|
||||
tracing::debug!("Try stopping watch with timeout {:?}", timeout);
|
||||
|
||||
let watcher = self
|
||||
@@ -75,50 +53,36 @@ impl TestCase {
|
||||
.take()
|
||||
.expect("Cannot call `stop_watch` more than once");
|
||||
|
||||
let start = Instant::now();
|
||||
let mut all_events = Vec::new();
|
||||
|
||||
loop {
|
||||
let events = self
|
||||
.changes_receiver
|
||||
.recv_timeout(Duration::from_millis(100))
|
||||
.unwrap_or_default();
|
||||
|
||||
if events
|
||||
.iter()
|
||||
.any(|event| matcher.match_event(event) || event.is_rescan())
|
||||
{
|
||||
all_events.extend(events);
|
||||
break;
|
||||
}
|
||||
|
||||
all_events.extend(events);
|
||||
|
||||
if start.elapsed() > timeout {
|
||||
return Err(all_events);
|
||||
}
|
||||
}
|
||||
let mut all_events = self
|
||||
.changes_receiver
|
||||
.recv_timeout(timeout)
|
||||
.unwrap_or_default();
|
||||
|
||||
watcher.flush();
|
||||
tracing::debug!("Flushed file watcher");
|
||||
watcher.stop();
|
||||
tracing::debug!("Stopping file watcher");
|
||||
|
||||
// Consume remaining events
|
||||
for event in &self.changes_receiver {
|
||||
all_events.extend(event);
|
||||
}
|
||||
|
||||
Ok(all_events)
|
||||
if all_events.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(all_events)
|
||||
}
|
||||
|
||||
fn take_watch_changes(&self) -> Vec<ChangeEvent> {
|
||||
fn take_watch_changes(&self) -> Vec<watch::ChangeEvent> {
|
||||
self.try_take_watch_changes(Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
}
|
||||
|
||||
fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<ChangeEvent>> {
|
||||
let watcher = self.watcher.as_ref()?;
|
||||
fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
|
||||
let Some(watcher) = &self.watcher else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let mut all_events = self
|
||||
.changes_receiver
|
||||
@@ -140,7 +104,7 @@ impl TestCase {
|
||||
Some(all_events)
|
||||
}
|
||||
|
||||
fn apply_changes(&mut self, changes: Vec<ChangeEvent>) {
|
||||
fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
|
||||
self.db.apply_changes(changes, Some(&self.configuration));
|
||||
}
|
||||
|
||||
@@ -176,23 +140,6 @@ impl TestCase {
|
||||
}
|
||||
}
|
||||
|
||||
trait MatchEvent {
|
||||
fn match_event(&mut self, event: &ChangeEvent) -> bool;
|
||||
}
|
||||
|
||||
fn event_for_file(name: &str) -> impl MatchEvent + '_ {
|
||||
|event: &ChangeEvent| event.file_name() == Some(name)
|
||||
}
|
||||
|
||||
impl<F> MatchEvent for F
|
||||
where
|
||||
F: FnMut(&ChangeEvent) -> bool,
|
||||
{
|
||||
fn match_event(&mut self, event: &ChangeEvent) -> bool {
|
||||
(*self)(event)
|
||||
}
|
||||
}
|
||||
|
||||
trait SetupFiles {
|
||||
fn setup(self, root_path: &SystemPath, workspace_path: &SystemPath) -> anyhow::Result<()>;
|
||||
}
|
||||
@@ -368,7 +315,7 @@ fn new_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -391,7 +338,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -413,7 +360,7 @@ fn changed_file() -> anyhow::Result<()> {
|
||||
|
||||
update_file(&foo_path, "print('Version 2')")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
assert!(!changes.is_empty());
|
||||
|
||||
@@ -438,7 +385,7 @@ fn deleted_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::remove_file(foo_path.as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -470,7 +417,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
|
||||
trash_path.join("foo.py").as_std_path(),
|
||||
)?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -502,7 +449,7 @@ fn move_file_to_workspace() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::rename(foo_path.as_std_path(), foo_in_workspace_path.as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -530,7 +477,7 @@ fn rename_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("bar.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -574,7 +521,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
|
||||
std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path())
|
||||
.with_context(|| "Failed to move sub directory")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -633,7 +580,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
|
||||
std::fs::rename(sub_path.as_std_path(), trashed_sub.as_std_path())
|
||||
.with_context(|| "Failed to move the sub directory to the trash")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -657,6 +604,8 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
|
||||
|
||||
#[test]
|
||||
fn directory_renamed() -> anyhow::Result<()> {
|
||||
let _tracing = setup_logging_with_filter("file_watching=TRACE,red_knot=TRACE");
|
||||
|
||||
let mut case = setup([
|
||||
("bar.py", "import sub.a"),
|
||||
("sub/__init__.py", ""),
|
||||
@@ -695,8 +644,11 @@ fn directory_renamed() -> anyhow::Result<()> {
|
||||
std::fs::rename(sub_path.as_std_path(), foo_baz.as_std_path())
|
||||
.with_context(|| "Failed to move the sub directory")?;
|
||||
|
||||
// Linux and windows only emit an event for the newly created root directory, but not for every new component.
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
for event in &changes {
|
||||
tracing::debug!("Event: {:?}", event);
|
||||
}
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -769,7 +721,7 @@ fn directory_deleted() -> anyhow::Result<()> {
|
||||
std::fs::remove_dir_all(sub_path.as_std_path())
|
||||
.with_context(|| "Failed to remove the sub directory")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -806,7 +758,7 @@ fn search_path() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("a.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -837,7 +789,7 @@ fn add_search_path() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("a.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -866,9 +818,9 @@ fn remove_search_path() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
let changes = case.try_stop_watch(|_: &ChangeEvent| true, Duration::from_millis(100));
|
||||
let changes = case.try_stop_watch(Duration::from_millis(100));
|
||||
|
||||
assert_eq!(changes, Err(vec![]));
|
||||
assert_eq!(changes, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -906,7 +858,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
|
||||
"os: 3.0-",
|
||||
)?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("VERSIONS"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -959,7 +911,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
|
||||
// Write to the hard link target.
|
||||
update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1030,7 +982,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {
|
||||
// Write to the hard link target.
|
||||
update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
|
||||
|
||||
let changes = case.stop_watch(ChangeEvent::is_changed);
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1069,7 +1021,7 @@ mod unix {
|
||||
)
|
||||
.with_context(|| "Failed to set file permissions.")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1167,7 +1119,7 @@ mod unix {
|
||||
update_file(baz_workspace, "def baz(): print('Version 3')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("baz.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1238,7 +1190,7 @@ mod unix {
|
||||
update_file(&patched_bar_baz, "def baz(): print('Version 2')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("baz.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1346,7 +1298,7 @@ mod unix {
|
||||
update_file(&baz_original, "def baz(): print('Version 2')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("baz.py"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1400,7 +1352,7 @@ fn nested_packages_delete_root() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::remove_file(case.workspace_path("pyproject.toml").as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch(ChangeEvent::is_deleted);
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1412,6 +1364,7 @@ fn nested_packages_delete_root() -> anyhow::Result<()> {
|
||||
|
||||
#[test]
|
||||
fn added_package() -> anyhow::Result<()> {
|
||||
let _ = setup_logging();
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
@@ -1453,7 +1406,7 @@ fn added_package() -> anyhow::Result<()> {
|
||||
)
|
||||
.context("failed to write pyproject.toml for package b")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("pyproject.toml"));
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1496,7 +1449,7 @@ fn removed_package() -> anyhow::Result<()> {
|
||||
std::fs::remove_dir_all(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to remove package 'b'")?;
|
||||
|
||||
let changes = case.stop_watch(ChangeEvent::is_deleted);
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
|
||||
@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
@@ -50,8 +49,6 @@ anyhow = { workspace = true }
dir-test = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }

[lints]
workspace = true

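For context on the `quickcheck`/`quickcheck_macros` dev-dependencies in the hunk above (not part of the diff): they enable property-based tests. A minimal sketch of the usual shape, with a hypothetical property not taken from this repository:

```rust
// Hypothetical property-based test; relies only on the `quickcheck` and
// `quickcheck_macros` dev-dependencies shown above.
#[cfg(test)]
mod property_tests {
    use quickcheck_macros::quickcheck;

    // Reversing a vector twice must give back the original vector.
    #[quickcheck]
    fn double_reverse_is_identity(xs: Vec<u32>) -> bool {
        let mut ys = xs.clone();
        ys.reverse();
        ys.reverse();
        ys == xs
    }
}
```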
@@ -1,75 +0,0 @@
# Any

## Annotation

`typing.Any` is a way to name the Any type.

```py
from typing import Any

x: Any = 1
x = "foo"

def f():
    reveal_type(x)  # revealed: Any
```

## Aliased to a different name

If you alias `typing.Any` to another name, we still recognize that as a spelling of the Any type.

```py
from typing import Any as RenamedAny

x: RenamedAny = 1
x = "foo"

def f():
    reveal_type(x)  # revealed: Any
```

## Shadowed class

If you define your own class named `Any`, using that in a type expression refers to your class, and
isn't a spelling of the Any type.

```py
class Any:
    pass

x: Any

def f():
    reveal_type(x)  # revealed: Any

# This verifies that we're not accidentally seeing typing.Any, since str is assignable
# to that but not to our locally defined class.
y: Any = "not an Any"  # error: [invalid-assignment]
```

## Subclass

The spec allows you to define subclasses of `Any`.

TODO: Handle assignments correctly. `Subclass` has an unknown superclass, which might be `int`. The
assignment to `x` should not be allowed, even when the unknown superclass is `int`. The assignment
to `y` should be allowed, since `Subclass` might have `int` as a superclass, and is therefore
assignable to `int`.

```py
from typing import Any

class Subclass(Any):
    pass

reveal_type(Subclass.__mro__)  # revealed: tuple[Literal[Subclass], Any, Literal[object]]

x: Subclass = 1  # error: [invalid-assignment]
# TODO: no diagnostic
y: int = Subclass()  # error: [invalid-assignment]

def f() -> Subclass:
    pass

reveal_type(f())  # revealed: Subclass
```
@@ -1,152 +0,0 @@
|
||||
# `LiteralString`
|
||||
|
||||
`LiteralString` represents a string that is either defined directly within the source code or is
|
||||
made up of such components.
|
||||
|
||||
Parts of the testcases defined here were adapted from [the specification's examples][1].
|
||||
|
||||
## Usages
|
||||
|
||||
### Valid places
|
||||
|
||||
It can be used anywhere a type is accepted:
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
x: LiteralString
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: LiteralString
|
||||
```
|
||||
|
||||
### Within `Literal`
|
||||
|
||||
`LiteralString` cannot be used within `Literal`:
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal, LiteralString
|
||||
|
||||
bad_union: Literal["hello", LiteralString] # error: [invalid-literal-parameter]
|
||||
bad_nesting: Literal[LiteralString] # error: [invalid-literal-parameter]
|
||||
```
|
||||
|
||||
### Parametrized
|
||||
|
||||
`LiteralString` cannot be parametrized.
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
a: LiteralString[str] # error: [invalid-type-parameter]
|
||||
b: LiteralString["foo"] # error: [invalid-type-parameter]
|
||||
```
|
||||
|
||||
### As a base class
|
||||
|
||||
Subclassing `LiteralString` leads to a runtime error.
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
class C(LiteralString): ... # error: [invalid-base]
|
||||
```
|
||||
|
||||
## Inference
|
||||
|
||||
### Common operations
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
foo: LiteralString = "foo"
|
||||
reveal_type(foo) # revealed: Literal["foo"]
|
||||
|
||||
bar: LiteralString = "bar"
|
||||
reveal_type(foo + bar) # revealed: Literal["foobar"]
|
||||
|
||||
baz: LiteralString = "baz"
|
||||
baz += foo
|
||||
reveal_type(baz) # revealed: Literal["bazfoo"]
|
||||
|
||||
qux = (foo, bar)
|
||||
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
||||
|
||||
# TODO: Infer "LiteralString"
|
||||
reveal_type(foo.join(qux)) # revealed: @Todo(call todo)
|
||||
|
||||
template: LiteralString = "{}, {}"
|
||||
reveal_type(template) # revealed: Literal["{}, {}"]
|
||||
# TODO: Infer `LiteralString`
|
||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(call todo)
|
||||
```
|
||||
|
||||
### Assignability
|
||||
|
||||
`Literal[""]` is assignable to `LiteralString`, and `LiteralString` is assignable to `str`, but not
|
||||
vice versa.
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal, LiteralString
|
||||
|
||||
def coinflip() -> bool:
|
||||
return True
|
||||
|
||||
foo_1: Literal["foo"] = "foo"
|
||||
bar_1: LiteralString = foo_1 # fine
|
||||
|
||||
foo_2 = "foo" if coinflip() else "bar"
|
||||
reveal_type(foo_2) # revealed: Literal["foo", "bar"]
|
||||
bar_2: LiteralString = foo_2 # fine
|
||||
|
||||
foo_3: LiteralString = "foo" * 1_000_000_000
|
||||
bar_3: str = foo_2 # fine
|
||||
|
||||
baz_1: str = str()
|
||||
qux_1: LiteralString = baz_1 # error: [invalid-assignment]
|
||||
|
||||
baz_2: LiteralString = "baz" * 1_000_000_000
|
||||
qux_2: Literal["qux"] = baz_2 # error: [invalid-assignment]
|
||||
|
||||
baz_3 = "foo" if coinflip() else 1
|
||||
reveal_type(baz_3) # revealed: Literal["foo"] | Literal[1]
|
||||
qux_3: LiteralString = baz_3 # error: [invalid-assignment]
|
||||
```
|
||||
|
||||
### Narrowing
|
||||
|
||||
```py
|
||||
from typing_extensions import LiteralString
|
||||
|
||||
lorem: LiteralString = "lorem" * 1_000_000_000
|
||||
|
||||
reveal_type(lorem) # revealed: LiteralString
|
||||
|
||||
if lorem == "ipsum":
|
||||
reveal_type(lorem) # revealed: Literal["ipsum"]
|
||||
|
||||
reveal_type(lorem) # revealed: LiteralString
|
||||
|
||||
if "" < lorem == "ipsum":
|
||||
reveal_type(lorem) # revealed: Literal["ipsum"]
|
||||
```
|
||||
|
||||
## `typing.LiteralString`
|
||||
|
||||
`typing.LiteralString` is only available in Python 3.11 and later:
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
target-version = "3.11"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import LiteralString
|
||||
|
||||
x: LiteralString = "foo"
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: LiteralString
|
||||
```
|
||||
|
||||
[1]: https://typing.readthedocs.io/en/latest/spec/literal.html#literalstring
|
||||
@@ -19,11 +19,12 @@ reveal_type(stop())
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing_extensions import NoReturn, Never, Any
|
||||
from typing import NoReturn, Never, Any
|
||||
|
||||
# error: [invalid-type-parameter] "Type `typing.Never` expected no type parameter"
|
||||
x: Never[int]
|
||||
a1: NoReturn
|
||||
# TODO: Test `Never` is only available in python >= 3.11
|
||||
a2: Never
|
||||
b1: Any
|
||||
b2: int
|
||||
@@ -45,20 +46,17 @@ def f():
|
||||
v6: Never = 1
|
||||
```
|
||||
|
||||
## `typing.Never`
|
||||
|
||||
`typing.Never` is only available in Python 3.11 and later:
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
target-version = "3.11"
|
||||
```
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing import Never
|
||||
from typing_extensions import NoReturn, Never
|
||||
|
||||
x: Never
|
||||
x: NoReturn
|
||||
y: Never
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Never
|
||||
# revealed: Never
|
||||
reveal_type(x)
|
||||
# revealed: Never
|
||||
reveal_type(y)
|
||||
```
|
||||
|
||||
@@ -8,8 +8,8 @@ from typing_extensions import TypeVarTuple
|
||||
Ts = TypeVarTuple("Ts")
|
||||
|
||||
def append_int(*args: *Ts) -> tuple[*Ts, int]:
|
||||
# TODO: tuple[*Ts]
|
||||
reveal_type(args) # revealed: tuple
|
||||
# TODO: should show some representation of the variadic generic type
|
||||
reveal_type(args) # revealed: @Todo(function parameter type)
|
||||
|
||||
return (*args, 1)
|
||||
|
||||
|
||||
@@ -95,37 +95,36 @@ reveal_type(f2()) # revealed: Literal["Foo", "Bar"]
|
||||
## Various string kinds
|
||||
|
||||
```py
|
||||
# error: [raw-string-type-annotation] "Type expressions cannot use raw string literal"
|
||||
# error: [annotation-raw-string] "Type expressions cannot use raw string literal"
|
||||
def f1() -> r"int":
|
||||
return 1
|
||||
|
||||
# error: [fstring-type-annotation] "Type expressions cannot use f-strings"
|
||||
# error: [annotation-f-string] "Type expressions cannot use f-strings"
|
||||
def f2() -> f"int":
|
||||
return 1
|
||||
|
||||
# error: [byte-string-type-annotation] "Type expressions cannot use bytes literal"
|
||||
# error: [annotation-byte-string] "Type expressions cannot use bytes literal"
|
||||
def f3() -> b"int":
|
||||
return 1
|
||||
|
||||
def f4() -> "int":
|
||||
return 1
|
||||
|
||||
# error: [implicit-concatenated-string-type-annotation] "Type expressions cannot span multiple string literals"
|
||||
# error: [annotation-implicit-concat] "Type expressions cannot span multiple string literals"
|
||||
def f5() -> "in" "t":
|
||||
return 1
|
||||
|
||||
# error: [escape-character-in-forward-annotation] "Type expressions cannot contain escape characters"
|
||||
# error: [annotation-escape-character] "Type expressions cannot contain escape characters"
|
||||
def f6() -> "\N{LATIN SMALL LETTER I}nt":
|
||||
return 1
|
||||
|
||||
# error: [escape-character-in-forward-annotation] "Type expressions cannot contain escape characters"
|
||||
# error: [annotation-escape-character] "Type expressions cannot contain escape characters"
|
||||
def f7() -> "\x69nt":
|
||||
return 1
|
||||
|
||||
def f8() -> """int""":
|
||||
return 1
|
||||
|
||||
# error: [byte-string-type-annotation] "Type expressions cannot use bytes literal"
|
||||
def f9() -> "b'int'":
|
||||
return 1
|
||||
|
||||
@@ -208,9 +207,9 @@ i: "{i for i in range(5)}"
|
||||
j: "{i: i for i in range(5)}"
|
||||
k: "(i for i in range(5))"
|
||||
l: "await 1"
|
||||
# error: [invalid-syntax-in-forward-annotation]
|
||||
# error: [forward-annotation-syntax-error]
|
||||
m: "yield 1"
|
||||
# error: [invalid-syntax-in-forward-annotation]
|
||||
# error: [forward-annotation-syntax-error]
|
||||
n: "yield from 1"
|
||||
o: "1 < 2"
|
||||
p: "call()"
|
||||
|
||||
@@ -50,7 +50,7 @@ reveal_type(b) # revealed: tuple[int]
|
||||
reveal_type(c) # revealed: tuple[str, int]
|
||||
reveal_type(d) # revealed: tuple[tuple[str, str], tuple[int, int]]
|
||||
|
||||
# TODO: homogeneous tuples, PEP-646 tuples
|
||||
# TODO: homogenous tuples, PEP-646 tuples
|
||||
reveal_type(e) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(f) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
@@ -18,3 +18,43 @@ Note: in this particular example, one could argue that the most likely error wou
|
||||
of the `x`/`foo` definitions, and so it could be desirable to infer `Literal[1]` for the type of
|
||||
`x`. On the other hand, there might be a variable `fob` a little higher up in this file, and the
|
||||
actual error might have been just a typo. Inferring `Unknown` thus seems like the safest option.
|
||||
|
||||
## Unbound class variable
|
||||
|
||||
Name lookups within a class scope fall back to globals, but lookups of class attributes don't.
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
flag = bool_instance()
|
||||
x = 1
|
||||
|
||||
class C:
|
||||
y = x
|
||||
if flag:
|
||||
x = 2
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[2]
|
||||
reveal_type(C.y) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Possibly unbound in class and global scope
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
if bool_instance():
|
||||
x = "abc"
|
||||
|
||||
class C:
|
||||
if bool_instance():
|
||||
x = 1
|
||||
|
||||
# error: [possibly-unresolved-reference]
|
||||
y = x
|
||||
|
||||
reveal_type(C.y) # revealed: Literal[1] | Literal["abc"]
|
||||
```
|
||||
|
||||
@@ -36,6 +36,8 @@ from typing import Callable
|
||||
def foo() -> int:
|
||||
return 42
|
||||
|
||||
# TODO: This should be resolved once we understand `Callable` annotations
|
||||
# error: [annotation-with-invalid-expression]
|
||||
def decorator(func) -> Callable[[], int]:
|
||||
return foo
|
||||
|
||||
|
||||
@@ -1,219 +0,0 @@
|
||||
# Length (`len()`)
|
||||
|
||||
## Literal and constructed iterables
|
||||
|
||||
### Strings and bytes literals
|
||||
|
||||
```py
|
||||
reveal_type(len("no\rmal")) # revealed: Literal[6]
|
||||
reveal_type(len(r"aw stri\ng")) # revealed: Literal[10]
|
||||
reveal_type(len(r"conca\t" "ena\tion")) # revealed: Literal[14]
|
||||
reveal_type(len(b"ytes lite" rb"al")) # revealed: Literal[11]
|
||||
reveal_type(len("𝒰𝕹🄸©🕲𝕕ℇ")) # revealed: Literal[7]
|
||||
|
||||
reveal_type( # revealed: Literal[7]
|
||||
len(
|
||||
"""foo
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
reveal_type( # revealed: Literal[9]
|
||||
len(
|
||||
r"""foo\r
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
reveal_type( # revealed: Literal[7]
|
||||
len(
|
||||
b"""foo
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
reveal_type( # revealed: Literal[9]
|
||||
len(
|
||||
rb"""foo\r
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
```
|
||||
|
||||
### Tuples
|
||||
|
||||
```py
|
||||
reveal_type(len(())) # revealed: Literal[0]
|
||||
reveal_type(len((1,))) # revealed: Literal[1]
|
||||
reveal_type(len((1, 2))) # revealed: Literal[2]
|
||||
|
||||
# TODO: Handle constructor calls
|
||||
reveal_type(len(tuple())) # revealed: int
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[0]
|
||||
reveal_type(len((*[],))) # revealed: Literal[1]
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[1]
|
||||
reveal_type( # revealed: Literal[2]
|
||||
len(
|
||||
(
|
||||
*[],
|
||||
1,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[2]
|
||||
reveal_type(len((*[], 1, 2))) # revealed: Literal[3]
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[0]
|
||||
reveal_type(len((*[], *{}))) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
### Lists, sets and dictionaries
|
||||
|
||||
```py
|
||||
reveal_type(len([])) # revealed: int
|
||||
reveal_type(len([1])) # revealed: int
|
||||
reveal_type(len([1, 2])) # revealed: int
|
||||
reveal_type(len([*{}, *dict()])) # revealed: int
|
||||
|
||||
reveal_type(len({})) # revealed: int
|
||||
reveal_type(len({**{}})) # revealed: int
|
||||
reveal_type(len({**{}, **{}})) # revealed: int
|
||||
|
||||
reveal_type(len({1})) # revealed: int
|
||||
reveal_type(len({1, 2})) # revealed: int
|
||||
reveal_type(len({*[], 2})) # revealed: int
|
||||
|
||||
reveal_type(len(list())) # revealed: int
|
||||
reveal_type(len(set())) # revealed: int
|
||||
reveal_type(len(dict())) # revealed: int
|
||||
reveal_type(len(frozenset())) # revealed: int
|
||||
```
|
||||
|
||||
## `__len__`
|
||||
|
||||
The returned value of `__len__` is implicitly and recursively converted to `int`.
|
||||
|
||||
### Literal integers
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class Zero:
|
||||
def __len__(self) -> Literal[0]: ...
|
||||
|
||||
class ZeroOrOne:
|
||||
def __len__(self) -> Literal[0, 1]: ...
|
||||
|
||||
class ZeroOrTrue:
|
||||
def __len__(self) -> Literal[0, True]: ...
|
||||
|
||||
class OneOrFalse:
|
||||
def __len__(self) -> Literal[1] | Literal[False]: ...
|
||||
|
||||
class OneOrFoo:
|
||||
def __len__(self) -> Literal[1, "foo"]: ...
|
||||
|
||||
class ZeroOrStr:
|
||||
def __len__(self) -> Literal[0] | str: ...
|
||||
|
||||
reveal_type(len(Zero())) # revealed: Literal[0]
|
||||
reveal_type(len(ZeroOrOne())) # revealed: Literal[0, 1]
|
||||
reveal_type(len(ZeroOrTrue())) # revealed: Literal[0, 1]
|
||||
reveal_type(len(OneOrFalse())) # revealed: Literal[0, 1]
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(OneOrFoo())) # revealed: int
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(ZeroOrStr())) # revealed: int
|
||||
```
|
||||
|
||||
### Literal booleans
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class LiteralTrue:
|
||||
def __len__(self) -> Literal[True]: ...
|
||||
|
||||
class LiteralFalse:
|
||||
def __len__(self) -> Literal[False]: ...
|
||||
|
||||
reveal_type(len(LiteralTrue())) # revealed: Literal[1]
|
||||
reveal_type(len(LiteralFalse())) # revealed: Literal[0]
|
||||
```
|
||||
|
||||
### Enums
|
||||
|
||||
```py
|
||||
from enum import Enum, auto
|
||||
from typing import Literal
|
||||
|
||||
class SomeEnum(Enum):
|
||||
AUTO = auto()
|
||||
INT = 2
|
||||
STR = "4"
|
||||
TUPLE = (8, "16")
|
||||
INT_2 = 3_2
|
||||
|
||||
class Auto:
|
||||
def __len__(self) -> Literal[SomeEnum.AUTO]: ...
|
||||
|
||||
class Int:
|
||||
def __len__(self) -> Literal[SomeEnum.INT]: ...
|
||||
|
||||
class Str:
|
||||
def __len__(self) -> Literal[SomeEnum.STR]: ...
|
||||
|
||||
class Tuple:
|
||||
def __len__(self) -> Literal[SomeEnum.TUPLE]: ...
|
||||
|
||||
class IntUnion:
|
||||
def __len__(self) -> Literal[SomeEnum.INT, SomeEnum.INT_2]: ...
|
||||
|
||||
reveal_type(len(Auto())) # revealed: int
|
||||
reveal_type(len(Int())) # revealed: Literal[2]
|
||||
reveal_type(len(Str())) # revealed: int
|
||||
reveal_type(len(Tuple())) # revealed: int
|
||||
reveal_type(len(IntUnion())) # revealed: Literal[2, 32]
|
||||
```
|
||||
|
||||
### Negative integers
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class Negative:
|
||||
def __len__(self) -> Literal[-1]: ...
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(Negative())) # revealed: int
|
||||
```
|
||||
|
||||
### Wrong signature
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class SecondOptionalArgument:
|
||||
def __len__(self, v: int = 0) -> Literal[0]: ...
|
||||
|
||||
class SecondRequiredArgument:
|
||||
def __len__(self, v: int) -> Literal[1]: ...
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(SecondOptionalArgument())) # revealed: Literal[0]
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(SecondRequiredArgument())) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
### No `__len__`
|
||||
|
||||
```py
|
||||
class NoDunderLen:
|
||||
pass
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(NoDunderLen())) # revealed: int
|
||||
```
|
||||
@@ -1,75 +0,0 @@
# Function parameter types

Within a function scope, the declared type of each parameter is its annotated type (or Unknown if
not annotated). The initial inferred type is the union of the declared type with the type of the
default value expression (if any). If both are fully static types, this union should simplify to the
annotated type (since the default value type must be assignable to the annotated type, and for fully
static types this means subtype-of, which simplifies in unions). But if the annotated type is
Unknown or another non-fully-static type, the default value type may still be relevant as lower
bound.

The variadic parameter is a variadic tuple of its annotated type; the variadic-keywords parameter is
a dictionary from strings to its annotated type.

## Parameter kinds

```py
from typing import Literal

def f(a, b: int, c=1, d: int = 2, /, e=3, f: Literal[4] = 4, *args: object, g=5, h: Literal[6] = 6, **kwargs: str):
    reveal_type(a)  # revealed: Unknown
    reveal_type(b)  # revealed: int
    reveal_type(c)  # revealed: Unknown | Literal[1]
    reveal_type(d)  # revealed: int
    reveal_type(e)  # revealed: Unknown | Literal[3]
    reveal_type(f)  # revealed: Literal[4]
    reveal_type(g)  # revealed: Unknown | Literal[5]
    reveal_type(h)  # revealed: Literal[6]

    # TODO: should be `tuple[object, ...]` (needs generics)
    reveal_type(args)  # revealed: tuple

    # TODO: should be `dict[str, str]` (needs generics)
    reveal_type(kwargs)  # revealed: dict
```

## Unannotated variadic parameters

...are inferred as tuple of Unknown or dict from string to Unknown.

```py
def g(*args, **kwargs):
    # TODO: should be `tuple[Unknown, ...]` (needs generics)
    reveal_type(args)  # revealed: tuple

    # TODO: should be `dict[str, Unknown]` (needs generics)
    reveal_type(kwargs)  # revealed: dict
```

## Annotation is present but not a fully static type

The default value type should be a lower bound on the inferred type.

```py
from typing import Any

def f(x: Any = 1):
    reveal_type(x)  # revealed: Any | Literal[1]
```

## Default value type must be assignable to annotated type

The default value type must be assignable to the annotated type. If not, we emit a diagnostic, and
fall back to inferring the annotated type, ignoring the default value type.

```py
# error: [invalid-parameter-default]
def f(x: int = "foo"):
    reveal_type(x)  # revealed: int

# The check is assignable-to, not subtype-of, so this is fine:
from typing import Any

def g(x: Any = "foo"):
    reveal_type(x)  # revealed: Any | Literal["foo"]
```
@@ -73,7 +73,7 @@ def f[T]():

A typevar with less than two constraints emits a diagnostic:

```py
# error: [invalid-type-variable-constraints] "TypeVar must have at least two constrained types"
# error: [invalid-typevar-constraints] "TypeVar must have at least two constrained types"
def f[T: (int,)]():
    pass
```

@@ -55,24 +55,3 @@ from b import x
|
||||
|
||||
x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]"
|
||||
```
|
||||
|
||||
## Import cycle
|
||||
|
||||
```py path=a.py
|
||||
class A: ...
|
||||
|
||||
reveal_type(A.__mro__) # revealed: tuple[Literal[A], Literal[object]]
|
||||
import b
|
||||
|
||||
class C(b.B): ...
|
||||
|
||||
reveal_type(C.__mro__) # revealed: tuple[Literal[C], Literal[B], Literal[A], Literal[object]]
|
||||
```
|
||||
|
||||
```py path=b.py
|
||||
from a import A
|
||||
|
||||
class B(A): ...
|
||||
|
||||
reveal_type(B.__mro__) # revealed: tuple[Literal[B], Literal[A], Literal[object]]
|
||||
```
|
||||
|
||||
@@ -1,9 +0,0 @@
# Invalid syntax

## Missing module name

```py
from import bar  # error: [invalid-syntax]

reveal_type(bar)  # revealed: Unknown
```
@@ -1,93 +0,0 @@
|
||||
# Syntax errors
|
||||
|
||||
Test cases to ensure that red knot does not panic if there are syntax errors in the source code.
|
||||
|
||||
The parser cannot recover from certain syntax errors completely which is why the number of syntax
|
||||
errors could be more than expected in the following examples. For instance, if there's a keyword
|
||||
(like `for`) in the middle of another statement (like function definition), then it's more likely
|
||||
that the rest of the tokens are going to be part of the `for` statement and not the function
|
||||
definition. But, it's not necessary that the remaining tokens are valid in the context of a `for`
|
||||
statement.
|
||||
|
||||
## Keyword as identifiers
|
||||
|
||||
When keywords are used as identifiers, the parser recovers from this syntax error by emitting an
|
||||
error and including the text value of the keyword to create the `Identifier` node.
|
||||
|
||||
### Name expression
|
||||
|
||||
#### Assignment
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
pass = 1
|
||||
```
|
||||
|
||||
#### Type alias
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
type pass = 1
|
||||
```
|
||||
|
||||
#### Function definition
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
def True(for):
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
pass
|
||||
```
|
||||
|
||||
#### For
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `pass` used when not defined"
|
||||
for while in pass:
|
||||
pass
|
||||
```
|
||||
|
||||
#### While
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `in` used when not defined"
|
||||
while in:
|
||||
pass
|
||||
```
|
||||
|
||||
#### Match
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `match` used when not defined"
|
||||
match while:
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `case` used when not defined"
|
||||
case in:
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
pass
|
||||
```
|
||||
|
||||
### Attribute expression
|
||||
|
||||
```py
|
||||
# TODO: Check when support for attribute expressions is added
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `foo` used when not defined"
|
||||
for x in foo.pass:
|
||||
pass
|
||||
```
|
||||
@@ -1,7 +0,0 @@
# Ellipsis literals

## Simple

```py
reveal_type(...)  # revealed: EllipsisType | ellipsis
```
@@ -75,6 +75,7 @@ Literal: _SpecialForm
|
||||
```py
|
||||
from other import Literal
|
||||
|
||||
# error: [annotation-with-invalid-expression]
|
||||
a1: Literal[26]
|
||||
|
||||
def f():
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
This test makes sure that `red_knot_test` correctly parses the TOML configuration blocks and applies
|
||||
the correct settings hierarchically.
|
||||
|
||||
The following configuration will be attached to the *root* section (without any heading):
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
target-version = "3.10"
|
||||
```
|
||||
|
||||
# Basic
|
||||
|
||||
Here, we simply make sure that we pick up the global configuration from the root section:
|
||||
|
||||
```py
|
||||
reveal_type(sys.version_info[:2] == (3, 10)) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
# Inheritance
|
||||
|
||||
## Child
|
||||
|
||||
### Grandchild
|
||||
|
||||
The same should work for arbitrarily nested sections:
|
||||
|
||||
```py
|
||||
reveal_type(sys.version_info[:2] == (3, 10)) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
# Overwriting
|
||||
|
||||
Here, we make sure that we can overwrite the global configuration in a child section:
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
target-version = "3.11"
|
||||
```
|
||||
|
||||
```py
|
||||
reveal_type(sys.version_info[:2] == (3, 11)) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
# No global state
|
||||
|
||||
There is no global state. This section should again use the root configuration:
|
||||
|
||||
```py
|
||||
reveal_type(sys.version_info[:2] == (3, 10)) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
# Overwriting affects children
|
||||
|
||||
Children in this section should all use the section configuration:
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
target-version = "3.12"
|
||||
```
|
||||
|
||||
## Child
|
||||
|
||||
### Grandchild
|
||||
|
||||
```py
|
||||
reveal_type(sys.version_info[:2] == (3, 12)) # revealed: Literal[True]
|
||||
```
|
||||
@@ -179,9 +179,9 @@ reveal_type(A.__class__) # revealed: @Todo(metaclass not a class)
|
||||
Retrieving the metaclass of a cyclically defined class should not cause an infinite loop.
|
||||
|
||||
```py path=a.pyi
|
||||
class A(B): ... # error: [cyclic-class-definition]
|
||||
class B(C): ... # error: [cyclic-class-definition]
|
||||
class C(A): ... # error: [cyclic-class-definition]
|
||||
class A(B): ... # error: [cyclic-class-def]
|
||||
class B(C): ... # error: [cyclic-class-def]
|
||||
class C(A): ... # error: [cyclic-class-def]
|
||||
|
||||
reveal_type(A.__class__) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -256,7 +256,7 @@ class O: ...
|
||||
class X(O): ...
|
||||
class Y(O): ...
|
||||
|
||||
if returns_bool():
|
||||
if bool():
|
||||
foo = Y
|
||||
else:
|
||||
foo = object
|
||||
@@ -348,14 +348,14 @@ reveal_type(unknown_object.__mro__) # revealed: Unknown
|
||||
These are invalid, but we need to be able to handle them gracefully without panicking.
|
||||
|
||||
```py path=a.pyi
|
||||
class Foo(Foo): ... # error: [cyclic-class-definition]
|
||||
class Foo(Foo): ... # error: [cyclic-class-def]
|
||||
|
||||
reveal_type(Foo) # revealed: Literal[Foo]
|
||||
reveal_type(Foo.__mro__) # revealed: tuple[Literal[Foo], Unknown, Literal[object]]
|
||||
|
||||
class Bar: ...
|
||||
class Baz: ...
|
||||
class Boz(Bar, Baz, Boz): ... # error: [cyclic-class-definition]
|
||||
class Boz(Bar, Baz, Boz): ... # error: [cyclic-class-def]
|
||||
|
||||
reveal_type(Boz) # revealed: Literal[Boz]
|
||||
reveal_type(Boz.__mro__) # revealed: tuple[Literal[Boz], Unknown, Literal[object]]
|
||||
@@ -366,9 +366,9 @@ reveal_type(Boz.__mro__) # revealed: tuple[Literal[Boz], Unknown, Literal[objec
|
||||
These are similarly unlikely, but we still shouldn't crash:
|
||||
|
||||
```py path=a.pyi
|
||||
class Foo(Bar): ... # error: [cyclic-class-definition]
|
||||
class Bar(Baz): ... # error: [cyclic-class-definition]
|
||||
class Baz(Foo): ... # error: [cyclic-class-definition]
|
||||
class Foo(Bar): ... # error: [cyclic-class-def]
|
||||
class Bar(Baz): ... # error: [cyclic-class-def]
|
||||
class Baz(Foo): ... # error: [cyclic-class-def]
|
||||
|
||||
reveal_type(Foo.__mro__) # revealed: tuple[Literal[Foo], Unknown, Literal[object]]
|
||||
reveal_type(Bar.__mro__) # revealed: tuple[Literal[Bar], Unknown, Literal[object]]
|
||||
@@ -379,9 +379,9 @@ reveal_type(Baz.__mro__) # revealed: tuple[Literal[Baz], Unknown, Literal[objec
|
||||
|
||||
```py path=a.pyi
|
||||
class Spam: ...
|
||||
class Foo(Bar): ... # error: [cyclic-class-definition]
|
||||
class Bar(Baz): ... # error: [cyclic-class-definition]
|
||||
class Baz(Foo, Spam): ... # error: [cyclic-class-definition]
|
||||
class Foo(Bar): ... # error: [cyclic-class-def]
|
||||
class Bar(Baz): ... # error: [cyclic-class-def]
|
||||
class Baz(Foo, Spam): ... # error: [cyclic-class-def]
|
||||
|
||||
reveal_type(Foo.__mro__) # revealed: tuple[Literal[Foo], Unknown, Literal[object]]
|
||||
reveal_type(Bar.__mro__) # revealed: tuple[Literal[Bar], Unknown, Literal[object]]
|
||||
@@ -391,16 +391,16 @@ reveal_type(Baz.__mro__) # revealed: tuple[Literal[Baz], Unknown, Literal[objec
|
||||
## Classes with cycles in their MRO, and a sub-graph
|
||||
|
||||
```py path=a.pyi
|
||||
class FooCycle(BarCycle): ... # error: [cyclic-class-definition]
|
||||
class FooCycle(BarCycle): ... # error: [cyclic-class-def]
|
||||
class Foo: ...
|
||||
class BarCycle(FooCycle): ... # error: [cyclic-class-definition]
|
||||
class BarCycle(FooCycle): ... # error: [cyclic-class-def]
|
||||
class Bar(Foo): ...
|
||||
|
||||
# TODO: can we avoid emitting the errors for these?
|
||||
# The classes have cyclic superclasses,
|
||||
# but are not themselves cyclic...
|
||||
class Baz(Bar, BarCycle): ... # error: [cyclic-class-definition]
|
||||
class Spam(Baz): ... # error: [cyclic-class-definition]
|
||||
class Baz(Bar, BarCycle): ... # error: [cyclic-class-def]
|
||||
class Spam(Baz): ... # error: [cyclic-class-def]
|
||||
|
||||
reveal_type(FooCycle.__mro__) # revealed: tuple[Literal[FooCycle], Unknown, Literal[object]]
|
||||
reveal_type(BarCycle.__mro__) # revealed: tuple[Literal[BarCycle], Unknown, Literal[object]]
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
## Narrowing for `bool(..)` checks
|
||||
|
||||
```py
|
||||
def flag() -> bool: ...
|
||||
|
||||
x = 1 if flag() else None
|
||||
|
||||
# valid invocation, positive
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if bool(x is not None):
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
# valid invocation, negative
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if not bool(x is not None):
|
||||
reveal_type(x) # revealed: None
|
||||
|
||||
# no args/narrowing
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if not bool():
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
|
||||
# invalid invocation, too many positional args
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if bool(x is not None, 5): # TODO diagnostic
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
|
||||
# invalid invocation, too many kwargs
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if bool(x is not None, y=5): # TODO diagnostic
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
```
|
||||
@@ -1,64 +0,0 @@
|
||||
# Consolidating narrowed types after if statement
|
||||
|
||||
## After if-else statements, narrowing has no effect if the variable is not mutated in any branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int | None
|
||||
```
|
||||
|
||||
## Narrowing can have a persistent effect if the variable is mutated in one branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 10
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
## An if statement without an explicit `else` branch is equivalent to one with a no-op `else` branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
y = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 0
|
||||
|
||||
if y is None:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
reveal_type(y) # revealed: int | None
|
||||
```
|
||||
|
||||
## An if-elif without an explicit else branch is equivalent to one with an empty else branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 0
|
||||
elif x > 50:
|
||||
x = 50
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
@@ -1,45 +0,0 @@
|
||||
# Nonlocal references
|
||||
|
||||
## One level up
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Two levels up
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
def h():
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Skips class scope
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
|
||||
class C:
|
||||
x = 2
|
||||
def g():
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Skips annotation-only assignment
|
||||
|
||||
```py
|
||||
def f():
|
||||
x = 1
|
||||
def g():
|
||||
# it's pretty weird to have an annotated assignment in a function where the
|
||||
# name is otherwise not defined; maybe should be an error?
|
||||
x: int
|
||||
def h():
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
@@ -1,57 +0,0 @@
# Unbound

## Unbound class variable

Name lookups within a class scope fall back to globals, but lookups of class attributes don't.

```py
def bool_instance() -> bool:
    return True

flag = bool_instance()
x = 1

class C:
    y = x
    if flag:
        x = 2

# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C]` is possibly unbound"
reveal_type(C.x) # revealed: Literal[2]
reveal_type(C.y) # revealed: Literal[1]
```

## Possibly unbound in class and global scope

```py
def bool_instance() -> bool:
    return True

if bool_instance():
    x = "abc"

class C:
    if bool_instance():
        x = 1

    # error: [possibly-unresolved-reference]
    y = x

reveal_type(C.y) # revealed: Literal[1] | Literal["abc"]
```

## Unbound function local

An unbound function local that has definitions in the scope does not fall back to globals.

```py
x = 1

def f():
    # error: [unresolved-reference]
    # revealed: Unknown
    reveal_type(x)
    x = 2
    # revealed: Literal[2]
    reveal_type(x)
```
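For context, an aside that is not part of the deleted file: the runtime counterpart of the `unresolved-reference` case above is `UnboundLocalError`, because the later assignment makes `x` local for the whole function body, so the early read never falls back to the global:

```py
x = 1

def f():
    try:
        print(x)  # `x` is local to `f` because of the assignment below
    except UnboundLocalError as exc:
        print(type(exc).__name__)  # UnboundLocalError
    x = 2
    print(x)  # 2

f()
```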
@@ -1,4 +1,4 @@
# Class definitions in stubs
# Class defenitions in stubs

## Cyclical class definition

@@ -21,11 +21,10 @@ reveal_type(Identity[0]) # revealed: str
## Class getitem union

```py
def bool_instance() -> bool:
    return True
flag = True

class UnionClassGetItem:
    if bool_instance():
    if flag:

        def __class_getitem__(cls, item: int) -> str:
            return item
@@ -60,10 +59,9 @@ reveal_type(x[0]) # revealed: str | int
## Class getitem with unbound method union

```py
def bool_instance() -> bool:
    return True
flag = True

if bool_instance():
if flag:
    class Spam:
        def __class_getitem__(self, x: int) -> str:
            return "foo"
@@ -79,10 +77,9 @@ reveal_type(Spam[42])
## TODO: Class getitem non-class union

```py
def bool_instance() -> bool:
    return True
flag = True

if bool_instance():
if flag:
    class Eggs:
        def __class_getitem__(self, x: int) -> str:
            return "foo"

@@ -30,11 +30,10 @@ reveal_type(Identity()[0]) # revealed: int
## Getitem union

```py
def bool_instance() -> bool:
    return True
flag = True

class Identity:
    if bool_instance():
    if flag:

        def __getitem__(self, index: int) -> int:
            return index

@@ -1,10 +1,5 @@
# `sys.version_info`

```toml
[environment]
target-version = "3.9"
```

## The type of `sys.version_info`

The type of `sys.version_info` is `sys._version_info`, at least according to typeshed's stubs (which

@@ -1,117 +0,0 @@
# type special form

## Class literal

```py
class A: ...

def f() -> type[A]:
    return A

reveal_type(f()) # revealed: type[A]
```

## Nested class literal

```py
class A:
    class B: ...

def f() -> type[A.B]:
    return A.B

reveal_type(f()) # revealed: type[B]
```

## Deeply nested class literal

```py
class A:
    class B:
        class C: ...

def f() -> type[A.B.C]:
    return A.B.C

reveal_type(f()) # revealed: type[C]
```

## Class literal from another module

```py
from a import A

def f() -> type[A]:
    return A

reveal_type(f()) # revealed: type[A]
```

```py path=a.py
class A: ...
```

## Qualified class literal from another module

```py
import a

def f() -> type[a.B]:
    return a.B

reveal_type(f()) # revealed: type[B]
```

```py path=a.py
class B: ...
```

## Deeply qualified class literal from another module

```py path=a/test.py
import a.b

# TODO: no diagnostic
# error: [unresolved-attribute]
def f() -> type[a.b.C]:
    # TODO: no diagnostic
    # error: [unresolved-attribute]
    return a.b.C

reveal_type(f()) # revealed: @Todo(unsupported type[X] special form)
```

```py path=a/__init__.py
```

```py path=a/b.py
class C: ...
```

## Union of classes

```py
class BasicUser: ...
class ProUser: ...

class A:
    class B:
        class C: ...

def get_user() -> type[BasicUser | ProUser | A.B.C]:
    return BasicUser

# revealed: type[BasicUser] | type[ProUser] | type[C]
reveal_type(get_user())
```

## Illegal parameters

```py
class A: ...
class B: ...

# error: [invalid-type-form]
def get_user() -> type[A, B]:
    return A
```
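An illustrative aside, not part of the deleted file: at runtime, `type[A]` describes the class object `A` itself or any of its subclasses, which is why the functions above can simply return the class:

```py
class A: ...
class Sub(A): ...

def make(cls: type[A]) -> A:
    # Any class object assignable to `type[A]` can be instantiated here.
    return cls()

print(isinstance(make(Sub), A))  # True
```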
@@ -267,42 +267,3 @@ reveal_type(b) # revealed: LiteralString
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(c) # revealed: @Todo(starred unpacking)
```

### Unicode

```py
# TODO: Add diagnostic (need more values to unpack)
(a, b) = "é"

reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: Unknown
```

### Unicode escape (1)

```py
# TODO: Add diagnostic (need more values to unpack)
(a, b) = "\u9E6C"

reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: Unknown
```

### Unicode escape (2)

```py
# TODO: Add diagnostic (need more values to unpack)
(a, b) = "\U0010FFFF"

reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: Unknown
```

### Surrogates

```py
(a, b) = "\uD800\uDFFF"

reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: LiteralString
```

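A brief aside (not part of the diff): the expected diagnostics above follow from the runtime lengths of these strings. Assuming the `"é"` literal is the single code point U+00E9, each of the first three literals is one character, while the two lone surrogates form a two-character string:

```py
print(len("é"))             # 1, so (a, b) has too few values to unpack
print(len("\u9E6C"))        # 1
print(len("\U0010FFFF"))    # 1
print(len("\uD800\uDFFF"))  # 2, so (a, b) unpacking succeeds
```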
@@ -1,4 +1,3 @@
use crate::lint::RuleSelection;
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};

@@ -6,34 +5,26 @@ use ruff_db::{Db as SourceDb, Upcast};
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
    fn is_file_open(&self, file: File) -> bool;

    fn rule_selection(&self) -> &RuleSelection;
}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync::Arc;

    use crate::program::{Program, SearchPathSettings};
    use crate::python_version::PythonVersion;
    use crate::{default_lint_registry, ProgramSettings};

    use super::Db;
    use crate::lint::RuleSelection;
    use anyhow::Context;
    use ruff_db::files::{File, Files};
    use ruff_db::system::{DbWithTestSystem, System, SystemPathBuf, TestSystem};
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Upcast};

    use super::Db;

    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        files: Files,
        system: TestSystem,
        vendored: VendoredFileSystem,
        events: Arc<std::sync::Mutex<Vec<salsa::Event>>>,
        rule_selection: Arc<RuleSelection>,
        events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
    }

    impl TestDb {
@@ -42,9 +33,8 @@ pub(crate) mod tests {
            storage: salsa::Storage::default(),
            system: TestSystem::default(),
            vendored: red_knot_vendored::file_system().clone(),
            events: Arc::default(),
            events: std::sync::Arc::default(),
            files: Files::default(),
            rule_selection: Arc::new(RuleSelection::from_registry(&default_lint_registry())),
        }
    }

@@ -107,10 +97,6 @@ pub(crate) mod tests {
        fn is_file_open(&self, file: File) -> bool {
            !file.path(self).is_vendored_path()
        }

        fn rule_selection(&self) -> &RuleSelection {
            &self.rule_selection
        }
    }

    #[salsa::db]
@@ -122,66 +108,4 @@ pub(crate) mod tests {
            events.push(event);
        }
    }

    pub(crate) struct TestDbBuilder<'a> {
        /// Target Python version
        python_version: PythonVersion,
        /// Path to a custom typeshed directory
        custom_typeshed: Option<SystemPathBuf>,
        /// Path and content pairs for files that should be present
        files: Vec<(&'a str, &'a str)>,
    }

    impl<'a> TestDbBuilder<'a> {
        pub(crate) fn new() -> Self {
            Self {
                python_version: PythonVersion::default(),
                custom_typeshed: None,
                files: vec![],
            }
        }

        pub(crate) fn with_python_version(mut self, version: PythonVersion) -> Self {
            self.python_version = version;
            self
        }

        pub(crate) fn with_custom_typeshed(mut self, path: &str) -> Self {
            self.custom_typeshed = Some(SystemPathBuf::from(path));
            self
        }

        pub(crate) fn with_file(mut self, path: &'a str, content: &'a str) -> Self {
            self.files.push((path, content));
            self
        }

        pub(crate) fn build(self) -> anyhow::Result<TestDb> {
            let mut db = TestDb::new();

            let src_root = SystemPathBuf::from("/src");
            db.memory_file_system().create_directory_all(&src_root)?;

            db.write_files(self.files)
                .context("Failed to write test files")?;

            let mut search_paths = SearchPathSettings::new(src_root);
            search_paths.custom_typeshed = self.custom_typeshed;

            Program::from_settings(
                &db,
                &ProgramSettings {
                    target_version: self.python_version,
                    search_paths,
                },
            )
            .context("Failed to configure Program settings")?;

            Ok(db)
        }
    }

    pub(crate) fn setup_db() -> TestDb {
        TestDbBuilder::new().build().expect("valid TestDb setup")
    }
}

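A hedged sketch of how the `TestDbBuilder` removed above was used from the crate's tests (the builder methods are exactly the ones shown in the hunk; the call site itself is illustrative):

```rust
// Inside a test in this crate (illustrative; assumes the items above are in scope).
let db = TestDbBuilder::new()
    .with_python_version(PythonVersion::default())
    .with_file("/src/foo.py", "def test(): pass")
    .build()
    .expect("valid TestDb setup");
```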
@@ -2,7 +2,6 @@ use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

use crate::lint::{LintRegistry, LintRegistryBuilder};
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, Module};
@@ -12,7 +11,6 @@ pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod db;
pub mod lint;
mod module_name;
mod module_resolver;
mod node_key;
@@ -28,13 +26,3 @@ mod unpack;
mod util;

type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;

pub fn default_lint_registry() -> LintRegistry {
    let mut registry = LintRegistryBuilder::default();
    register_semantic_lints(&mut registry);
    registry.build()
}

pub fn register_semantic_lints(registry: &mut LintRegistryBuilder) {
    types::register_type_lints(registry);
}

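A small hedged sketch of how the two helpers above compose with the lint infrastructure, mirroring the `RuleSelection::from_registry(&default_lint_registry())` call in the `TestDb` constructor earlier in this diff (the call site is illustrative):

```rust
// Illustrative usage inside the crate; `RuleSelection` lives in `crate::lint`.
let registry = default_lint_registry();
let selection = crate::lint::RuleSelection::from_registry(&registry);
// Every lint whose default level maps to a severity is enabled by default.
assert!(selection.enabled().count() > 0);
```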
@@ -1,419 +0,0 @@
|
||||
use itertools::Itertools;
|
||||
use ruff_db::diagnostic::{LintName, Severity};
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::hash::Hasher;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LintMetadata {
|
||||
/// The unique identifier for the lint.
|
||||
pub name: LintName,
|
||||
|
||||
/// A one-sentence summary of what the lint catches.
|
||||
pub summary: &'static str,
|
||||
|
||||
/// An in depth explanation of the lint in markdown. Covers what the lint does, why it's bad and possible fixes.
|
||||
///
|
||||
/// The documentation may require post-processing to be rendered correctly. For example, lines
|
||||
/// might have leading or trailing whitespace that should be removed.
|
||||
pub raw_documentation: &'static str,
|
||||
|
||||
/// The default level of the lint if the user doesn't specify one.
|
||||
pub default_level: Level,
|
||||
|
||||
pub status: LintStatus,
|
||||
|
||||
/// Location where this lint is declared: `file_name:line`
|
||||
pub source: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub enum Level {
|
||||
/// The lint is disabled and should not run.
|
||||
Ignore,
|
||||
|
||||
/// The lint is enabled and diagnostic should have a warning severity.
|
||||
Warn,
|
||||
|
||||
/// The lint is enabled and diagnostics have an error severity.
|
||||
Error,
|
||||
}
|
||||
|
||||
impl Level {
|
||||
pub const fn is_error(self) -> bool {
|
||||
matches!(self, Level::Error)
|
||||
}
|
||||
|
||||
pub const fn is_warn(self) -> bool {
|
||||
matches!(self, Level::Warn)
|
||||
}
|
||||
|
||||
pub const fn is_ignore(self) -> bool {
|
||||
matches!(self, Level::Ignore)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<Level> for Severity {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(level: Level) -> Result<Self, ()> {
|
||||
match level {
|
||||
Level::Ignore => Err(()),
|
||||
Level::Warn => Ok(Severity::Warning),
|
||||
Level::Error => Ok(Severity::Error),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LintMetadata {
|
||||
pub fn name(&self) -> LintName {
|
||||
self.name
|
||||
}
|
||||
|
||||
pub fn summary(&self) -> &str {
|
||||
self.summary
|
||||
}
|
||||
|
||||
/// Returns the documentation line by line with leading and trailing whitespace removed.
|
||||
pub fn documentation_lines(&self) -> impl Iterator<Item = &str> {
|
||||
self.raw_documentation
|
||||
.lines()
|
||||
.map(|line| line.strip_prefix(' ').unwrap_or(line).trim_end())
|
||||
}
|
||||
|
||||
/// Returns the documentation as a single string.
|
||||
pub fn documentation(&self) -> String {
|
||||
self.documentation_lines().join("\n")
|
||||
}
|
||||
|
||||
pub fn default_level(&self) -> Level {
|
||||
self.default_level
|
||||
}
|
||||
|
||||
pub fn status(&self) -> &LintStatus {
|
||||
&self.status
|
||||
}
|
||||
|
||||
pub fn source(&self) -> &str {
|
||||
self.source
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub const fn lint_metadata_defaults() -> LintMetadata {
|
||||
LintMetadata {
|
||||
name: LintName::of(""),
|
||||
summary: "",
|
||||
raw_documentation: "",
|
||||
default_level: Level::Error,
|
||||
status: LintStatus::preview("0.0.0"),
|
||||
source: "",
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum LintStatus {
|
||||
/// The rule has been added to the linter, but is not yet stable.
|
||||
Preview {
|
||||
/// When the rule was added to preview
|
||||
since: &'static str,
|
||||
},
|
||||
|
||||
/// Stable rule that was added in the version defined by `since`.
|
||||
Stable { since: &'static str },
|
||||
|
||||
/// The rule has been deprecated [`since`] (version) and will be removed in the future.
|
||||
Deprecated {
|
||||
since: &'static str,
|
||||
reason: &'static str,
|
||||
},
|
||||
|
||||
/// The rule has been removed [`since`] (version) and using it will result in an error.
|
||||
Removed {
|
||||
since: &'static str,
|
||||
reason: &'static str,
|
||||
},
|
||||
}
|
||||
|
||||
impl LintStatus {
|
||||
pub const fn preview(since: &'static str) -> Self {
|
||||
LintStatus::Preview { since }
|
||||
}
|
||||
|
||||
pub const fn stable(since: &'static str) -> Self {
|
||||
LintStatus::Stable { since }
|
||||
}
|
||||
|
||||
pub const fn deprecated(since: &'static str, reason: &'static str) -> Self {
|
||||
LintStatus::Deprecated { since, reason }
|
||||
}
|
||||
|
||||
pub const fn removed(since: &'static str, reason: &'static str) -> Self {
|
||||
LintStatus::Removed { since, reason }
|
||||
}
|
||||
|
||||
pub const fn is_removed(&self) -> bool {
|
||||
matches!(self, LintStatus::Removed { .. })
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! declare_lint {
|
||||
(
|
||||
$(#[doc = $doc:literal])+
|
||||
$vis: vis static $name: ident = {
|
||||
summary: $summary: literal,
|
||||
status: $status: expr,
|
||||
// Optional properties
|
||||
$( $key:ident: $value:expr, )*
|
||||
}
|
||||
) => {
|
||||
$( #[doc = $doc] )+
|
||||
#[allow(clippy::needless_update)]
|
||||
$vis static $name: $crate::lint::LintMetadata = $crate::lint::LintMetadata {
|
||||
name: ruff_db::diagnostic::LintName::of(ruff_macros::kebab_case!($name)),
|
||||
summary: $summary,
|
||||
raw_documentation: concat!($($doc,)+ "\n"),
|
||||
status: $status,
|
||||
source: concat!(file!(), ":", line!()),
|
||||
$( $key: $value, )*
|
||||
..$crate::lint::lint_metadata_defaults()
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/// A unique identifier for a lint rule.
|
||||
///
|
||||
/// Implements `PartialEq`, `Eq`, and `Hash` based on the `LintMetadata` pointer
|
||||
/// for fast comparison and lookup.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct LintId {
|
||||
definition: &'static LintMetadata,
|
||||
}
|
||||
|
||||
impl LintId {
|
||||
pub const fn of(definition: &'static LintMetadata) -> Self {
|
||||
LintId { definition }
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for LintId {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
std::ptr::eq(self.definition, other.definition)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for LintId {}
|
||||
|
||||
impl std::hash::Hash for LintId {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
std::ptr::hash(self.definition, state);
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for LintId {
|
||||
type Target = LintMetadata;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.definition
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintRegistryBuilder {
|
||||
/// Registered lints that haven't been removed.
|
||||
lints: Vec<LintId>,
|
||||
|
||||
/// Lints indexed by name, including aliases and removed rules.
|
||||
by_name: FxHashMap<&'static str, LintEntry>,
|
||||
}
|
||||
|
||||
impl LintRegistryBuilder {
|
||||
#[track_caller]
|
||||
pub fn register_lint(&mut self, lint: &'static LintMetadata) {
|
||||
assert_eq!(
|
||||
self.by_name.insert(&*lint.name, lint.into()),
|
||||
None,
|
||||
"duplicate lint registration for '{name}'",
|
||||
name = lint.name
|
||||
);
|
||||
|
||||
if !lint.status.is_removed() {
|
||||
self.lints.push(LintId::of(lint));
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn register_alias(&mut self, from: LintName, to: &'static LintMetadata) {
|
||||
let target = match self.by_name.get(to.name.as_str()) {
|
||||
Some(LintEntry::Lint(target) | LintEntry::Removed(target)) => target,
|
||||
Some(LintEntry::Alias(target)) => {
|
||||
panic!(
|
||||
"lint alias {from} -> {to:?} points to another alias {target:?}",
|
||||
target = target.name()
|
||||
)
|
||||
}
|
||||
None => panic!(
|
||||
"lint alias {from} -> {to} points to non-registered lint",
|
||||
to = to.name
|
||||
),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
self.by_name
|
||||
.insert(from.as_str(), LintEntry::Alias(*target)),
|
||||
None,
|
||||
"duplicate lint registration for '{from}'",
|
||||
);
|
||||
}
|
||||
|
||||
pub fn build(self) -> LintRegistry {
|
||||
LintRegistry {
|
||||
lints: self.lints,
|
||||
by_name: self.by_name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintRegistry {
|
||||
lints: Vec<LintId>,
|
||||
by_name: FxHashMap<&'static str, LintEntry>,
|
||||
}
|
||||
|
||||
impl LintRegistry {
|
||||
/// Looks up a lint by its name.
|
||||
pub fn get(&self, code: &str) -> Result<LintId, GetLintError> {
|
||||
match self.by_name.get(code) {
|
||||
Some(LintEntry::Lint(metadata)) => Ok(*metadata),
|
||||
Some(LintEntry::Alias(lint)) => {
|
||||
if lint.status.is_removed() {
|
||||
Err(GetLintError::Removed(lint.name()))
|
||||
} else {
|
||||
Ok(*lint)
|
||||
}
|
||||
}
|
||||
Some(LintEntry::Removed(lint)) => Err(GetLintError::Removed(lint.name())),
|
||||
None => Err(GetLintError::Unknown(code.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns all registered, non-removed lints.
|
||||
pub fn lints(&self) -> &[LintId] {
|
||||
&self.lints
|
||||
}
|
||||
|
||||
/// Returns an iterator over all known aliases and to their target lints.
|
||||
///
|
||||
/// This iterator includes aliases that point to removed lints.
|
||||
pub fn aliases(&self) -> impl Iterator<Item = (LintName, LintId)> + use<'_> {
|
||||
self.by_name.iter().filter_map(|(key, value)| {
|
||||
if let LintEntry::Alias(alias) = value {
|
||||
Some((LintName::of(key), *alias))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterates over all removed lints.
|
||||
pub fn removed(&self) -> impl Iterator<Item = LintId> + use<'_> {
|
||||
self.by_name.iter().filter_map(|(_, value)| {
|
||||
if let LintEntry::Removed(metadata) = value {
|
||||
Some(*metadata)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone)]
|
||||
pub enum GetLintError {
|
||||
/// The name maps to this removed lint.
|
||||
#[error("lint {0} has been removed")]
|
||||
Removed(LintName),
|
||||
|
||||
/// No lint with the given name is known.
|
||||
#[error("unknown lint {0}")]
|
||||
Unknown(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum LintEntry {
|
||||
/// An existing lint rule. Can be in preview, stable or deprecated.
|
||||
Lint(LintId),
|
||||
/// A lint rule that has been removed.
|
||||
Removed(LintId),
|
||||
Alias(LintId),
|
||||
}
|
||||
|
||||
impl From<&'static LintMetadata> for LintEntry {
|
||||
fn from(metadata: &'static LintMetadata) -> Self {
|
||||
if metadata.status.is_removed() {
|
||||
LintEntry::Removed(LintId::of(metadata))
|
||||
} else {
|
||||
LintEntry::Lint(LintId::of(metadata))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct RuleSelection {
|
||||
/// Map with the severity for each enabled lint rule.
|
||||
///
|
||||
/// If a rule isn't present in this map, then it should be considered disabled.
|
||||
lints: FxHashMap<LintId, Severity>,
|
||||
}
|
||||
|
||||
impl RuleSelection {
|
||||
/// Creates a new rule selection from all known lints in the registry that are enabled
|
||||
/// according to their default severity.
|
||||
pub fn from_registry(registry: &LintRegistry) -> Self {
|
||||
let lints = registry
|
||||
.lints()
|
||||
.iter()
|
||||
.filter_map(|lint| {
|
||||
Severity::try_from(lint.default_level())
|
||||
.ok()
|
||||
.map(|severity| (*lint, severity))
|
||||
})
|
||||
.collect();
|
||||
|
||||
RuleSelection { lints }
|
||||
}
|
||||
|
||||
/// Returns an iterator over all enabled lints.
|
||||
pub fn enabled(&self) -> impl Iterator<Item = LintId> + use<'_> {
|
||||
self.lints.keys().copied()
|
||||
}
|
||||
|
||||
/// Returns an iterator over all enabled lints and their severity.
|
||||
pub fn iter(&self) -> impl ExactSizeIterator<Item = (LintId, Severity)> + use<'_> {
|
||||
self.lints.iter().map(|(&lint, &severity)| (lint, severity))
|
||||
}
|
||||
|
||||
/// Returns the configured severity for the lint with the given id or `None` if the lint is disabled.
|
||||
pub fn severity(&self, lint: LintId) -> Option<Severity> {
|
||||
self.lints.get(&lint).copied()
|
||||
}
|
||||
|
||||
/// Enables `lint` and configures with the given `severity`.
|
||||
///
|
||||
/// Overrides any previous configuration for the lint.
|
||||
pub fn enable(&mut self, lint: LintId, severity: Severity) {
|
||||
self.lints.insert(lint, severity);
|
||||
}
|
||||
|
||||
/// Disables `lint` if it was previously enabled.
|
||||
pub fn disable(&mut self, lint: LintId) {
|
||||
self.lints.remove(&lint);
|
||||
}
|
||||
|
||||
/// Merges the enabled lints from `other` into this selection.
|
||||
///
|
||||
/// Lints from `other` will override any existing configuration.
|
||||
pub fn merge(&mut self, other: &RuleSelection) {
|
||||
self.lints.extend(other.iter());
|
||||
}
|
||||
}
|
||||
@@ -416,7 +416,7 @@ impl<'db> Iterator for SearchPathIterator<'db> {
    }
}

impl FusedIterator for SearchPathIterator<'_> {}
impl<'db> FusedIterator for SearchPathIterator<'db> {}

/// Represents a single `.pth` file in a `site-packages` directory.
/// One or more lines in a `.pth` file may be a (relative or absolute)

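Background for the `FusedIterator` impl being toggled here, as a standalone sketch rather than code from this diff: the marker trait promises that once `next()` returns `None` it keeps returning `None`, so the impl body stays empty.

```rust
use std::iter::FusedIterator;

struct Countdown(u32);

impl Iterator for Countdown {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        if self.0 == 0 {
            None
        } else {
            self.0 -= 1;
            Some(self.0)
        }
    }
}

// Marker impl: nothing to implement, just the promise of "fused" behaviour.
impl FusedIterator for Countdown {}
```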
@@ -606,11 +606,24 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
|
||||
let function_table = index.symbol_table(function_scope_id);
|
||||
assert_eq!(
|
||||
names(&function_table),
|
||||
vec!["a", "b", "c", "d", "args", "kwargs"],
|
||||
vec!["a", "b", "c", "args", "d", "kwargs"],
|
||||
);
|
||||
|
||||
let use_def = index.use_def_map(function_scope_id);
|
||||
for name in ["a", "b", "c", "d"] {
|
||||
let binding = use_def
|
||||
.first_public_binding(
|
||||
function_table
|
||||
.symbol_id_by_name(name)
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
binding.kind(&db),
|
||||
DefinitionKind::ParameterWithDefault(_)
|
||||
));
|
||||
}
|
||||
for name in ["args", "kwargs"] {
|
||||
let binding = use_def
|
||||
.first_public_binding(
|
||||
function_table
|
||||
@@ -620,28 +633,6 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
|
||||
.unwrap();
|
||||
assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
|
||||
}
|
||||
let args_binding = use_def
|
||||
.first_public_binding(
|
||||
function_table
|
||||
.symbol_id_by_name("args")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
args_binding.kind(&db),
|
||||
DefinitionKind::VariadicPositionalParameter(_)
|
||||
));
|
||||
let kwargs_binding = use_def
|
||||
.first_public_binding(
|
||||
function_table
|
||||
.symbol_id_by_name("kwargs")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
kwargs_binding.kind(&db),
|
||||
DefinitionKind::VariadicKeywordParameter(_)
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -663,38 +654,25 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
|
||||
let lambda_table = index.symbol_table(lambda_scope_id);
|
||||
assert_eq!(
|
||||
names(&lambda_table),
|
||||
vec!["a", "b", "c", "d", "args", "kwargs"],
|
||||
vec!["a", "b", "c", "args", "d", "kwargs"],
|
||||
);
|
||||
|
||||
let use_def = index.use_def_map(lambda_scope_id);
|
||||
for name in ["a", "b", "c", "d"] {
|
||||
let binding = use_def
|
||||
.first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
binding.kind(&db),
|
||||
DefinitionKind::ParameterWithDefault(_)
|
||||
));
|
||||
}
|
||||
for name in ["args", "kwargs"] {
|
||||
let binding = use_def
|
||||
.first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
|
||||
.unwrap();
|
||||
assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
|
||||
}
|
||||
let args_binding = use_def
|
||||
.first_public_binding(
|
||||
lambda_table
|
||||
.symbol_id_by_name("args")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
args_binding.kind(&db),
|
||||
DefinitionKind::VariadicPositionalParameter(_)
|
||||
));
|
||||
let kwargs_binding = use_def
|
||||
.first_public_binding(
|
||||
lambda_table
|
||||
.symbol_id_by_name("kwargs")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
kwargs_binding.kind(&db),
|
||||
DefinitionKind::VariadicKeywordParameter(_)
|
||||
));
|
||||
}
|
||||
|
||||
/// Test case to validate that the comprehension scope is correctly identified and that the target
|
||||
|
||||
@@ -9,7 +9,7 @@ use ruff_index::IndexVec;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::name::Name;
|
||||
use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
|
||||
use ruff_python_ast::{BoolOp, Expr};
|
||||
use ruff_python_ast::{AnyParameterRef, BoolOp, Expr};
|
||||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
||||
@@ -479,35 +479,21 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
self.pop_scope();
|
||||
}
|
||||
|
||||
fn declare_parameters(&mut self, parameters: &'db ast::Parameters) {
|
||||
for parameter in parameters.iter_non_variadic_params() {
|
||||
self.declare_parameter(parameter);
|
||||
}
|
||||
if let Some(vararg) = parameters.vararg.as_ref() {
|
||||
let symbol = self.add_symbol(vararg.name.id().clone());
|
||||
self.add_definition(
|
||||
symbol,
|
||||
DefinitionNodeRef::VariadicPositionalParameter(vararg),
|
||||
);
|
||||
}
|
||||
if let Some(kwarg) = parameters.kwarg.as_ref() {
|
||||
let symbol = self.add_symbol(kwarg.name.id().clone());
|
||||
self.add_definition(symbol, DefinitionNodeRef::VariadicKeywordParameter(kwarg));
|
||||
}
|
||||
}
|
||||
|
||||
fn declare_parameter(&mut self, parameter: &'db ast::ParameterWithDefault) {
|
||||
let symbol = self.add_symbol(parameter.parameter.name.id().clone());
|
||||
fn declare_parameter(&mut self, parameter: AnyParameterRef<'db>) {
|
||||
let symbol = self.add_symbol(parameter.name().id().clone());
|
||||
|
||||
let definition = self.add_definition(symbol, parameter);
|
||||
|
||||
// Insert a mapping from the inner Parameter node to the same definition.
|
||||
// This ensures that calling `HasTy::ty` on the inner parameter returns
|
||||
// a valid type (and doesn't panic)
|
||||
let existing_definition = self
|
||||
.definitions_by_node
|
||||
.insert((¶meter.parameter).into(), definition);
|
||||
debug_assert_eq!(existing_definition, None);
|
||||
if let AnyParameterRef::NonVariadic(with_default) = parameter {
|
||||
// Insert a mapping from the parameter to the same definition.
|
||||
// This ensures that calling `HasTy::ty` on the inner parameter returns
|
||||
// a valid type (and doesn't panic)
|
||||
let existing_definition = self.definitions_by_node.insert(
|
||||
DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
|
||||
definition,
|
||||
);
|
||||
debug_assert_eq!(existing_definition, None);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn build(mut self) -> SemanticIndex<'db> {
|
||||
@@ -570,40 +556,34 @@ where
|
||||
fn visit_stmt(&mut self, stmt: &'ast ast::Stmt) {
|
||||
match stmt {
|
||||
ast::Stmt::FunctionDef(function_def) => {
|
||||
let ast::StmtFunctionDef {
|
||||
decorator_list,
|
||||
parameters,
|
||||
type_params,
|
||||
name,
|
||||
returns,
|
||||
body,
|
||||
is_async: _,
|
||||
range: _,
|
||||
} = function_def;
|
||||
for decorator in decorator_list {
|
||||
for decorator in &function_def.decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
|
||||
self.with_type_params(
|
||||
NodeWithScopeRef::FunctionTypeParameters(function_def),
|
||||
type_params.as_deref(),
|
||||
function_def.type_params.as_deref(),
|
||||
|builder| {
|
||||
builder.visit_parameters(parameters);
|
||||
if let Some(returns) = returns {
|
||||
builder.visit_annotation(returns);
|
||||
builder.visit_parameters(&function_def.parameters);
|
||||
if let Some(expr) = &function_def.returns {
|
||||
builder.visit_annotation(expr);
|
||||
}
|
||||
|
||||
builder.push_scope(NodeWithScopeRef::Function(function_def));
|
||||
|
||||
builder.declare_parameters(parameters);
|
||||
// Add symbols and definitions for the parameters to the function scope.
|
||||
for parameter in &*function_def.parameters {
|
||||
builder.declare_parameter(parameter);
|
||||
}
|
||||
|
||||
builder.visit_body(body);
|
||||
builder.visit_body(&function_def.body);
|
||||
builder.pop_scope()
|
||||
},
|
||||
);
|
||||
// The default value of the parameters needs to be evaluated in the
|
||||
// enclosing scope.
|
||||
for default in parameters
|
||||
for default in function_def
|
||||
.parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
@@ -612,7 +592,7 @@ where
|
||||
// The symbol for the function name itself has to be evaluated
|
||||
// at the end to match the runtime evaluation of parameter defaults
|
||||
// and return-type annotations.
|
||||
let symbol = self.add_symbol(name.id.clone());
|
||||
let symbol = self.add_symbol(function_def.name.id.clone());
|
||||
self.add_definition(symbol, function_def);
|
||||
}
|
||||
ast::Stmt::ClassDef(class) => {
|
||||
@@ -789,22 +769,7 @@ where
|
||||
let mut constraints = vec![constraint];
|
||||
self.visit_body(&node.body);
|
||||
let mut post_clauses: Vec<FlowSnapshot> = vec![];
|
||||
let elif_else_clauses = node
|
||||
.elif_else_clauses
|
||||
.iter()
|
||||
.map(|clause| (clause.test.as_ref(), clause.body.as_slice()));
|
||||
let has_else = node
|
||||
.elif_else_clauses
|
||||
.last()
|
||||
.is_some_and(|clause| clause.test.is_none());
|
||||
let elif_else_clauses = elif_else_clauses.chain(if has_else {
|
||||
// if there's an `else` clause already, we don't need to add another
|
||||
None
|
||||
} else {
|
||||
// if there's no `else` branch, we should add a no-op `else` branch
|
||||
Some((None, Default::default()))
|
||||
});
|
||||
for (clause_test, clause_body) in elif_else_clauses {
|
||||
for clause in &node.elif_else_clauses {
|
||||
// snapshot after every block except the last; the last one will just become
|
||||
// the state that we merge the other snapshots into
|
||||
post_clauses.push(self.flow_snapshot());
|
||||
@@ -814,15 +779,24 @@ where
|
||||
for constraint in &constraints {
|
||||
self.record_negated_constraint(*constraint);
|
||||
}
|
||||
if let Some(elif_test) = clause_test {
|
||||
if let Some(elif_test) = &clause.test {
|
||||
self.visit_expr(elif_test);
|
||||
constraints.push(self.record_expression_constraint(elif_test));
|
||||
}
|
||||
self.visit_body(clause_body);
|
||||
self.visit_body(&clause.body);
|
||||
}
|
||||
for post_clause_state in post_clauses {
|
||||
self.flow_merge(post_clause_state);
|
||||
}
|
||||
let has_else = node
|
||||
.elif_else_clauses
|
||||
.last()
|
||||
.is_some_and(|clause| clause.test.is_none());
|
||||
if !has_else {
|
||||
// if there's no else clause, then it's possible we took none of the branches,
|
||||
// and the pre_if state can reach here
|
||||
self.flow_merge(pre_if);
|
||||
}
|
||||
}
|
||||
ast::Stmt::While(ast::StmtWhile {
|
||||
test,
|
||||
@@ -1199,8 +1173,10 @@ where
|
||||
self.push_scope(NodeWithScopeRef::Lambda(lambda));
|
||||
|
||||
// Add symbols and definitions for the parameters to the lambda scope.
|
||||
if let Some(parameters) = lambda.parameters.as_ref() {
|
||||
self.declare_parameters(parameters);
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
for parameter in parameters {
|
||||
self.declare_parameter(parameter);
|
||||
}
|
||||
}
|
||||
|
||||
self.visit_expr(lambda.body.as_ref());
|
||||
|
||||
@@ -89,9 +89,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
AnnotatedAssignment(&'a ast::StmtAnnAssign),
|
||||
AugmentedAssignment(&'a ast::StmtAugAssign),
|
||||
Comprehension(ComprehensionDefinitionNodeRef<'a>),
|
||||
VariadicPositionalParameter(&'a ast::Parameter),
|
||||
VariadicKeywordParameter(&'a ast::Parameter),
|
||||
Parameter(&'a ast::ParameterWithDefault),
|
||||
Parameter(ast::AnyParameterRef<'a>),
|
||||
WithItem(WithItemDefinitionNodeRef<'a>),
|
||||
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
|
||||
ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
|
||||
@@ -190,8 +188,8 @@ impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::ParameterWithDefault> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::ParameterWithDefault) -> Self {
|
||||
impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: ast::AnyParameterRef<'a>) -> Self {
|
||||
Self::Parameter(node)
|
||||
}
|
||||
}
|
||||
@@ -317,15 +315,14 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
first,
|
||||
is_async,
|
||||
}),
|
||||
DefinitionNodeRef::VariadicPositionalParameter(parameter) => {
|
||||
DefinitionKind::VariadicPositionalParameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
DefinitionNodeRef::VariadicKeywordParameter(parameter) => {
|
||||
DefinitionKind::VariadicKeywordParameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
DefinitionNodeRef::Parameter(parameter) => {
|
||||
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
DefinitionNodeRef::Parameter(parameter) => match parameter {
|
||||
ast::AnyParameterRef::Variadic(parameter) => {
|
||||
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
ast::AnyParameterRef::NonVariadic(parameter) => {
|
||||
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
},
|
||||
DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef {
|
||||
node,
|
||||
target,
|
||||
@@ -387,9 +384,10 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
is_async: _,
|
||||
}) => target.into(),
|
||||
Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
|
||||
Self::VariadicPositionalParameter(node) => node.into(),
|
||||
Self::VariadicKeywordParameter(node) => node.into(),
|
||||
Self::Parameter(node) => node.into(),
|
||||
Self::Parameter(node) => match node {
|
||||
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
|
||||
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
|
||||
},
|
||||
Self::WithItem(WithItemDefinitionNodeRef {
|
||||
node: _,
|
||||
target,
|
||||
@@ -454,9 +452,8 @@ pub enum DefinitionKind<'db> {
|
||||
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
|
||||
For(ForStmtDefinitionKind),
|
||||
Comprehension(ComprehensionDefinitionKind),
|
||||
VariadicPositionalParameter(AstNodeRef<ast::Parameter>),
|
||||
VariadicKeywordParameter(AstNodeRef<ast::Parameter>),
|
||||
Parameter(AstNodeRef<ast::ParameterWithDefault>),
|
||||
Parameter(AstNodeRef<ast::Parameter>),
|
||||
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
|
||||
WithItem(WithItemDefinitionKind),
|
||||
MatchPattern(MatchPatternDefinitionKind),
|
||||
ExceptHandler(ExceptHandlerDefinitionKind),
|
||||
@@ -478,8 +475,7 @@ impl DefinitionKind<'_> {
|
||||
| DefinitionKind::ParamSpec(_)
|
||||
| DefinitionKind::TypeVarTuple(_) => DefinitionCategory::DeclarationAndBinding,
|
||||
// a parameter always binds a value, but is only a declaration if annotated
|
||||
DefinitionKind::VariadicPositionalParameter(parameter)
|
||||
| DefinitionKind::VariadicKeywordParameter(parameter) => {
|
||||
DefinitionKind::Parameter(parameter) => {
|
||||
if parameter.annotation.is_some() {
|
||||
DefinitionCategory::DeclarationAndBinding
|
||||
} else {
|
||||
@@ -487,7 +483,7 @@ impl DefinitionKind<'_> {
|
||||
}
|
||||
}
|
||||
// presence of a default is irrelevant, same logic as for a no-default parameter
|
||||
DefinitionKind::Parameter(parameter_with_default) => {
|
||||
DefinitionKind::ParameterWithDefault(parameter_with_default) => {
|
||||
if parameter_with_default.parameter.annotation.is_some() {
|
||||
DefinitionCategory::DeclarationAndBinding
|
||||
} else {
|
||||
@@ -747,15 +743,6 @@ impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ast::AnyParameterRef<'_>> for DefinitionNodeKey {
|
||||
fn from(value: ast::AnyParameterRef) -> Self {
|
||||
Self(match value {
|
||||
ast::AnyParameterRef::Variadic(node) => NodeKey::from_node(node),
|
||||
ast::AnyParameterRef::NonVariadic(node) => NodeKey::from_node(node),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Identifier> for DefinitionNodeKey {
|
||||
fn from(identifier: &ast::Identifier) -> Self {
|
||||
Self(NodeKey::from_node(identifier))
|
||||
|
||||
@@ -400,7 +400,7 @@ pub(crate) struct ConstraintsIterator<'map, 'db> {
    constraint_ids: ConstraintIdIterator<'map>,
}

impl<'db> Iterator for ConstraintsIterator<'_, 'db> {
impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
    type Item = Constraint<'db>;

    fn next(&mut self) -> Option<Self::Item> {
@@ -424,7 +424,7 @@ impl DeclarationsIterator<'_, '_> {
    }
}

impl<'db> Iterator for DeclarationsIterator<'_, 'db> {
impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
    type Item = Definition<'db>;

    fn next(&mut self) -> Option<Self::Item> {

@@ -401,7 +401,7 @@ pub(super) struct DeclarationIdIterator<'a> {
    inner: DeclarationsIterator<'a>,
}

impl Iterator for DeclarationIdIterator<'_> {
impl<'a> Iterator for DeclarationIdIterator<'a> {
    type Item = ScopedDefinitionId;

    fn next(&mut self) -> Option<Self::Item> {

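These hunks all toggle the same pattern: naming every lifetime on the `impl` header versus eliding the ones the impl body never needs to refer to. A standalone sketch of the elided form (not code from this diff):

```rust
struct SliceIter<'a, T> {
    items: &'a [T],
}

// `'_` elides the lifetime that the impl body never names explicitly.
impl<T: Copy> Iterator for SliceIter<'_, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        let (&first, rest) = self.items.split_first()?;
        self.items = rest;
        Some(first)
    }
}
```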
@@ -166,15 +166,31 @@ impl_binding_has_ty!(ast::ParameterWithDefault);
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::TestDbBuilder;
|
||||
use crate::{HasTy, SemanticModel};
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
db.write_files(files)?;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_ty() -> anyhow::Result<()> {
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file("/src/foo.py", "def test(): pass")
|
||||
.build()?;
|
||||
let db = setup_db([("/src/foo.py", "def test(): pass")])?;
|
||||
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
@@ -191,9 +207,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn class_ty() -> anyhow::Result<()> {
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file("/src/foo.py", "class Test: pass")
|
||||
.build()?;
|
||||
let db = setup_db([("/src/foo.py", "class Test: pass")])?;
|
||||
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
@@ -210,10 +224,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn alias_ty() -> anyhow::Result<()> {
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file("/src/foo.py", "class Test: pass")
|
||||
.with_file("/src/bar.py", "from foo import Test")
|
||||
.build()?;
|
||||
let db = setup_db([
|
||||
("/src/foo.py", "class Test: pass"),
|
||||
("/src/bar.py", "from foo import Test"),
|
||||
])?;
|
||||
|
||||
let bar = system_path_to_file(&db, "/src/bar.py").unwrap();
|
||||
|
||||
|
||||
@@ -90,7 +90,7 @@ impl<'db> Symbol<'db> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::db::tests::setup_db;
|
||||
use crate::types::tests::setup_db;
|
||||
|
||||
#[test]
|
||||
fn test_symbol_or_fall_back_to() {
|
||||
|
||||
@@ -2,12 +2,11 @@ use std::hash::Hash;
|
||||
|
||||
use indexmap::IndexSet;
|
||||
use itertools::Itertools;
|
||||
use ruff_db::diagnostic::{DiagnosticId, Severity};
|
||||
|
||||
use ruff_db::files::File;
|
||||
use ruff_python_ast as ast;
|
||||
|
||||
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
|
||||
pub(crate) use self::diagnostic::register_type_lints;
|
||||
pub use self::diagnostic::{TypeCheckDiagnostic, TypeCheckDiagnostics};
|
||||
pub(crate) use self::display::TypeArrayDisplay;
|
||||
pub(crate) use self::infer::{
|
||||
@@ -26,10 +25,10 @@ use crate::stdlib::{
|
||||
builtins_symbol, core_module_symbol, typing_extensions_symbol, CoreStdlibModule,
|
||||
};
|
||||
use crate::symbol::{Boundness, Symbol};
|
||||
use crate::types::diagnostic::{TypeCheckDiagnosticsBuilder, CALL_NON_CALLABLE};
|
||||
use crate::types::diagnostic::TypeCheckDiagnosticsBuilder;
|
||||
use crate::types::mro::{ClassBase, Mro, MroError, MroIterator};
|
||||
use crate::types::narrow::narrowing_constraint;
|
||||
use crate::{Db, FxOrderSet, Module, Program, PythonVersion};
|
||||
use crate::{Db, FxOrderSet, Module, Program};
|
||||
|
||||
mod builder;
|
||||
mod diagnostic;
|
||||
@@ -41,9 +40,6 @@ mod signatures;
|
||||
mod string_annotation;
|
||||
mod unpacker;
|
||||
|
||||
#[cfg(test)]
|
||||
mod property_tests;
|
||||
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
|
||||
let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
|
||||
@@ -226,18 +222,6 @@ fn definition_expression_ty<'db>(
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the type of an expression from an arbitrary scope.
|
||||
///
|
||||
/// Can cause query cycles if used carelessly; caller must be sure that type inference isn't
|
||||
/// currently in progress for the expression's scope.
|
||||
fn expression_ty<'db>(db: &'db dyn Db, file: File, expression: &ast::Expr) -> Type<'db> {
|
||||
let index = semantic_index(db, file);
|
||||
let file_scope = index.expression_scope_id(expression);
|
||||
let scope = file_scope.to_scope_id(db, file);
|
||||
let expr_id = expression.scoped_expression_id(db, scope);
|
||||
infer_scope_types(db, scope).expression_ty(expr_id)
|
||||
}
|
||||
|
||||
/// Infer the combined type of an iterator of bindings.
|
||||
///
|
||||
/// Will return a union if there is more than one binding.
|
||||
@@ -315,7 +299,13 @@ fn declarations_ty<'db>(
|
||||
let declared_ty = if let Some(second) = all_types.next() {
|
||||
let mut builder = UnionBuilder::new(db).add(first);
|
||||
for other in [second].into_iter().chain(all_types) {
|
||||
if !first.is_equivalent_to(db, other) {
|
||||
// Make sure not to emit spurious errors relating to `Type::Todo`,
|
||||
// since we only infer this type due to a limitation in our current model.
|
||||
//
|
||||
// `Unknown` is different here, since we might infer `Unknown`
|
||||
// for one of these due to a variable being defined in one possible
|
||||
// control-flow branch but not another one.
|
||||
if !first.is_equivalent_to(db, other) && !first.is_todo() && !other.is_todo() {
|
||||
conflicting.push(other);
|
||||
}
|
||||
builder = builder.add(other);
|
||||
@@ -584,11 +574,8 @@ impl<'db> Type<'db> {
|
||||
Self::BytesLiteral(BytesLiteralType::new(db, bytes))
|
||||
}
|
||||
|
||||
pub fn tuple<T: Into<Type<'db>>>(
|
||||
db: &'db dyn Db,
|
||||
elements: impl IntoIterator<Item = T>,
|
||||
) -> Self {
|
||||
TupleType::from_elements(db, elements)
|
||||
pub fn tuple(db: &'db dyn Db, elements: &[Type<'db>]) -> Self {
|
||||
Self::Tuple(TupleType::new(db, elements))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
@@ -607,16 +594,11 @@ impl<'db> Type<'db> {
|
||||
|
||||
/// Return true if this type is a [subtype of] type `target`.
|
||||
///
|
||||
/// This method returns `false` if either `self` or `other` is not fully static.
|
||||
///
|
||||
/// [subtype of]: https://typing.readthedocs.io/en/latest/spec/concepts.html#subtype-supertype-and-type-equivalence
|
||||
pub(crate) fn is_subtype_of(self, db: &'db dyn Db, target: Type<'db>) -> bool {
|
||||
if self.is_equivalent_to(db, target) {
|
||||
return true;
|
||||
}
|
||||
if !self.is_fully_static(db) || !target.is_fully_static(db) {
|
||||
return false;
|
||||
}
|
||||
match (self, target) {
|
||||
(Type::Unknown | Type::Any | Type::Todo(_), _) => false,
|
||||
(_, Type::Unknown | Type::Any | Type::Todo(_)) => false,
|
||||
@@ -737,6 +719,9 @@ impl<'db> Type<'db> {
|
||||
(Type::KnownInstance(left), right) => {
|
||||
left.instance_fallback(db).is_subtype_of(db, right)
|
||||
}
|
||||
(left, Type::KnownInstance(right)) => {
|
||||
left.is_subtype_of(db, right.instance_fallback(db))
|
||||
}
|
||||
(Type::Instance(left), Type::Instance(right)) => left.is_instance_of(db, right.class),
|
||||
// TODO
|
||||
_ => false,
|
||||
@@ -776,16 +761,8 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if this type is [equivalent to] type `other`.
|
||||
///
|
||||
/// This method returns `false` if either `self` or `other` is not fully static.
|
||||
///
|
||||
/// [equivalent to]: https://typing.readthedocs.io/en/latest/spec/glossary.html#term-equivalent
|
||||
/// Return true if this type is equivalent to type `other`.
|
||||
pub(crate) fn is_equivalent_to(self, db: &'db dyn Db, other: Type<'db>) -> bool {
|
||||
if !(self.is_fully_static(db) && other.is_fully_static(db)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO equivalent but not identical structural types, differently-ordered unions and
|
||||
// intersections, other cases?
|
||||
|
||||
@@ -796,6 +773,7 @@ impl<'db> Type<'db> {
|
||||
// of `NoneType` and `NoDefaultType` in typeshed. This should not be required anymore once
|
||||
// we understand `sys.version_info` branches.
|
||||
self == other
|
||||
|| matches!((self, other), (Type::Todo(_), Type::Todo(_)))
|
||||
|| matches!((self, other),
|
||||
(
|
||||
Type::Instance(InstanceType { class: self_class }),
|
||||
@@ -809,17 +787,6 @@ impl<'db> Type<'db> {
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns true if both `self` and `other` are the same gradual form
|
||||
/// (limited to `Any`, `Unknown`, or `Todo`).
|
||||
pub(crate) fn is_same_gradual_form(self, other: Type<'db>) -> bool {
|
||||
matches!(
|
||||
(self, other),
|
||||
(Type::Unknown, Type::Unknown)
|
||||
| (Type::Any, Type::Any)
|
||||
| (Type::Todo(_), Type::Todo(_))
|
||||
)
|
||||
}
|
||||
|
||||
/// Return true if this type and `other` have no common elements.
|
||||
///
|
||||
/// Note: This function aims to have no false positives, but might return
|
||||
@@ -1026,63 +993,6 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if the type does not contain any gradual forms (as a sub-part).
|
||||
pub(crate) fn is_fully_static(self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Any | Type::Unknown | Type::Todo(_) => false,
|
||||
Type::Never
|
||||
| Type::FunctionLiteral(..)
|
||||
| Type::ModuleLiteral(..)
|
||||
| Type::IntLiteral(_)
|
||||
| Type::BooleanLiteral(_)
|
||||
| Type::StringLiteral(_)
|
||||
| Type::LiteralString
|
||||
| Type::BytesLiteral(_)
|
||||
| Type::SliceLiteral(_)
|
||||
| Type::KnownInstance(_) => true,
|
||||
Type::ClassLiteral(_) | Type::SubclassOf(_) | Type::Instance(_) => {
|
||||
// TODO: Ideally, we would iterate over the MRO of the class, check if all
|
||||
// bases are fully static, and only return `true` if that is the case.
|
||||
//
|
||||
// This does not work yet, because we currently infer `Unknown` for some
|
||||
// generic base classes that we don't understand yet. For example, `str`
|
||||
// is defined as `class str(Sequence[str])` in typeshed and we currently
|
||||
// compute its MRO as `(str, Unknown, object)`. This would make us think
|
||||
// that `str` is a gradual type, which causes all sorts of downstream
|
||||
// issues because it does not participate in equivalence/subtyping etc.
|
||||
//
|
||||
// Another problem is that we run into problems if we eagerly query the
|
||||
// MRO of class literals here. I have not fully investigated this, but
|
||||
// iterating over the MRO alone, without even acting on it, causes us to
|
||||
// infer `Unknown` for many classes.
|
||||
|
||||
true
|
||||
}
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.iter()
|
||||
.all(|elem| elem.is_fully_static(db)),
|
||||
Type::Intersection(intersection) => {
|
||||
intersection
|
||||
.positive(db)
|
||||
.iter()
|
||||
.all(|elem| elem.is_fully_static(db))
|
||||
&& intersection
|
||||
.negative(db)
|
||||
.iter()
|
||||
.all(|elem| elem.is_fully_static(db))
|
||||
}
|
||||
Type::Tuple(tuple) => tuple
|
||||
.elements(db)
|
||||
.iter()
|
||||
.all(|elem| elem.is_fully_static(db)),
|
||||
// TODO: Once we support them, make sure that we return `false` for other types
|
||||
// containing gradual forms such as `tuple[Any, ...]` or `Callable[..., str]`.
|
||||
// Conversely, make sure to return `true` for homogeneous tuples such as
|
||||
// `tuple[int, ...]`, once we add support for them.
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if there is just a single inhabitant for this type.
|
||||
///
|
||||
/// Note: This function aims to have no false positives, but might return `false`
|
||||
@@ -1430,76 +1340,21 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the type of `len()` on a type if it is known more precisely than `int`,
|
||||
/// or `None` otherwise.
|
||||
///
|
||||
/// In the second case, the return type of `len()` in `typeshed` (`int`)
|
||||
/// is used as a fallback.
|
||||
fn len(&self, db: &'db dyn Db) -> Option<Type<'db>> {
|
||||
fn non_negative_int_literal<'db>(db: &'db dyn Db, ty: Type<'db>) -> Option<Type<'db>> {
|
||||
match ty {
|
||||
// TODO: Emit diagnostic for non-integers and negative integers
|
||||
Type::IntLiteral(value) => (value >= 0).then_some(ty),
|
||||
Type::BooleanLiteral(value) => Some(Type::IntLiteral(value.into())),
|
||||
Type::Union(union) => {
|
||||
let mut builder = UnionBuilder::new(db);
|
||||
for element in union.elements(db) {
|
||||
builder = builder.add(non_negative_int_literal(db, *element)?);
|
||||
}
|
||||
Some(builder.build())
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
let usize_len = match self {
|
||||
Type::BytesLiteral(bytes) => Some(bytes.python_len(db)),
|
||||
Type::StringLiteral(string) => Some(string.python_len(db)),
|
||||
Type::Tuple(tuple) => Some(tuple.len(db)),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(usize_len) = usize_len {
|
||||
return usize_len.try_into().ok().map(Type::IntLiteral);
|
||||
}
|
||||
|
||||
let return_ty = match self.call_dunder(db, "__len__", &[*self]) {
|
||||
// TODO: emit a diagnostic
|
||||
CallDunderResult::MethodNotAvailable => return None,
|
||||
|
||||
CallDunderResult::CallOutcome(outcome) | CallDunderResult::PossiblyUnbound(outcome) => {
|
||||
outcome.return_ty(db)?
|
||||
}
|
||||
};
|
||||
|
||||
non_negative_int_literal(db, return_ty)
|
||||
}
|
||||
|
||||
/// Return the outcome of calling an object of this type.
|
||||
#[must_use]
|
||||
fn call(self, db: &'db dyn Db, arg_types: &[Type<'db>]) -> CallOutcome<'db> {
|
||||
match self {
|
||||
// TODO validate typed call arguments vs callable signature
|
||||
Type::FunctionLiteral(function_type) => match function_type.known(db) {
|
||||
Some(KnownFunction::RevealType) => CallOutcome::revealed(
|
||||
function_type.signature(db).return_ty,
|
||||
*arg_types.first().unwrap_or(&Type::Unknown),
|
||||
),
|
||||
|
||||
Some(KnownFunction::Len) => {
|
||||
let normal_return_ty = function_type.signature(db).return_ty;
|
||||
|
||||
let [only_arg] = arg_types else {
|
||||
// TODO: Emit a diagnostic
|
||||
return CallOutcome::callable(normal_return_ty);
|
||||
};
|
||||
let len_ty = only_arg.len(db);
|
||||
|
||||
CallOutcome::callable(len_ty.unwrap_or(normal_return_ty))
|
||||
Type::FunctionLiteral(function_type) => {
|
||||
if function_type.is_known(db, KnownFunction::RevealType) {
|
||||
CallOutcome::revealed(
|
||||
function_type.signature(db).return_ty,
|
||||
*arg_types.first().unwrap_or(&Type::Unknown),
|
||||
)
|
||||
} else {
|
||||
CallOutcome::callable(function_type.signature(db).return_ty)
|
||||
}
|
||||
|
||||
_ => CallOutcome::callable(function_type.signature(db).return_ty),
|
||||
},
|
||||
}
|
||||
|
||||
// TODO annotated return type on `__new__` or metaclass `__call__`
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => {
|
||||
@@ -1548,7 +1403,7 @@ impl<'db> Type<'db> {
|
||||
// `Any` is callable, and its return type is also `Any`.
|
||||
Type::Any => CallOutcome::callable(Type::Any),
|
||||
|
||||
Type::Todo(_) => CallOutcome::callable(todo_type!("call todo")),
|
||||
Type::Todo(_) => CallOutcome::callable(todo_type!()),
|
||||
|
||||
Type::Unknown => CallOutcome::callable(Type::Unknown),
|
||||
|
||||
@@ -1701,8 +1556,6 @@ impl<'db> Type<'db> {
|
||||
Type::KnownInstance(KnownInstanceType::Never | KnownInstanceType::NoReturn) => {
|
||||
Type::Never
|
||||
}
|
||||
Type::KnownInstance(KnownInstanceType::LiteralString) => Type::LiteralString,
|
||||
Type::KnownInstance(KnownInstanceType::Any) => Type::Any,
|
||||
_ => todo_type!(),
|
||||
}
|
||||
}
|
||||
@@ -1910,13 +1763,13 @@ impl<'db> KnownClass {
|
||||
}
|
||||
|
||||
pub fn to_class_literal(self, db: &'db dyn Db) -> Type<'db> {
|
||||
core_module_symbol(db, self.canonical_module(db), self.as_str())
|
||||
core_module_symbol(db, self.canonical_module(), self.as_str())
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
}
|
||||
|
||||
/// Return the module in which we should look up the definition for this class
|
||||
pub(crate) fn canonical_module(self, db: &'db dyn Db) -> CoreStdlibModule {
|
||||
pub(crate) const fn canonical_module(self) -> CoreStdlibModule {
|
||||
match self {
|
||||
Self::Bool
|
||||
| Self::Object
|
||||
@@ -1934,18 +1787,10 @@ impl<'db> KnownClass {
|
||||
Self::GenericAlias | Self::ModuleType | Self::FunctionType => CoreStdlibModule::Types,
|
||||
Self::NoneType => CoreStdlibModule::Typeshed,
|
||||
Self::SpecialForm | Self::TypeVar | Self::TypeAliasType => CoreStdlibModule::Typing,
|
||||
Self::NoDefaultType => {
|
||||
let python_version = Program::get(db).target_version(db);
|
||||
|
||||
// typing_extensions has a 3.13+ re-export for the `typing.NoDefault`
|
||||
// singleton, but not for `typing._NoDefaultType`. So we need to switch
|
||||
// to `typing._NoDefaultType` for newer versions:
|
||||
if python_version >= PythonVersion::PY313 {
|
||||
CoreStdlibModule::Typing
|
||||
} else {
|
||||
CoreStdlibModule::TypingExtensions
|
||||
}
|
||||
}
|
||||
// TODO when we understand sys.version_info, we will need an explicit fallback here,
|
||||
// because typing_extensions has a 3.13+ re-export for the `typing.NoDefault`
|
||||
// singleton, but not for `typing._NoDefaultType`
|
||||
Self::NoDefaultType => CoreStdlibModule::TypingExtensions,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2005,11 +1850,11 @@ impl<'db> KnownClass {
|
||||
};
|
||||
|
||||
let module = file_to_module(db, file)?;
|
||||
candidate.check_module(db, &module).then_some(candidate)
|
||||
candidate.check_module(&module).then_some(candidate)
|
||||
}
|
||||
|
||||
/// Return `true` if the module of `self` matches `module_name`
|
||||
fn check_module(self, db: &'db dyn Db, module: &Module) -> bool {
|
||||
fn check_module(self, module: &Module) -> bool {
|
||||
if !module.search_path().is_standard_library() {
|
||||
return false;
|
||||
}
|
||||
@@ -2029,7 +1874,7 @@ impl<'db> KnownClass {
|
||||
| Self::GenericAlias
|
||||
| Self::ModuleType
|
||||
| Self::VersionInfo
|
||||
| Self::FunctionType => module.name() == self.canonical_module(db).as_str(),
|
||||
| Self::FunctionType => module.name() == self.canonical_module().as_str(),
|
||||
Self::NoneType => matches!(module.name().as_str(), "_typeshed" | "types"),
|
||||
Self::SpecialForm | Self::TypeVar | Self::TypeAliasType | Self::NoDefaultType => {
|
||||
matches!(module.name().as_str(), "typing" | "typing_extensions")
|
||||
@@ -2043,8 +1888,6 @@ impl<'db> KnownClass {
|
||||
pub enum KnownInstanceType<'db> {
|
||||
/// The symbol `typing.Literal` (which can also be found as `typing_extensions.Literal`)
|
||||
Literal,
|
||||
/// The symbol `typing.LiteralString` (which can also be found as `typing_extensions.LiteralString`)
|
||||
LiteralString,
|
||||
/// The symbol `typing.Optional` (which can also be found as `typing_extensions.Optional`)
|
||||
Optional,
|
||||
/// The symbol `typing.Union` (which can also be found as `typing_extensions.Union`)
|
||||
@@ -2053,8 +1896,6 @@ pub enum KnownInstanceType<'db> {
|
||||
NoReturn,
|
||||
/// The symbol `typing.Never` available since 3.11 (which can also be found as `typing_extensions.Never`)
|
||||
Never,
|
||||
/// The symbol `typing.Any` (which can also be found as `typing_extensions.Any`)
|
||||
Any,
|
||||
/// A single instance of `typing.TypeVar`
|
||||
TypeVar(TypeVarInstance<'db>),
|
||||
/// A single instance of `typing.TypeAliasType` (PEP 695 type alias)
|
||||
@@ -2066,13 +1907,11 @@ impl<'db> KnownInstanceType<'db> {
|
||||
pub const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Literal => "Literal",
|
||||
Self::LiteralString => "LiteralString",
|
||||
Self::Optional => "Optional",
|
||||
Self::Union => "Union",
|
||||
Self::TypeVar(_) => "TypeVar",
|
||||
Self::NoReturn => "NoReturn",
|
||||
Self::Never => "Never",
|
||||
Self::Any => "Any",
|
||||
Self::TypeAliasType(_) => "TypeAliasType",
|
||||
}
|
||||
}
|
||||
@@ -2081,13 +1920,11 @@ impl<'db> KnownInstanceType<'db> {
|
||||
pub const fn bool(self) -> Truthiness {
|
||||
match self {
|
||||
Self::Literal
|
||||
| Self::LiteralString
|
||||
| Self::Optional
|
||||
| Self::TypeVar(_)
|
||||
| Self::Union
|
||||
| Self::NoReturn
|
||||
| Self::Never
|
||||
| Self::Any
|
||||
| Self::TypeAliasType(_) => Truthiness::AlwaysTrue,
|
||||
}
|
||||
}
|
||||
@@ -2096,12 +1933,10 @@ impl<'db> KnownInstanceType<'db> {
|
||||
pub fn repr(self, db: &'db dyn Db) -> &'db str {
|
||||
match self {
|
||||
Self::Literal => "typing.Literal",
|
||||
Self::LiteralString => "typing.LiteralString",
|
||||
Self::Optional => "typing.Optional",
|
||||
Self::Union => "typing.Union",
|
||||
Self::NoReturn => "typing.NoReturn",
|
||||
Self::Never => "typing.Never",
|
||||
Self::Any => "typing.Any",
|
||||
Self::TypeVar(typevar) => typevar.name(db),
|
||||
Self::TypeAliasType(_) => "typing.TypeAliasType",
|
||||
}
|
||||
@@ -2111,12 +1946,10 @@ impl<'db> KnownInstanceType<'db> {
|
||||
pub const fn class(self) -> KnownClass {
|
||||
match self {
|
||||
Self::Literal => KnownClass::SpecialForm,
|
||||
Self::LiteralString => KnownClass::SpecialForm,
|
||||
Self::Optional => KnownClass::SpecialForm,
|
||||
Self::Union => KnownClass::SpecialForm,
|
||||
Self::NoReturn => KnownClass::SpecialForm,
|
||||
Self::Never => KnownClass::SpecialForm,
|
||||
Self::Any => KnownClass::Object,
|
||||
Self::TypeVar(_) => KnownClass::TypeVar,
|
||||
Self::TypeAliasType(_) => KnownClass::TypeAliasType,
|
||||
}
|
||||
@@ -2136,9 +1969,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
return None;
|
||||
}
|
||||
match (module.name().as_str(), instance_name) {
|
||||
("typing", "Any") => Some(Self::Any),
|
||||
("typing" | "typing_extensions", "Literal") => Some(Self::Literal),
|
||||
("typing" | "typing_extensions", "LiteralString") => Some(Self::LiteralString),
|
||||
("typing" | "typing_extensions", "Optional") => Some(Self::Optional),
|
||||
("typing" | "typing_extensions", "Union") => Some(Self::Union),
|
||||
("typing" | "typing_extensions", "NoReturn") => Some(Self::NoReturn),
|
||||
@@ -2192,7 +2023,7 @@ impl<'db> TypeVarInstance<'db> {
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub(crate) fn constraints(self, db: &'db dyn Db) -> Option<&'db [Type<'db>]> {
|
||||
pub(crate) fn constraints(self, db: &'db dyn Db) -> Option<&[Type<'db>]> {
|
||||
if let Some(TypeVarBoundOrConstraints::Constraints(tuple)) = self.bound_or_constraints(db) {
|
||||
Some(tuple.elements(db))
|
||||
} else {
|
||||
@@ -2301,9 +2132,9 @@ impl<'db> CallOutcome<'db> {
|
||||
not_callable_ty,
|
||||
return_ty,
|
||||
}) => {
|
||||
diagnostics.add_lint(
|
||||
&CALL_NON_CALLABLE,
|
||||
diagnostics.add(
|
||||
node,
|
||||
"call-non-callable",
|
||||
format_args!(
|
||||
"Object of type `{}` is not callable",
|
||||
not_callable_ty.display(db)
|
||||
@@ -2316,9 +2147,9 @@ impl<'db> CallOutcome<'db> {
|
||||
called_ty,
|
||||
return_ty,
|
||||
}) => {
|
||||
diagnostics.add_lint(
|
||||
&CALL_NON_CALLABLE,
|
||||
diagnostics.add(
|
||||
node,
|
||||
"call-non-callable",
|
||||
format_args!(
|
||||
"Object of type `{}` is not callable (due to union element `{}`)",
|
||||
called_ty.display(db),
|
||||
@@ -2332,9 +2163,9 @@ impl<'db> CallOutcome<'db> {
|
||||
called_ty,
|
||||
return_ty,
|
||||
}) => {
|
||||
diagnostics.add_lint(
|
||||
&CALL_NON_CALLABLE,
|
||||
diagnostics.add(
|
||||
node,
|
||||
"call-non-callable",
|
||||
format_args!(
|
||||
"Object of type `{}` is not callable (due to union elements {})",
|
||||
called_ty.display(db),
|
||||
@@ -2347,9 +2178,9 @@ impl<'db> CallOutcome<'db> {
|
||||
callable_ty: called_ty,
|
||||
return_ty,
|
||||
}) => {
|
||||
diagnostics.add_lint(
|
||||
&CALL_NON_CALLABLE,
|
||||
diagnostics.add(
|
||||
node,
|
||||
"call-non-callable",
|
||||
format_args!(
|
||||
"Object of type `{}` is not callable (possibly unbound `__call__` method)",
|
||||
called_ty.display(db)
|
||||
@@ -2375,8 +2206,7 @@ impl<'db> CallOutcome<'db> {
|
||||
} => {
|
||||
diagnostics.add(
|
||||
node,
|
||||
DiagnosticId::RevealedType,
|
||||
Severity::Info,
|
||||
"revealed-type",
|
||||
format_args!("Revealed type is `{}`", revealed_ty.display(db)),
|
||||
);
|
||||
Ok(*return_ty)
|
||||
@@ -2674,15 +2504,13 @@ pub enum KnownFunction {
|
||||
ConstraintFunction(KnownConstraintFunction),
|
||||
/// `builtins.reveal_type`, `typing.reveal_type` or `typing_extensions.reveal_type`
|
||||
RevealType,
|
||||
/// `builtins.len`
|
||||
Len,
|
||||
}
|
||||
|
||||
impl KnownFunction {
|
||||
pub fn constraint_function(self) -> Option<KnownConstraintFunction> {
|
||||
match self {
|
||||
Self::ConstraintFunction(f) => Some(f),
|
||||
Self::RevealType | Self::Len => None,
|
||||
Self::RevealType => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2699,7 +2527,6 @@ impl KnownFunction {
|
||||
"issubclass" if definition.is_builtin_definition(db) => Some(
|
||||
KnownFunction::ConstraintFunction(KnownConstraintFunction::IsSubclass),
|
||||
),
|
||||
"len" if definition.is_builtin_definition(db) => Some(KnownFunction::Len),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -2740,7 +2567,7 @@ impl<'db> Class<'db> {
|
||||
///
|
||||
/// Were this not a salsa query, then the calling query
|
||||
/// would depend on the class's AST and rerun for every change in that file.
|
||||
fn explicit_bases(self, db: &'db dyn Db) -> &'db [Type<'db>] {
|
||||
fn explicit_bases(self, db: &'db dyn Db) -> &[Type<'db>] {
|
||||
self.explicit_bases_query(db)
|
||||
}
|
||||
|
||||
@@ -2809,11 +2636,7 @@ impl<'db> Class<'db> {
|
||||
pub fn is_subclass_of(self, db: &'db dyn Db, other: Class) -> bool {
|
||||
// `is_subclass_of` is checking the subtype relation, in which gradual types do not
|
||||
// participate, so we should not return `True` if we find `Any/Unknown` in the MRO.
|
||||
self.is_subclass_of_base(db, other)
|
||||
}
|
||||
|
||||
fn is_subclass_of_base(self, db: &'db dyn Db, other: impl Into<ClassBase<'db>>) -> bool {
|
||||
self.iter_mro(db).contains(&other.into())
|
||||
self.iter_mro(db).contains(&ClassBase::Class(other))
|
||||
}
|
||||
|
||||
/// Return the explicit `metaclass` of this class, if one is defined.
|
||||
@@ -3154,9 +2977,8 @@ pub struct StringLiteralType<'db> {
|
||||
}
|
||||
|
||||
impl<'db> StringLiteralType<'db> {
|
||||
/// The length of the string, as would be returned by Python's `len()`.
|
||||
pub fn python_len(&self, db: &'db dyn Db) -> usize {
|
||||
self.value(db).chars().count()
|
||||
pub fn len(&self, db: &'db dyn Db) -> usize {
|
||||
self.value(db).len()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3166,12 +2988,6 @@ pub struct BytesLiteralType<'db> {
|
||||
value: Box<[u8]>,
|
||||
}
|
||||
|
||||
impl<'db> BytesLiteralType<'db> {
|
||||
pub fn python_len(&self, db: &'db dyn Db) -> usize {
|
||||
self.value(db).len()
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct SliceLiteralType<'db> {
|
||||
start: Option<i32>,
|
||||
@@ -3179,7 +2995,7 @@ pub struct SliceLiteralType<'db> {
|
||||
step: Option<i32>,
|
||||
}
|
||||
|
||||
impl SliceLiteralType<'_> {
|
||||
impl<'db> SliceLiteralType<'db> {
|
||||
fn as_tuple(self, db: &dyn Db) -> (Option<i32>, Option<i32>, Option<i32>) {
|
||||
(self.start(db), self.stop(db), self.step(db))
|
||||
}
|
||||
@@ -3191,23 +3007,6 @@ pub struct TupleType<'db> {
|
||||
}
|
||||
|
||||
impl<'db> TupleType<'db> {
|
||||
pub fn from_elements<T: Into<Type<'db>>>(
|
||||
db: &'db dyn Db,
|
||||
types: impl IntoIterator<Item = T>,
|
||||
) -> Type<'db> {
|
||||
let mut elements = vec![];
|
||||
|
||||
for ty in types {
|
||||
let ty = ty.into();
|
||||
if ty.is_never() {
|
||||
return Type::Never;
|
||||
}
|
||||
elements.push(ty);
|
||||
}
|
||||
|
||||
Type::Tuple(Self::new(db, elements.into_boxed_slice()))
|
||||
}
|
||||
|
||||
pub fn get(&self, db: &'db dyn Db, index: usize) -> Option<Type<'db>> {
|
||||
self.elements(db).get(index).copied()
|
||||
}
|
||||
@@ -3225,20 +3024,42 @@ static_assertions::assert_eq_size!(Type, [u8; 16]);
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use super::*;
|
||||
use crate::db::tests::{setup_db, TestDb, TestDbBuilder};
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::stdlib::typing_symbol;
|
||||
use crate::PythonVersion;
|
||||
use crate::ProgramSettings;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::testing::assert_function_query_was_not_run;
|
||||
use ruff_python_ast as ast;
|
||||
use test_case::test_case;
|
||||
|
||||
pub(crate) fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
/// A test representation of a type that can be transformed unambiguously into a real Type,
|
||||
/// given a db.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) enum Ty {
|
||||
#[derive(Debug, Clone)]
|
||||
enum Ty {
|
||||
Never,
|
||||
Unknown,
|
||||
None,
|
||||
@@ -3262,7 +3083,7 @@ pub(crate) mod tests {
|
||||
}
|
||||
|
||||
impl Ty {
|
||||
pub(crate) fn into_type(self, db: &TestDb) -> Type<'_> {
|
||||
fn into_type(self, db: &TestDb) -> Type<'_> {
|
||||
match self {
|
||||
Ty::Never => Type::Never,
|
||||
Ty::Unknown => Type::Unknown,
|
||||
@@ -3293,21 +3114,13 @@ pub(crate) mod tests {
|
||||
builder.build()
|
||||
}
|
||||
Ty::Tuple(tys) => {
|
||||
let elements = tys.into_iter().map(|ty| ty.into_type(db));
|
||||
Type::tuple(db, elements)
|
||||
let elements: Vec<Type> = tys.into_iter().map(|ty| ty.into_type(db)).collect();
|
||||
Type::tuple(db, &elements)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test_case(Ty::Tuple(vec![Ty::Never]))]
|
||||
#[test_case(Ty::Tuple(vec![Ty::BuiltinInstance("str"), Ty::Never, Ty::BuiltinInstance("int")]))]
|
||||
#[test_case(Ty::Tuple(vec![Ty::Tuple(vec![Ty::Never])]))]
|
||||
fn tuple_containing_never_simplifies_to_never(ty: Ty) {
|
||||
let db = setup_db();
|
||||
assert_eq!(ty.into_type(&db), Type::Never);
|
||||
}
|
||||
|
||||
#[test_case(Ty::BuiltinInstance("str"), Ty::BuiltinInstance("object"))]
|
||||
#[test_case(Ty::BuiltinInstance("int"), Ty::BuiltinInstance("object"))]
|
||||
#[test_case(Ty::Unknown, Ty::IntLiteral(1))]
|
||||
@@ -3400,9 +3213,7 @@ pub(crate) mod tests {
|
||||
}
|
||||
|
||||
#[test_case(Ty::BuiltinInstance("object"), Ty::BuiltinInstance("int"))]
|
||||
#[test_case(Ty::Unknown, Ty::Unknown)]
|
||||
#[test_case(Ty::Unknown, Ty::IntLiteral(1))]
|
||||
#[test_case(Ty::Any, Ty::Any)]
|
||||
#[test_case(Ty::Any, Ty::IntLiteral(1))]
|
||||
#[test_case(Ty::IntLiteral(1), Ty::Unknown)]
|
||||
#[test_case(Ty::IntLiteral(1), Ty::Any)]
|
||||
@@ -3423,7 +3234,6 @@ pub(crate) mod tests {
|
||||
#[test_case(Ty::IntLiteral(1), Ty::Intersection{pos: vec![Ty::BuiltinInstance("int")], neg: vec![Ty::IntLiteral(1)]})]
|
||||
#[test_case(Ty::BuiltinClassLiteral("int"), Ty::BuiltinClassLiteral("object"))]
|
||||
#[test_case(Ty::BuiltinInstance("int"), Ty::BuiltinClassLiteral("int"))]
|
||||
#[test_case(Ty::TypingInstance("_SpecialForm"), Ty::TypingLiteral)]
|
||||
fn is_not_subtype_of(from: Ty, to: Ty) {
|
||||
let db = setup_db();
|
||||
assert!(!from.into_type(&db).is_subtype_of(&db, to.into_type(&db)));
|
||||
@@ -3510,18 +3320,6 @@ pub(crate) mod tests {
|
||||
assert!(from.into_type(&db).is_equivalent_to(&db, to.into_type(&db)));
|
||||
}
|
||||
|
||||
#[test_case(Ty::Any, Ty::Any)]
|
||||
#[test_case(Ty::Any, Ty::None)]
|
||||
#[test_case(Ty::Unknown, Ty::Unknown)]
|
||||
#[test_case(Ty::Todo, Ty::Todo)]
|
||||
#[test_case(Ty::Union(vec![Ty::IntLiteral(1), Ty::IntLiteral(2)]), Ty::Union(vec![Ty::IntLiteral(1), Ty::IntLiteral(0)]))]
|
||||
#[test_case(Ty::Union(vec![Ty::IntLiteral(1), Ty::IntLiteral(2)]), Ty::Union(vec![Ty::IntLiteral(1), Ty::IntLiteral(2), Ty::IntLiteral(3)]))]
|
||||
fn is_not_equivalent_to(from: Ty, to: Ty) {
|
||||
let db = setup_db();
|
||||
|
||||
assert!(!from.into_type(&db).is_equivalent_to(&db, to.into_type(&db)));
|
||||
}
|
||||
|
||||
#[test_case(Ty::Never, Ty::Never)]
|
||||
#[test_case(Ty::Never, Ty::None)]
|
||||
#[test_case(Ty::Never, Ty::BuiltinInstance("int"))]
|
||||
@@ -3705,28 +3503,13 @@ pub(crate) mod tests {
|
||||
#[test_case(Ty::None)]
|
||||
#[test_case(Ty::BooleanLiteral(true))]
|
||||
#[test_case(Ty::BooleanLiteral(false))]
|
||||
#[test_case(Ty::KnownClassInstance(KnownClass::NoDefaultType))]
|
||||
fn is_singleton(from: Ty) {
|
||||
let db = setup_db();
|
||||
|
||||
assert!(from.into_type(&db).is_singleton(&db));
|
||||
}
|
||||
|
||||
/// Explicitly test for Python version <3.13 and >=3.13, to ensure that
|
||||
/// the fallback to `typing_extensions` is working correctly.
|
||||
/// See [`KnownClass::canonical_module`] for more information.
|
||||
#[test_case(PythonVersion::PY312)]
|
||||
#[test_case(PythonVersion::PY313)]
|
||||
fn no_default_type_is_singleton(python_version: PythonVersion) {
|
||||
let db = TestDbBuilder::new()
|
||||
.with_python_version(python_version)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let no_default = Ty::KnownClassInstance(KnownClass::NoDefaultType).into_type(&db);
|
||||
|
||||
assert!(no_default.is_singleton(&db));
|
||||
}
|
||||
|
||||
#[test_case(Ty::None)]
|
||||
#[test_case(Ty::BooleanLiteral(true))]
|
||||
#[test_case(Ty::IntLiteral(1))]
|
||||
@@ -3765,41 +3548,6 @@ pub(crate) mod tests {
|
||||
assert!(!from.into_type(&db).is_singleton(&db));
|
||||
}
|
||||
|
||||
#[test_case(Ty::Never)]
|
||||
#[test_case(Ty::None)]
|
||||
#[test_case(Ty::IntLiteral(1))]
|
||||
#[test_case(Ty::BooleanLiteral(true))]
|
||||
#[test_case(Ty::StringLiteral("abc"))]
|
||||
#[test_case(Ty::LiteralString)]
|
||||
#[test_case(Ty::BytesLiteral("abc"))]
|
||||
#[test_case(Ty::KnownClassInstance(KnownClass::Str))]
|
||||
#[test_case(Ty::KnownClassInstance(KnownClass::Object))]
|
||||
#[test_case(Ty::KnownClassInstance(KnownClass::Type))]
|
||||
#[test_case(Ty::BuiltinClassLiteral("str"))]
|
||||
#[test_case(Ty::TypingLiteral)]
|
||||
#[test_case(Ty::Union(vec![Ty::KnownClassInstance(KnownClass::Str), Ty::None]))]
|
||||
#[test_case(Ty::Intersection{pos: vec![Ty::KnownClassInstance(KnownClass::Str)], neg: vec![Ty::LiteralString]})]
|
||||
#[test_case(Ty::Tuple(vec![]))]
|
||||
#[test_case(Ty::Tuple(vec![Ty::KnownClassInstance(KnownClass::Int), Ty::KnownClassInstance(KnownClass::Object)]))]
|
||||
fn is_fully_static(from: Ty) {
|
||||
let db = setup_db();
|
||||
|
||||
assert!(from.into_type(&db).is_fully_static(&db));
|
||||
}
|
||||
|
||||
#[test_case(Ty::Any)]
|
||||
#[test_case(Ty::Unknown)]
|
||||
#[test_case(Ty::Todo)]
|
||||
#[test_case(Ty::Union(vec![Ty::Any, Ty::KnownClassInstance(KnownClass::Str)]))]
|
||||
#[test_case(Ty::Union(vec![Ty::KnownClassInstance(KnownClass::Str), Ty::Unknown]))]
|
||||
#[test_case(Ty::Intersection{pos: vec![Ty::Any], neg: vec![Ty::LiteralString]})]
|
||||
#[test_case(Ty::Tuple(vec![Ty::KnownClassInstance(KnownClass::Int), Ty::Any]))]
|
||||
fn is_not_fully_static(from: Ty) {
|
||||
let db = setup_db();
|
||||
|
||||
assert!(!from.into_type(&db).is_fully_static(&db));
|
||||
}
|
||||
|
||||
#[test_case(Ty::IntLiteral(1); "is_int_literal_truthy")]
|
||||
#[test_case(Ty::IntLiteral(-1))]
|
||||
#[test_case(Ty::StringLiteral("foo"))]
|
||||
@@ -3854,10 +3602,7 @@ pub(crate) mod tests {
|
||||
|
||||
#[test]
|
||||
fn typing_vs_typeshed_no_default() {
|
||||
let db = TestDbBuilder::new()
|
||||
.with_python_version(PythonVersion::PY313)
|
||||
.build()
|
||||
.unwrap();
|
||||
let db = setup_db();
|
||||
|
||||
let typing_no_default = typing_symbol(&db, "NoDefault").expect_type();
|
||||
let typing_extensions_no_default = typing_extensions_symbol(&db, "NoDefault").expect_type();
|
||||
@@ -3977,6 +3722,19 @@ pub(crate) mod tests {
|
||||
let todo3 = todo_type!();
|
||||
let todo4 = todo_type!();
|
||||
|
||||
assert!(todo1.is_equivalent_to(&db, todo2));
|
||||
assert!(todo3.is_equivalent_to(&db, todo4));
|
||||
assert!(todo1.is_equivalent_to(&db, todo3));
|
||||
|
||||
assert!(todo1.is_subtype_of(&db, todo2));
|
||||
assert!(todo2.is_subtype_of(&db, todo1));
|
||||
|
||||
assert!(todo3.is_subtype_of(&db, todo4));
|
||||
assert!(todo4.is_subtype_of(&db, todo3));
|
||||
|
||||
assert!(todo1.is_subtype_of(&db, todo3));
|
||||
assert!(todo3.is_subtype_of(&db, todo1));
|
||||
|
||||
let int = KnownClass::Int.to_instance(&db);
|
||||
|
||||
assert!(int.is_assignable_to(&db, todo1));
|
||||
|
||||
@@ -73,8 +73,7 @@ impl<'db> UnionBuilder<'db> {
|
||||
// supertype of bool. Therefore, we are done.
|
||||
break;
|
||||
}
|
||||
|
||||
if ty.is_same_gradual_form(*element) || ty.is_subtype_of(self.db, *element) {
|
||||
if ty.is_subtype_of(self.db, *element) {
|
||||
return self;
|
||||
} else if element.is_subtype_of(self.db, ty) {
|
||||
to_remove.push(index);
|
||||
@@ -260,9 +259,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
let mut to_remove = SmallVec::<[usize; 1]>::new();
|
||||
for (index, existing_positive) in self.positive.iter().enumerate() {
|
||||
// S & T = S if S <: T
|
||||
if existing_positive.is_subtype_of(db, new_positive)
|
||||
|| existing_positive.is_same_gradual_form(new_positive)
|
||||
{
|
||||
if existing_positive.is_subtype_of(db, new_positive) {
|
||||
return;
|
||||
}
|
||||
// same rule, reverse order
|
||||
@@ -378,14 +375,36 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
|
||||
|
||||
use crate::db::tests::{setup_db, TestDb};
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::stdlib::typing_symbol;
|
||||
use crate::types::{global_symbol, todo_type, KnownClass, UnionBuilder};
|
||||
|
||||
use crate::ProgramSettings;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use test_case::test_case;
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union() {
|
||||
let db = setup_db();
|
||||
@@ -478,17 +497,6 @@ mod tests {
|
||||
assert_eq!(u1.expect_union().elements(&db), &[t1, t0]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_simplify_multiple_unknown() {
|
||||
let db = setup_db();
|
||||
let t0 = KnownClass::Str.to_instance(&db);
|
||||
let t1 = Type::Unknown;
|
||||
|
||||
let u = UnionType::from_elements(&db, [t0, t1, t1]);
|
||||
|
||||
assert_eq!(u.expect_union().elements(&db), &[t0, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_subsume_multiple() {
|
||||
let db = setup_db();
|
||||
@@ -596,42 +604,6 @@ mod tests {
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_multiple_unknown() {
|
||||
let db = setup_db();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Unknown)
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::IntLiteral(0))
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(
|
||||
ty,
|
||||
IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::IntLiteral(0))
|
||||
.build()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersection_distributes_over_union() {
|
||||
let db = setup_db();
|
||||
@@ -654,85 +626,59 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn intersection_negation_distributes_over_union() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
r#"
|
||||
class A: ...
|
||||
class B: ...
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let module = ruff_db::files::system_path_to_file(&db, "/src/module.py").unwrap();
|
||||
|
||||
let a = global_symbol(&db, module, "A")
|
||||
let db = setup_db();
|
||||
let st = typing_symbol(&db, "Sized").expect_type().to_instance(&db);
|
||||
let ht = typing_symbol(&db, "Hashable")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let b = global_symbol(&db, module, "B")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
|
||||
// intersection: A & B
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_positive(b)
|
||||
// sh_t: Sized & Hashable
|
||||
let sh_t = IntersectionBuilder::new(&db)
|
||||
.add_positive(st)
|
||||
.add_positive(ht)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
assert_eq!(intersection.pos_vec(&db), &[a, b]);
|
||||
assert_eq!(intersection.neg_vec(&db), &[]);
|
||||
assert_eq!(sh_t.pos_vec(&db), &[st, ht]);
|
||||
assert_eq!(sh_t.neg_vec(&db), &[]);
|
||||
|
||||
// ~intersection => ~A | ~B
|
||||
let negated_intersection = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Intersection(intersection))
|
||||
// ~sh_t => ~Sized | ~Hashable
|
||||
let not_s_h_t = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Intersection(sh_t))
|
||||
.build()
|
||||
.expect_union();
|
||||
|
||||
// should have as elements ~A and ~B
|
||||
let not_a = a.negate(&db);
|
||||
let not_b = b.negate(&db);
|
||||
assert_eq!(negated_intersection.elements(&db), &[not_a, not_b]);
|
||||
// should have as elements: (~Sized),(~Hashable)
|
||||
let not_st = st.negate(&db);
|
||||
let not_ht = ht.negate(&db);
|
||||
assert_eq!(not_s_h_t.elements(&db), &[not_st, not_ht]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_intersection_negation_distributes_over_union() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
r#"
|
||||
class A: ...
|
||||
class B: ...
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let module = ruff_db::files::system_path_to_file(&db, "/src/module.py").unwrap();
|
||||
|
||||
let a = global_symbol(&db, module, "A")
|
||||
let db = setup_db();
|
||||
let it = KnownClass::Int.to_instance(&db);
|
||||
let st = typing_symbol(&db, "Sized").expect_type().to_instance(&db);
|
||||
let ht = typing_symbol(&db, "Hashable")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let b = global_symbol(&db, module, "B")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let int = KnownClass::Int.to_instance(&db);
|
||||
|
||||
// a_not_b: A & ~B
|
||||
let a_not_b = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_negative(b)
|
||||
// s_not_h_t: Sized & ~Hashable
|
||||
let s_not_h_t = IntersectionBuilder::new(&db)
|
||||
.add_positive(st)
|
||||
.add_negative(ht)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
assert_eq!(a_not_b.pos_vec(&db), &[a]);
|
||||
assert_eq!(a_not_b.neg_vec(&db), &[b]);
|
||||
assert_eq!(s_not_h_t.pos_vec(&db), &[st]);
|
||||
assert_eq!(s_not_h_t.neg_vec(&db), &[ht]);
|
||||
|
||||
// let's build
|
||||
// int & ~(A & ~B)
|
||||
// = int & ~(A & ~B)
|
||||
// = int & (~A | B)
|
||||
// = (int & ~A) | (int & B)
|
||||
let t = IntersectionBuilder::new(&db)
|
||||
.add_positive(int)
|
||||
.add_negative(Type::Intersection(a_not_b))
|
||||
// let's build int & ~(Sized & ~Hashable)
|
||||
let tt = IntersectionBuilder::new(&db)
|
||||
.add_positive(it)
|
||||
.add_negative(Type::Intersection(s_not_h_t))
|
||||
.build();
|
||||
assert_eq!(t.display(&db).to_string(), "int & ~A | int & B");
|
||||
|
||||
// int & ~(Sized & ~Hashable)
|
||||
// -> int & (~Sized | Hashable)
|
||||
// -> (int & ~Sized) | (int & Hashable)
|
||||
assert_eq!(tt.display(&db).to_string(), "int & ~Sized | int & Hashable");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -1,12 +1,6 @@
use crate::lint::{Level, LintId, LintMetadata, LintRegistryBuilder, LintStatus};
use crate::types::string_annotation::{
    BYTE_STRING_TYPE_ANNOTATION, ESCAPE_CHARACTER_IN_FORWARD_ANNOTATION, FSTRING_TYPE_ANNOTATION,
    IMPLICIT_CONCATENATED_STRING_TYPE_ANNOTATION, INVALID_SYNTAX_IN_FORWARD_ANNOTATION,
    RAW_STRING_TYPE_ANNOTATION,
};
use crate::types::{ClassLiteralType, Type};
use crate::{declare_lint, Db};
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
use crate::Db;
use ruff_db::diagnostic::{Diagnostic, Severity};
use ruff_db::files::File;
use ruff_python_ast::{self as ast, AnyNodeRef};
use ruff_text_size::{Ranged, TextRange};
@@ -15,411 +9,18 @@ use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

/// Registers all known type check lints.
|
||||
pub(crate) fn register_type_lints(registry: &mut LintRegistryBuilder) {
|
||||
registry.register_lint(&UNRESOLVED_REFERENCE);
|
||||
registry.register_lint(&POSSIBLY_UNRESOLVED_REFERENCE);
|
||||
registry.register_lint(&NOT_ITERABLE);
|
||||
registry.register_lint(&INDEX_OUT_OF_BOUNDS);
|
||||
registry.register_lint(&NON_SUBSCRIPTABLE);
|
||||
registry.register_lint(&UNRESOLVED_IMPORT);
|
||||
registry.register_lint(&POSSIBLY_UNBOUND_IMPORT);
|
||||
registry.register_lint(&ZERO_STEPSIZE_IN_SLICE);
|
||||
registry.register_lint(&INVALID_ASSIGNMENT);
|
||||
registry.register_lint(&INVALID_DECLARATION);
|
||||
registry.register_lint(&CONFLICTING_DECLARATIONS);
|
||||
registry.register_lint(&DIVISION_BY_ZERO);
|
||||
registry.register_lint(&CALL_NON_CALLABLE);
|
||||
registry.register_lint(&INVALID_TYPE_PARAMETER);
|
||||
registry.register_lint(&INVALID_TYPE_VARIABLE_CONSTRAINTS);
|
||||
registry.register_lint(&CYCLIC_CLASS_DEFINITION);
|
||||
registry.register_lint(&DUPLICATE_BASE);
|
||||
registry.register_lint(&INVALID_BASE);
|
||||
registry.register_lint(&INCONSISTENT_MRO);
|
||||
registry.register_lint(&INVALID_LITERAL_PARAMETER);
|
||||
registry.register_lint(&CALL_POSSIBLY_UNBOUND_METHOD);
|
||||
registry.register_lint(&POSSIBLY_UNBOUND_ATTRIBUTE);
|
||||
registry.register_lint(&UNRESOLVED_ATTRIBUTE);
|
||||
registry.register_lint(&CONFLICTING_METACLASS);
|
||||
registry.register_lint(&UNSUPPORTED_OPERATOR);
|
||||
registry.register_lint(&INVALID_CONTEXT_MANAGER);
|
||||
registry.register_lint(&UNDEFINED_REVEAL);
|
||||
registry.register_lint(&INVALID_PARAMETER_DEFAULT);
|
||||
registry.register_lint(&INVALID_TYPE_FORM);
|
||||
|
||||
// String annotations
|
||||
registry.register_lint(&FSTRING_TYPE_ANNOTATION);
|
||||
registry.register_lint(&BYTE_STRING_TYPE_ANNOTATION);
|
||||
registry.register_lint(&RAW_STRING_TYPE_ANNOTATION);
|
||||
registry.register_lint(&IMPLICIT_CONCATENATED_STRING_TYPE_ANNOTATION);
|
||||
registry.register_lint(&INVALID_SYNTAX_IN_FORWARD_ANNOTATION);
|
||||
registry.register_lint(&ESCAPE_CHARACTER_IN_FORWARD_ANNOTATION);
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for references to names that are not defined.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Using an undefined variable will raise a `NameError` at runtime.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// print(x) # NameError: name 'x' is not defined
|
||||
/// ```
|
||||
pub(crate) static UNRESOLVED_REFERENCE = {
|
||||
summary: "detects references to names that are not defined",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Warn,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for references to names that are possibly not defined.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Using an undefined variable will raise a `NameError` at runtime.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// for i in range(0):
|
||||
/// x = i
|
||||
///
|
||||
/// print(x) # NameError: name 'x' is not defined
|
||||
/// ```
|
||||
pub(crate) static POSSIBLY_UNRESOLVED_REFERENCE = {
|
||||
summary: "detects references to possibly unresolved references",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Warn,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for objects that are not iterable but are used in a context that requires them to be.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Iterating over an object that is not iterable will raise a `TypeError` at runtime.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ```python
|
||||
/// for i in 34: # TypeError: 'int' object is not iterable
|
||||
/// pass
|
||||
/// ```
|
||||
pub(crate) static NOT_ITERABLE = {
|
||||
summary: "detects objects that are not iterable",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// TODO
|
||||
pub(crate) static INDEX_OUT_OF_BOUNDS = {
|
||||
summary: "detects index out of bounds errors",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for objects that do not support subscripting but are used in a context that requires them to be.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Subscripting an object that does not support it will raise a `TypeError` at runtime.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// 4[1] # TypeError: 'int' object is not subscriptable
|
||||
/// ```
|
||||
pub(crate) static NON_SUBSCRIPTABLE = {
|
||||
summary: "detects objects that do not support subscripting",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for import statements for which the module cannot be resolved.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Importing a module that cannot be resolved will raise an `ImportError` at runtime.
|
||||
pub(crate) static UNRESOLVED_IMPORT = {
|
||||
summary: "detects unresolved imports",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
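For illustration, a hypothetical module name that would trigger the check summarized above (the module name is invented; the message matches the builder used later in this diff):

```python
import does_not_exist  # unresolved-import: Cannot resolve import `does_not_exist`
```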
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static POSSIBLY_UNBOUND_IMPORT = {
|
||||
summary: "detects possibly unbound imports",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Warn,
|
||||
}
|
||||
}
|
||||
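The doc comment above is still a TODO; as a hedged guess at what the summary describes, importing a name that its source module only defines conditionally might be flagged like this (the rule name in the comment is an assumption):

```python
from socket import AF_UNIX  # possibly-unbound-import (assumed): `AF_UNIX` is only defined on some platforms
```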
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for step size 0 in slices.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// A slice with a step size of zero will raise a `ValueError` at runtime.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// l = list(range(10))
|
||||
/// l[1:10:0] # ValueError: slice step cannot be zero
/// ```
|
||||
pub(crate) static ZERO_STEPSIZE_IN_SLICE = {
|
||||
summary: "detects a slice step size of zero",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INVALID_ASSIGNMENT = {
|
||||
summary: "detects invalid assignments",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INVALID_DECLARATION = {
|
||||
summary: "detects invalid declarations",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static CONFLICTING_DECLARATIONS = {
|
||||
summary: "detects conflicting declarations",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for division by zero.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Dividing by zero raises a `ZeroDivisionError` at runtime.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// 5 / 0
|
||||
/// ```
|
||||
pub(crate) static DIVISION_BY_ZERO = {
|
||||
summary: "detects division by zero",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for calls to non-callable objects.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Calling a non-callable object will raise a `TypeError` at runtime.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// 4() # TypeError: 'int' object is not callable
|
||||
/// ```
|
||||
pub(crate) static CALL_NON_CALLABLE = {
|
||||
summary: "detects calls to non-callable objects",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// TODO
|
||||
pub(crate) static INVALID_TYPE_PARAMETER = {
|
||||
summary: "detects invalid type parameters",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INVALID_TYPE_VARIABLE_CONSTRAINTS = {
|
||||
summary: "detects invalid type variable constraints",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for class definitions with a cyclic inheritance chain.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// TODO
|
||||
pub(crate) static CYCLIC_CLASS_DEFINITION = {
|
||||
summary: "detects cyclic class definitions",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static DUPLICATE_BASE = {
|
||||
summary: "detects class definitions with duplicate bases",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INVALID_BASE = {
|
||||
summary: "detects class definitions with an invalid base",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INCONSISTENT_MRO = {
|
||||
summary: "detects class definitions with an inconsistent MRO",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for invalid parameters to `typing.Literal`.
|
||||
pub(crate) static INVALID_LITERAL_PARAMETER = {
|
||||
summary: "detects invalid literal parameters",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
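As a hedged illustration of an invalid `typing.Literal` parameter (whether this exact form is flagged is an assumption):

```python
from typing import Literal

x: Literal[3 + 4]  # invalid-literal-parameter (assumed): Literal[...] only accepts literal values
```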
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for calls to possibly unbound methods.
|
||||
pub(crate) static CALL_POSSIBLY_UNBOUND_METHOD = {
|
||||
summary: "detects calls to possibly unbound methods",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Warn,
|
||||
}
|
||||
}
|
||||
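A hedged sketch of the situation the summary above describes: a call to a method that is only conditionally defined (the rule name in the comment is an assumption):

```python
import sys

class C:
    if sys.platform == "win32":
        def method(self) -> None: ...

C().method()  # call-possibly-unbound-method (assumed): `method` may not be defined
```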
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for possibly unbound attributes.
|
||||
pub(crate) static POSSIBLY_UNBOUND_ATTRIBUTE = {
|
||||
summary: "detects references to possibly unbound attributes",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Warn,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for unresolved attributes.
|
||||
pub(crate) static UNRESOLVED_ATTRIBUTE = {
|
||||
summary: "detects references to unresolved attributes",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static CONFLICTING_METACLASS = {
|
||||
summary: "detects conflicting metaclasses",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for binary expressions, comparisons, and unary expressions where the operands don't support the operator.
|
||||
pub(crate) static UNSUPPORTED_OPERATOR = {
|
||||
summary: "detects binary expressions where the operands don't support the operator",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
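For illustration, a binary expression whose operands do not support the operator (a hypothetical class; the rule name in the comment is an assumption):

```python
class A: ...

A() + A()  # unsupported-operator (assumed): `A` defines neither `__add__` nor `__radd__`
```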
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INVALID_CONTEXT_MANAGER = {
|
||||
summary: "detects expressions used in with statements that don't implement the context manager protocol",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for calls to `reveal_type` without importing it.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Using `reveal_type` without importing it will raise a `NameError` at runtime.
|
||||
pub(crate) static UNDEFINED_REVEAL = {
|
||||
summary: "detects usages of `reveal_type` without importing it",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Warn,
|
||||
}
|
||||
}
|
||||
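A one-line illustration of the case described above (the rule name in the comment is an assumption):

```python
reveal_type(42)  # undefined-reveal (assumed): `reveal_type` used without importing it
```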
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for default values that can't be assigned to the parameter's annotated type.
|
||||
pub(crate) static INVALID_PARAMETER_DEFAULT = {
|
||||
summary: "detects default values that can't be assigned to the parameter's annotated type",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
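A hedged example of a default value that does not match the parameter's annotation (the rule name in the comment is an assumption):

```python
def greet(name: str = 42) -> None:  # invalid-parameter-default (assumed): `42` is not assignable to `str`
    ...
```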
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for `type[]` usages that have too many or too few type arguments.
|
||||
pub(crate) static INVALID_TYPE_FORM = {
|
||||
summary: "detects invalid type forms",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
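A hedged example of a `type[]` form with too many arguments (the rule name in the comment is an assumption):

```python
def f(x: type[int, str]) -> None: ...  # invalid-type-form (assumed): `type[]` expects exactly one argument
```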
|
||||
#[derive(Debug, Eq, PartialEq, Clone)]
|
||||
pub struct TypeCheckDiagnostic {
|
||||
pub(super) id: DiagnosticId,
|
||||
// TODO: Don't use string keys for rules
|
||||
pub(super) rule: String,
|
||||
pub(super) message: String,
|
||||
pub(super) range: TextRange,
|
||||
pub(super) severity: Severity,
|
||||
pub(super) file: File,
|
||||
}
|
||||
|
||||
impl TypeCheckDiagnostic {
|
||||
pub fn id(&self) -> DiagnosticId {
|
||||
self.id
|
||||
pub fn rule(&self) -> &str {
|
||||
&self.rule
|
||||
}
|
||||
|
||||
pub fn message(&self) -> &str {
|
||||
@@ -432,8 +33,8 @@ impl TypeCheckDiagnostic {
|
||||
}
|
||||
|
||||
impl Diagnostic for TypeCheckDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
self.id
|
||||
fn rule(&self) -> &str {
|
||||
TypeCheckDiagnostic::rule(self)
|
||||
}
|
||||
|
||||
fn message(&self) -> Cow<str> {
|
||||
@@ -449,7 +50,7 @@ impl Diagnostic for TypeCheckDiagnostic {
|
||||
}
|
||||
|
||||
fn severity(&self) -> Severity {
|
||||
self.severity
|
||||
Severity::Error
|
||||
}
|
||||
}
|
||||
|
||||
@@ -549,9 +150,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
|
||||
/// Emit a diagnostic declaring that the object represented by `node` is not iterable
|
||||
pub(super) fn add_not_iterable(&mut self, node: AnyNodeRef, not_iterable_ty: Type<'db>) {
|
||||
self.add_lint(
|
||||
&NOT_ITERABLE,
|
||||
self.add(
|
||||
node,
|
||||
"not-iterable",
|
||||
format_args!(
|
||||
"Object of type `{}` is not iterable",
|
||||
not_iterable_ty.display(self.db)
|
||||
@@ -566,9 +167,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
node: AnyNodeRef,
|
||||
element_ty: Type<'db>,
|
||||
) {
|
||||
self.add_lint(
|
||||
&NOT_ITERABLE,
|
||||
self.add(
|
||||
node,
|
||||
"not-iterable",
|
||||
format_args!(
|
||||
"Object of type `{}` is not iterable because its `__iter__` method is possibly unbound",
|
||||
element_ty.display(self.db)
|
||||
@@ -585,9 +186,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
length: usize,
|
||||
index: i64,
|
||||
) {
|
||||
self.add_lint(
|
||||
&INDEX_OUT_OF_BOUNDS,
|
||||
self.add(
|
||||
node,
|
||||
"index-out-of-bounds",
|
||||
format_args!(
|
||||
"Index {index} is out of bounds for {kind} `{}` with length {length}",
|
||||
tuple_ty.display(self.db)
|
||||
@@ -602,9 +203,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
non_subscriptable_ty: Type<'db>,
|
||||
method: &str,
|
||||
) {
|
||||
self.add_lint(
|
||||
&NON_SUBSCRIPTABLE,
|
||||
self.add(
|
||||
node,
|
||||
"non-subscriptable",
|
||||
format_args!(
|
||||
"Cannot subscript object of type `{}` with no `{method}` method",
|
||||
non_subscriptable_ty.display(self.db)
|
||||
@@ -618,9 +219,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
level: u32,
|
||||
module: Option<&str>,
|
||||
) {
|
||||
self.add_lint(
|
||||
&UNRESOLVED_IMPORT,
|
||||
self.add(
|
||||
import_node.into(),
|
||||
"unresolved-import",
|
||||
format_args!(
|
||||
"Cannot resolve import `{}{}`",
|
||||
".".repeat(level as usize),
|
||||
@@ -630,9 +231,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
}
|
||||
|
||||
pub(super) fn add_slice_step_size_zero(&mut self, node: AnyNodeRef) {
|
||||
self.add_lint(
|
||||
&ZERO_STEPSIZE_IN_SLICE,
|
||||
self.add(
|
||||
node,
|
||||
"zero-stepsize-in-slice",
|
||||
format_args!("Slice step size cannot be zero"),
|
||||
);
|
||||
}
|
||||
@@ -645,19 +246,19 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
) {
|
||||
match declared_ty {
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => {
|
||||
self.add_lint(&INVALID_ASSIGNMENT, node, format_args!(
|
||||
self.add(node, "invalid-assignment", format_args!(
|
||||
"Implicit shadowing of class `{}`; annotate to make it explicit if this is intentional",
|
||||
class.name(self.db)));
|
||||
}
|
||||
Type::FunctionLiteral(function) => {
|
||||
self.add_lint(&INVALID_ASSIGNMENT, node, format_args!(
|
||||
self.add(node, "invalid-assignment", format_args!(
|
||||
"Implicit shadowing of function `{}`; annotate to make it explicit if this is intentional",
|
||||
function.name(self.db)));
|
||||
}
|
||||
_ => {
|
||||
self.add_lint(
|
||||
&INVALID_ASSIGNMENT,
|
||||
self.add(
|
||||
node,
|
||||
"invalid-assignment",
|
||||
format_args!(
|
||||
"Object of type `{}` is not assignable to `{}`",
|
||||
assigned_ty.display(self.db),
|
||||
@@ -671,9 +272,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
pub(super) fn add_possibly_unresolved_reference(&mut self, expr_name_node: &ast::ExprName) {
|
||||
let ast::ExprName { id, .. } = expr_name_node;
|
||||
|
||||
self.add_lint(
|
||||
&POSSIBLY_UNRESOLVED_REFERENCE,
|
||||
self.add(
|
||||
expr_name_node.into(),
|
||||
"possibly-unresolved-reference",
|
||||
format_args!("Name `{id}` used when possibly not defined"),
|
||||
);
|
||||
}
|
||||
@@ -681,37 +282,17 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
pub(super) fn add_unresolved_reference(&mut self, expr_name_node: &ast::ExprName) {
|
||||
let ast::ExprName { id, .. } = expr_name_node;
|
||||
|
||||
self.add_lint(
|
||||
&UNRESOLVED_REFERENCE,
|
||||
self.add(
|
||||
expr_name_node.into(),
|
||||
"unresolved-reference",
|
||||
format_args!("Name `{id}` used when not defined"),
|
||||
);
|
||||
}
|
||||
|
||||
pub(super) fn add_lint(
|
||||
&mut self,
|
||||
lint: &'static LintMetadata,
|
||||
node: AnyNodeRef,
|
||||
message: std::fmt::Arguments,
|
||||
) {
|
||||
// Skip over diagnostics if the rule is disabled.
|
||||
let Some(severity) = self.db.rule_selection().severity(LintId::of(lint)) else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.add(node, DiagnosticId::Lint(lint.name()), severity, message);
|
||||
}
|
||||
|
||||
/// Adds a new diagnostic.
|
||||
///
|
||||
/// The diagnostic does not get added if the rule isn't enabled for this file.
|
||||
pub(super) fn add(
|
||||
&mut self,
|
||||
node: AnyNodeRef,
|
||||
id: DiagnosticId,
|
||||
severity: Severity,
|
||||
message: std::fmt::Arguments,
|
||||
) {
|
||||
pub(super) fn add(&mut self, node: AnyNodeRef, rule: &str, message: std::fmt::Arguments) {
|
||||
if !self.db.is_file_open(self.file) {
|
||||
return;
|
||||
}
|
||||
@@ -724,10 +305,9 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
|
||||
self.diagnostics.push(TypeCheckDiagnostic {
|
||||
file: self.file,
|
||||
id,
|
||||
rule: rule.to_string(),
|
||||
message: message.to_string(),
|
||||
range: node.range(),
|
||||
severity,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -289,7 +289,7 @@ struct DisplayMaybeNegatedType<'db> {
|
||||
negated: bool,
|
||||
}
|
||||
|
||||
impl Display for DisplayMaybeNegatedType<'_> {
|
||||
impl<'db> Display for DisplayMaybeNegatedType<'db> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
if self.negated {
|
||||
f.write_str("~")?;
|
||||
@@ -319,7 +319,7 @@ pub(crate) struct DisplayTypeArray<'b, 'db> {
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl Display for DisplayTypeArray<'_, '_> {
|
||||
impl<'db> Display for DisplayTypeArray<'_, 'db> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
f.join(", ")
|
||||
.entries(self.types.iter().map(|ty| ty.display(self.db)))
|
||||
@@ -357,10 +357,31 @@ impl Display for DisplayStringLiteralType<'_> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::setup_db;
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::types::{global_symbol, SliceLiteralType, StringLiteralType, Type, UnionType};
|
||||
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
|
||||
|
||||
File diff suppressed because it is too large
@@ -328,6 +328,15 @@ impl<'db> ClassBase<'db> {
|
||||
Display { base: self, db }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[track_caller]
|
||||
pub(super) fn expect_class_base(self) -> Class<'db> {
|
||||
match self {
|
||||
ClassBase::Class(class) => class,
|
||||
_ => panic!("Expected a `ClassBase::Class()` variant"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a `ClassBase` representing the class `builtins.object`
|
||||
fn object(db: &'db dyn Db) -> Self {
|
||||
KnownClass::Object
|
||||
@@ -365,12 +374,10 @@ impl<'db> ClassBase<'db> {
|
||||
KnownInstanceType::TypeVar(_)
|
||||
| KnownInstanceType::TypeAliasType(_)
|
||||
| KnownInstanceType::Literal
|
||||
| KnownInstanceType::LiteralString
|
||||
| KnownInstanceType::Union
|
||||
| KnownInstanceType::NoReturn
|
||||
| KnownInstanceType::Never
|
||||
| KnownInstanceType::Optional => None,
|
||||
KnownInstanceType::Any => Some(Self::Any),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -398,12 +405,6 @@ impl<'db> ClassBase<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> From<Class<'db>> for ClassBase<'db> {
|
||||
fn from(value: Class<'db>) -> Self {
|
||||
ClassBase::Class(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> From<ClassBase<'db>> for Type<'db> {
|
||||
fn from(value: ClassBase<'db>) -> Self {
|
||||
match value {
|
||||
|
||||
@@ -54,7 +54,6 @@ pub(crate) fn narrowing_constraint<'db>(
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::ref_option)]
|
||||
#[salsa::tracked(return_ref)]
|
||||
fn all_narrowing_constraints_for_pattern<'db>(
|
||||
db: &'db dyn Db,
|
||||
@@ -63,7 +62,6 @@ fn all_narrowing_constraints_for_pattern<'db>(
|
||||
NarrowingConstraintsBuilder::new(db, ConstraintNode::Pattern(pattern), true).finish()
|
||||
}
|
||||
|
||||
#[allow(clippy::ref_option)]
|
||||
#[salsa::tracked(return_ref)]
|
||||
fn all_narrowing_constraints_for_expression<'db>(
|
||||
db: &'db dyn Db,
|
||||
@@ -72,7 +70,6 @@ fn all_narrowing_constraints_for_expression<'db>(
|
||||
NarrowingConstraintsBuilder::new(db, ConstraintNode::Expression(expression), true).finish()
|
||||
}
|
||||
|
||||
#[allow(clippy::ref_option)]
|
||||
#[salsa::tracked(return_ref)]
|
||||
fn all_negative_narrowing_constraints_for_expression<'db>(
|
||||
db: &'db dyn Db,
|
||||
@@ -294,15 +291,8 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
.chain(comparators)
|
||||
.tuple_windows::<(&ruff_python_ast::Expr, &ruff_python_ast::Expr)>();
|
||||
let mut constraints = NarrowingConstraints::default();
|
||||
|
||||
let mut last_rhs_ty: Option<Type> = None;
|
||||
|
||||
for (op, (left, right)) in std::iter::zip(&**ops, comparator_tuples) {
|
||||
let lhs_ty = last_rhs_ty.unwrap_or_else(|| {
|
||||
inference.expression_ty(left.scoped_expression_id(self.db, scope))
|
||||
});
|
||||
let rhs_ty = inference.expression_ty(right.scoped_expression_id(self.db, scope));
|
||||
last_rhs_ty = Some(rhs_ty);
|
||||
|
||||
match left {
|
||||
ast::Expr::Name(ast::ExprName {
|
||||
@@ -337,9 +327,6 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
constraints.insert(symbol, ty);
|
||||
}
|
||||
}
|
||||
ast::CmpOp::Eq if lhs_ty.is_literal_string() => {
|
||||
constraints.insert(symbol, rhs_ty);
|
||||
}
|
||||
_ => {
|
||||
// TODO other comparison types
|
||||
}
|
||||
@@ -398,58 +385,46 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
let scope = self.scope();
|
||||
let inference = infer_expression_types(self.db, expression);
|
||||
|
||||
let callable_ty =
|
||||
inference.expression_ty(expr_call.func.scoped_expression_id(self.db, scope));
|
||||
|
||||
// TODO: add support for PEP 604 union types on the right hand side of `isinstance`
|
||||
// and `issubclass`, for example `isinstance(x, str | (int | float))`.
|
||||
match callable_ty {
|
||||
Type::FunctionLiteral(function_type) if expr_call.arguments.keywords.is_empty() => {
|
||||
let function = function_type
|
||||
.known(self.db)
|
||||
.and_then(KnownFunction::constraint_function)?;
|
||||
|
||||
let [ast::Expr::Name(ast::ExprName { id, .. }), class_info] =
|
||||
match inference
|
||||
.expression_ty(expr_call.func.scoped_expression_id(self.db, scope))
|
||||
.into_function_literal()
|
||||
.and_then(|f| f.known(self.db))
|
||||
.and_then(KnownFunction::constraint_function)
|
||||
{
|
||||
Some(function) if expr_call.arguments.keywords.is_empty() => {
|
||||
if let [ast::Expr::Name(ast::ExprName { id, .. }), class_info] =
|
||||
&*expr_call.arguments.args
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
{
|
||||
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
|
||||
|
||||
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
|
||||
let class_info_ty =
|
||||
inference.expression_ty(class_info.scoped_expression_id(self.db, scope));
|
||||
|
||||
let class_info_ty =
|
||||
inference.expression_ty(class_info.scoped_expression_id(self.db, scope));
|
||||
|
||||
let to_constraint = match function {
|
||||
KnownConstraintFunction::IsInstance => {
|
||||
|class_literal: ClassLiteralType<'db>| Type::instance(class_literal.class)
|
||||
}
|
||||
KnownConstraintFunction::IsSubclass => {
|
||||
|class_literal: ClassLiteralType<'db>| {
|
||||
Type::subclass_of(class_literal.class)
|
||||
let to_constraint = match function {
|
||||
KnownConstraintFunction::IsInstance => {
|
||||
|class_literal: ClassLiteralType<'db>| {
|
||||
Type::instance(class_literal.class)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
KnownConstraintFunction::IsSubclass => {
|
||||
|class_literal: ClassLiteralType<'db>| {
|
||||
Type::subclass_of(class_literal.class)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
generate_classinfo_constraint(self.db, &class_info_ty, to_constraint).map(
|
||||
|constraint| {
|
||||
let mut constraints = NarrowingConstraints::default();
|
||||
constraints.insert(symbol, constraint.negate_if(self.db, !is_positive));
|
||||
constraints
|
||||
},
|
||||
)
|
||||
}
|
||||
// for the expression `bool(E)`, we further narrow the type based on `E`
|
||||
Type::ClassLiteral(class_type)
|
||||
if expr_call.arguments.args.len() == 1
|
||||
&& expr_call.arguments.keywords.is_empty()
|
||||
&& class_type.class.is_known(self.db, KnownClass::Bool) =>
|
||||
{
|
||||
self.evaluate_expression_node_constraint(
|
||||
&expr_call.arguments.args[0],
|
||||
expression,
|
||||
is_positive,
|
||||
)
|
||||
generate_classinfo_constraint(self.db, &class_info_ty, to_constraint).map(
|
||||
|constraint| {
|
||||
let mut constraints = NarrowingConstraints::default();
|
||||
constraints.insert(symbol, constraint.negate_if(self.db, !is_positive));
|
||||
constraints
|
||||
},
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
|
||||
@@ -1,243 +0,0 @@
|
||||
//! This module contains quickcheck-based property tests for `Type`s.
//!
//! These tests are disabled by default, as they are non-deterministic and slow. You can
//! run them explicitly using:
//!
//! ```sh
//! cargo test -p red_knot_python_semantic -- --ignored types::property_tests::stable
//! ```
//!
//! The number of tests (default: 100) can be controlled by setting the `QUICKCHECK_TESTS`
//! environment variable. For example:
//!
//! ```sh
//! QUICKCHECK_TESTS=10000 cargo test …
//! ```
//!
//! If you want to run these tests for a longer period of time, it's advisable to run them
//! in release mode. As some tests are slower than others, it's advisable to run them in a
//! loop until they fail:
//!
//! ```sh
//! export QUICKCHECK_TESTS=100000
//! while cargo test --release -p red_knot_python_semantic -- \
//!     --ignored types::property_tests::stable; do :; done
//! ```

use std::sync::{Arc, Mutex, MutexGuard, OnceLock};
|
||||
|
||||
use super::tests::Ty;
|
||||
use crate::db::tests::{setup_db, TestDb};
|
||||
use crate::types::KnownClass;
|
||||
use quickcheck::{Arbitrary, Gen};
|
||||
|
||||
fn arbitrary_core_type(g: &mut Gen) -> Ty {
|
||||
// We could select a random integer here, but this would make it much less
|
||||
// likely to explore interesting edge cases:
|
||||
let int_lit = Ty::IntLiteral(*g.choose(&[-2, -1, 0, 1, 2]).unwrap());
|
||||
let bool_lit = Ty::BooleanLiteral(bool::arbitrary(g));
|
||||
g.choose(&[
|
||||
Ty::Never,
|
||||
Ty::Unknown,
|
||||
Ty::None,
|
||||
Ty::Any,
|
||||
int_lit,
|
||||
bool_lit,
|
||||
Ty::StringLiteral(""),
|
||||
Ty::StringLiteral("a"),
|
||||
Ty::LiteralString,
|
||||
Ty::BytesLiteral(""),
|
||||
Ty::BytesLiteral("\x00"),
|
||||
Ty::KnownClassInstance(KnownClass::Object),
|
||||
Ty::KnownClassInstance(KnownClass::Str),
|
||||
Ty::KnownClassInstance(KnownClass::Int),
|
||||
Ty::KnownClassInstance(KnownClass::Bool),
|
||||
Ty::KnownClassInstance(KnownClass::List),
|
||||
Ty::KnownClassInstance(KnownClass::Tuple),
|
||||
Ty::KnownClassInstance(KnownClass::FunctionType),
|
||||
Ty::KnownClassInstance(KnownClass::SpecialForm),
|
||||
Ty::KnownClassInstance(KnownClass::TypeVar),
|
||||
Ty::KnownClassInstance(KnownClass::TypeAliasType),
|
||||
Ty::KnownClassInstance(KnownClass::NoDefaultType),
|
||||
Ty::TypingLiteral,
|
||||
Ty::BuiltinClassLiteral("str"),
|
||||
Ty::BuiltinClassLiteral("int"),
|
||||
Ty::BuiltinClassLiteral("bool"),
|
||||
Ty::BuiltinClassLiteral("object"),
|
||||
])
|
||||
.unwrap()
|
||||
.clone()
|
||||
}
|
||||
|
||||
/// Constructs an arbitrary type.
|
||||
///
|
||||
/// The `size` parameter controls the depth of the type tree. For example,
|
||||
/// a simple type like `int` has a size of 0, `Union[int, str]` has a size
|
||||
/// of 1, `tuple[int, Union[str, bytes]]` has a size of 2, etc.
|
||||
fn arbitrary_type(g: &mut Gen, size: u32) -> Ty {
|
||||
if size == 0 {
|
||||
arbitrary_core_type(g)
|
||||
} else {
|
||||
match u32::arbitrary(g) % 4 {
|
||||
0 => arbitrary_core_type(g),
|
||||
1 => Ty::Union(
|
||||
(0..*g.choose(&[2, 3]).unwrap())
|
||||
.map(|_| arbitrary_type(g, size - 1))
|
||||
.collect(),
|
||||
),
|
||||
2 => Ty::Tuple(
|
||||
(0..*g.choose(&[0, 1, 2]).unwrap())
|
||||
.map(|_| arbitrary_type(g, size - 1))
|
||||
.collect(),
|
||||
),
|
||||
3 => Ty::Intersection {
|
||||
pos: (0..*g.choose(&[0, 1, 2]).unwrap())
|
||||
.map(|_| arbitrary_type(g, size - 1))
|
||||
.collect(),
|
||||
neg: (0..*g.choose(&[0, 1, 2]).unwrap())
|
||||
.map(|_| arbitrary_type(g, size - 1))
|
||||
.collect(),
|
||||
},
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Arbitrary for Ty {
|
||||
fn arbitrary(g: &mut Gen) -> Ty {
|
||||
const MAX_SIZE: u32 = 2;
|
||||
arbitrary_type(g, MAX_SIZE)
|
||||
}
|
||||
|
||||
fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
|
||||
// This is incredibly naive. We can do much better here by
|
||||
// trying various subsets of the elements in unions, tuples,
|
||||
// and intersections. For now, we only try to shrink by
|
||||
// reducing unions/tuples/intersections to a single element.
|
||||
match self.clone() {
|
||||
Ty::Union(types) => Box::new(types.into_iter()),
|
||||
Ty::Tuple(types) => Box::new(types.into_iter()),
|
||||
Ty::Intersection { pos, neg } => Box::new(pos.into_iter().chain(neg)),
|
||||
_ => Box::new(std::iter::empty()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static CACHED_DB: OnceLock<Arc<Mutex<TestDb>>> = OnceLock::new();
|
||||
|
||||
fn get_cached_db() -> MutexGuard<'static, TestDb> {
|
||||
let db = CACHED_DB.get_or_init(|| Arc::new(Mutex::new(setup_db())));
|
||||
db.lock().unwrap()
|
||||
}
|
||||
|
||||
/// A macro to define a property test for types.
|
||||
///
|
||||
/// The `$test_name` identifier specifies the name of the test function. The `$db` identifier
|
||||
/// is used to refer to the salsa database in the property to be tested. The actual property is
|
||||
/// specified using the syntax:
|
||||
///
|
||||
/// `forall types t1, t2, ..., tn . <property>`
|
||||
///
|
||||
/// where `t1`, `t2`, ..., `tn` are identifiers that represent arbitrary types, and `<property>`
|
||||
/// is an expression using these identifiers.
|
||||
///
|
||||
macro_rules! type_property_test {
|
||||
($test_name:ident, $db:ident, forall types $($types:ident),+ . $property:expr) => {
|
||||
#[quickcheck_macros::quickcheck]
|
||||
#[ignore]
|
||||
fn $test_name($($types: crate::types::tests::Ty),+) -> bool {
|
||||
let db_cached = super::get_cached_db();
|
||||
let $db = &*db_cached;
|
||||
$(let $types = $types.into_type($db);)+
|
||||
|
||||
$property
|
||||
}
|
||||
};
|
||||
// A property test with a logical implication.
|
||||
($name:ident, $db:ident, forall types $($types:ident),+ . $premise:expr => $conclusion:expr) => {
|
||||
type_property_test!($name, $db, forall types $($types),+ . !($premise) || ($conclusion));
|
||||
};
|
||||
}
|
||||
|
||||
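// Illustrative sketch: what a `type_property_test!` invocation expands to, hand-expanded
// from the macro body above for the first property in `stable` below (details approximate):
//
//     #[quickcheck_macros::quickcheck]
//     #[ignore]
//     fn equivalent_to_is_reflexive(t: crate::types::tests::Ty) -> bool {
//         let db_cached = super::get_cached_db();
//         let db = &*db_cached;
//         let t = t.into_type(db);
//         t.is_equivalent_to(db, t)
//     }
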
mod stable {
|
||||
// `T` is equivalent to itself.
|
||||
type_property_test!(
|
||||
equivalent_to_is_reflexive, db,
|
||||
forall types t. t.is_equivalent_to(db, t)
|
||||
);
|
||||
|
||||
// `T` is a subtype of itself.
|
||||
type_property_test!(
|
||||
subtype_of_is_reflexive, db,
|
||||
forall types t. t.is_subtype_of(db, t)
|
||||
);
|
||||
|
||||
// `S <: T` and `T <: U` implies that `S <: U`.
|
||||
type_property_test!(
|
||||
subtype_of_is_transitive, db,
|
||||
forall types s, t, u. s.is_subtype_of(db, t) && t.is_subtype_of(db, u) => s.is_subtype_of(db, u)
|
||||
);
|
||||
|
||||
// `T` is not disjoint from itself, unless `T` is `Never`.
|
||||
type_property_test!(
|
||||
disjoint_from_is_irreflexive, db,
|
||||
forall types t. t.is_disjoint_from(db, t) => t.is_never()
|
||||
);
|
||||
|
||||
// `S` is disjoint from `T` implies that `T` is disjoint from `S`.
|
||||
type_property_test!(
|
||||
disjoint_from_is_symmetric, db,
|
||||
forall types s, t. s.is_disjoint_from(db, t) == t.is_disjoint_from(db, s)
|
||||
);
|
||||
|
||||
// `S <: T` implies that `S` is not disjoint from `T`, unless `S` is `Never`.
|
||||
type_property_test!(
|
||||
subtype_of_implies_not_disjoint_from, db,
|
||||
forall types s, t. s.is_subtype_of(db, t) => !s.is_disjoint_from(db, t) || s.is_never()
|
||||
);
|
||||
|
||||
// `T` can be assigned to itself.
|
||||
type_property_test!(
|
||||
assignable_to_is_reflexive, db,
|
||||
forall types t. t.is_assignable_to(db, t)
|
||||
);
|
||||
|
||||
// `S <: T` implies that `S` can be assigned to `T`.
|
||||
type_property_test!(
|
||||
subtype_of_implies_assignable_to, db,
|
||||
forall types s, t. s.is_subtype_of(db, t) => s.is_assignable_to(db, t)
|
||||
);
|
||||
|
||||
// If `T` is a singleton, it is also single-valued.
|
||||
type_property_test!(
|
||||
singleton_implies_single_valued, db,
|
||||
forall types t. t.is_singleton(db) => t.is_single_valued(db)
|
||||
);
|
||||
|
||||
// If `T` contains a gradual form, it should not participate in subtyping
|
||||
type_property_test!(
|
||||
non_fully_static_types_do_not_participate_in_subtyping, db,
|
||||
forall types s, t. !s.is_fully_static(db) => !s.is_subtype_of(db, t) && !t.is_subtype_of(db, s)
|
||||
);
|
||||
}
|
||||
|
||||
/// This module contains property tests that currently lead to many false positives.
|
||||
///
|
||||
/// The reason for this is our insufficient understanding of equivalence of types. For
|
||||
/// example, we currently consider `int | str` and `str | int` to be different types.
|
||||
/// Similar issues exist for intersection types. Once this is resolved, we can move these
|
||||
/// tests to the `stable` section. In the meantime, it can still be useful to run these
|
||||
/// tests (using [`types::property_tests::flaky`]), to see if there are any new obvious bugs.
|
||||
mod flaky {
|
||||
// `S <: T` and `T <: S` implies that `S` is equivalent to `T`.
|
||||
type_property_test!(
|
||||
subtype_of_is_antisymmetric, db,
|
||||
forall types s, t. s.is_subtype_of(db, t) && t.is_subtype_of(db, s) => s.is_equivalent_to(db, t)
|
||||
);
|
||||
|
||||
// Negating `T` twice is equivalent to `T`.
|
||||
type_property_test!(
|
||||
double_negation_is_identity, db,
|
||||
forall types t. t.negate(db).negate(db).is_equivalent_to(db, t)
|
||||
);
|
||||
}
|
||||
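// Concrete instance of the failure mode described in the `flaky` docs above, assuming
// (as the `shrink` impl suggests) that `Ty::Union` wraps a `Vec<Ty>`: `int | str` and
// `str | int` are mutual subtypes, but an order-sensitive equivalence check treats them
// as different types, so `subtype_of_is_antisymmetric` can report a counterexample like:
//
//     let s = Ty::Union(vec![
//         Ty::KnownClassInstance(KnownClass::Int),
//         Ty::KnownClassInstance(KnownClass::Str),
//     ]);
//     let t = Ty::Union(vec![
//         Ty::KnownClassInstance(KnownClass::Str),
//         Ty::KnownClassInstance(KnownClass::Int),
//     ]);
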
@@ -189,9 +189,32 @@ impl<'db> Parameter<'db> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::db::tests::{setup_db, TestDb};
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::types::{global_symbol, FunctionType};
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use crate::ProgramSettings;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
pub(crate) fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_function_f<'db>(db: &'db TestDb, file: &'static str) -> FunctionType<'db> {
|
||||
|
||||
@@ -5,127 +5,8 @@ use ruff_python_ast::{self as ast, ModExpression, StringFlags};
|
||||
use ruff_python_parser::{parse_expression_range, Parsed};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::lint::{Level, LintStatus};
|
||||
use crate::types::diagnostic::{TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder};
|
||||
use crate::{declare_lint, Db};
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for f-strings in type annotation positions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Static analysis tools like Red Knot can't analyse type annotations that use f-string notation.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// def test(): -> f"int":
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// def test(): -> "int":
|
||||
/// ...
|
||||
/// ```
|
||||
pub(crate) static FSTRING_TYPE_ANNOTATION = {
|
||||
summary: "detects F-strings in type annotation positions",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for byte-strings in type annotation positions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Static analysis tools like Red Knot can't analyse type annotations that use byte-string notation.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// def test(): -> b"int":
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// def test(): -> "int":
|
||||
/// ...
|
||||
/// ```
|
||||
pub(crate) static BYTE_STRING_TYPE_ANNOTATION = {
|
||||
summary: "detects byte strings in type annotation positions",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for raw-strings in type annotation positions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Static analysis tools like Red Knot can't analyse type annotations that use raw-string notation.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// def test(): -> r"int":
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// def test(): -> "int":
|
||||
/// ...
|
||||
/// ```
|
||||
pub(crate) static RAW_STRING_TYPE_ANNOTATION = {
|
||||
summary: "detects raw strings in type annotation positions",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for implicit concatenated strings in type annotation positions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Static analysis tools like Red Knot can't analyse type annotations that use implicit concatenated strings.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// def test(): -> "Literal[" "5" "]":
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// def test(): -> "Literal[5]":
|
||||
/// ...
|
||||
/// ```
|
||||
pub(crate) static IMPLICIT_CONCATENATED_STRING_TYPE_ANNOTATION = {
|
||||
summary: "detects implicit concatenated strings in type annotations",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static INVALID_SYNTAX_IN_FORWARD_ANNOTATION = {
|
||||
summary: "detects invalid syntax in forward annotations",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO
|
||||
pub(crate) static ESCAPE_CHARACTER_IN_FORWARD_ANNOTATION = {
|
||||
summary: "detects forward type annotations with escape characters",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
use crate::Db;
|
||||
|
||||
type AnnotationParseResult = Result<Parsed<ModExpression>, TypeCheckDiagnostics>;
|
||||
|
||||
@@ -144,9 +25,9 @@ pub(crate) fn parse_string_annotation(
|
||||
if let [string_literal] = string_expr.value.as_slice() {
|
||||
let prefix = string_literal.flags.prefix();
|
||||
if prefix.is_raw() {
|
||||
diagnostics.add_lint(
|
||||
&RAW_STRING_TYPE_ANNOTATION,
|
||||
diagnostics.add(
|
||||
string_literal.into(),
|
||||
"annotation-raw-string",
|
||||
format_args!("Type expressions cannot use raw string literal"),
|
||||
);
|
||||
// Compare the raw contents (without quotes) of the expression with the parsed contents
|
||||
@@ -168,26 +49,26 @@ pub(crate) fn parse_string_annotation(
|
||||
// ```
|
||||
match parse_expression_range(source.as_str(), range_excluding_quotes) {
|
||||
Ok(parsed) => return Ok(parsed),
|
||||
Err(parse_error) => diagnostics.add_lint(
|
||||
&INVALID_SYNTAX_IN_FORWARD_ANNOTATION,
|
||||
Err(parse_error) => diagnostics.add(
|
||||
string_literal.into(),
|
||||
"forward-annotation-syntax-error",
|
||||
format_args!("Syntax error in forward annotation: {}", parse_error.error),
|
||||
),
|
||||
}
|
||||
} else {
|
||||
// The raw contents of the string doesn't match the parsed content. This could be the
|
||||
// case for annotations that contain escape sequences.
|
||||
diagnostics.add_lint(
|
||||
&ESCAPE_CHARACTER_IN_FORWARD_ANNOTATION,
|
||||
diagnostics.add(
|
||||
string_expr.into(),
|
||||
"annotation-escape-character",
|
||||
format_args!("Type expressions cannot contain escape characters"),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// String is implicitly concatenated.
|
||||
diagnostics.add_lint(
|
||||
&IMPLICIT_CONCATENATED_STRING_TYPE_ANNOTATION,
|
||||
diagnostics.add(
|
||||
string_expr.into(),
|
||||
"annotation-implicit-concat",
|
||||
format_args!("Type expressions cannot span multiple string literals"),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -95,8 +95,7 @@ impl<'db> Unpacker<'db> {
|
||||
// there would be a cost and it's not clear that it's worth it.
|
||||
let value_ty = Type::tuple(
|
||||
self.db,
|
||||
std::iter::repeat(Type::LiteralString)
|
||||
.take(string_literal_ty.python_len(self.db)),
|
||||
&vec![Type::LiteralString; string_literal_ty.len(self.db)],
|
||||
);
|
||||
self.unpack(target, value_ty, scope);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use camino::Utf8Path;
|
||||
use dir_test::{dir_test, Fixture};
|
||||
use std::ffi::OsStr;
|
||||
use std::path::Path;
|
||||
|
||||
use dir_test::{dir_test, Fixture};
|
||||
|
||||
/// See `crates/red_knot_test/README.md` for documentation on these tests.
|
||||
#[dir_test(
|
||||
dir: "$CARGO_MANIFEST_DIR/resources/mdtest",
|
||||
@@ -9,46 +10,16 @@ use std::path::Path;
|
||||
)]
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
fn mdtest(fixture: Fixture<&str>) {
|
||||
let fixture_path = Utf8Path::new(fixture.path());
|
||||
let fixture_path = Path::new(fixture.path());
|
||||
let crate_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
|
||||
let workspace_root = crate_dir.ancestors().nth(2).unwrap();
|
||||
let workspace_root = crate_dir.parent().and_then(Path::parent).unwrap();
|
||||
|
||||
let long_title = fixture_path.strip_prefix(workspace_root).unwrap();
|
||||
let short_title = fixture_path.file_name().unwrap();
|
||||
let long_title = fixture_path
|
||||
.strip_prefix(workspace_root)
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap();
|
||||
let short_title = fixture_path.file_name().and_then(OsStr::to_str).unwrap();
|
||||
|
||||
let test_name = test_name("mdtest", fixture_path);
|
||||
|
||||
red_knot_test::run(fixture_path, long_title.as_str(), short_title, &test_name);
|
||||
}
|
||||
|
||||
/// Constructs the test name used for individual markdown files
|
||||
///
|
||||
/// This code is copied from <https://github.com/fe-lang/dir-test/blob/1c0f41c480a3490bc2653a043ff6e3f8085a1f47/macros/src/lib.rs#L104-L138>
|
||||
/// and should be updated if they diverge
|
||||
fn test_name(test_func_name: &str, fixture_path: &Utf8Path) -> String {
|
||||
assert!(fixture_path.is_file());
|
||||
|
||||
let dir_path = format!("{}/resources/mdtest", std::env!("CARGO_MANIFEST_DIR"));
|
||||
let rel_path = fixture_path.strip_prefix(dir_path).unwrap();
|
||||
assert!(rel_path.is_relative());
|
||||
|
||||
let mut test_name = test_func_name.to_owned();
|
||||
test_name.push_str("__");
|
||||
|
||||
for component in rel_path.parent().unwrap().components() {
|
||||
let component = component
|
||||
.as_str()
|
||||
.replace(|c: char| c.is_ascii_punctuation(), "_");
|
||||
test_name.push_str(&component);
|
||||
test_name.push('_');
|
||||
}
|
||||
|
||||
test_name.push_str(
|
||||
&rel_path
|
||||
.file_stem()
|
||||
.unwrap()
|
||||
.replace(|c: char| c.is_ascii_punctuation(), "_"),
|
||||
);
|
||||
|
||||
test_name
|
||||
red_knot_test::run(fixture_path, long_title, short_title);
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ impl SyncNotificationHandler for DidChangeTextDocumentHandler {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics
|
||||
// TODO(dhruvmanila): Publish diagnostics if the client doesnt support pull diagnostics
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -86,15 +86,14 @@ fn to_lsp_diagnostic(
|
||||
|
||||
let severity = match diagnostic.severity() {
|
||||
Severity::Info => DiagnosticSeverity::INFORMATION,
|
||||
Severity::Warning => DiagnosticSeverity::WARNING,
|
||||
Severity::Error | Severity::Fatal => DiagnosticSeverity::ERROR,
|
||||
Severity::Error => DiagnosticSeverity::ERROR,
|
||||
};
|
||||
|
||||
Diagnostic {
|
||||
range,
|
||||
severity: Some(severity),
|
||||
tags: None,
|
||||
code: Some(NumberOrString::String(diagnostic.id().to_string())),
|
||||
code: Some(NumberOrString::String(diagnostic.rule().to_string())),
|
||||
code_description: None,
|
||||
source: Some("red-knot".into()),
|
||||
message: diagnostic.message().into_owned(),
|
||||
|
||||
@@ -26,7 +26,7 @@ pub(crate) struct Requester<'s> {
|
||||
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
|
||||
}
|
||||
|
||||
impl Client<'_> {
|
||||
impl<'s> Client<'s> {
|
||||
pub(super) fn new(sender: ClientSender) -> Self {
|
||||
Self {
|
||||
notifier: Notifier(sender.clone()),
|
||||
|
||||
@@ -20,15 +20,12 @@ ruff_source_file = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
camino = { workspace = true }
|
||||
colored = { workspace = true }
|
||||
memchr = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
|
||||
@@ -184,11 +184,8 @@ The tests are run independently, in independent in-memory file systems and with
|
||||
[Salsa](https://github.com/salsa-rs/salsa) databases. This means that each is a from-scratch run of
|
||||
the type checker, with no data persisting from any previous test.
|
||||
|
||||
It is possible to filter to individual tests within a single markdown file using the
`MDTEST_TEST_FILTER` environment variable. This variable will match any tests which contain the
value as a case-sensitive substring in their names. An example test name is
`unpacking.md - Unpacking - Tuple - Multiple assignment`, which contains the name of the markdown
file and its parent headers joined together with hyphens.
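
For example, to run only the test named above (the `cargo test` target follows the rerun hint
that `run` prints, so treat the exact invocation as illustrative):

```sh
MDTEST_TEST_FILTER="unpacking.md - Unpacking - Tuple - Multiple assignment" \
    cargo test -p red_knot_python_semantic --test mdtest
```
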
Due to `cargo test` limitations, an entire test suite (Markdown file) is run as a single Rust test,
|
||||
so it's not possible to select individual tests within it to run.
|
||||
|
||||
## Structured test suites
|
||||
|
||||
@@ -225,22 +222,6 @@ A header-demarcated section must either be a test or a grouping header; it canno
|
||||
a header section can either contain embedded files (making it a test), or it can contain more
deeply-nested headers (headers with more `#`), but it cannot contain both.

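For example, in the sketch below (titles and file contents are illustrative), the outer section
is a grouping header and only the leaf sections embed files:

````markdown
# Grouping header

## First test

```py
x = 1
```

## Second test

```py
y = 2
```
````
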
## Configuration

The test framework supports a TOML-based configuration format, which is a subset of the full red-knot
configuration format. This configuration can be specified in fenced code blocks with `toml` as the
language tag:

````markdown
```toml
[environment]
target-version = "3.10"
```
````

This configuration will apply to all tests in the same section, and all nested sections within that
section. Nested sections can override configurations from their parent sections.

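For example, a nested section can repeat the block with a different value; a sketch (the titles,
the override value, and the trivial `x = 1` file are illustrative):

````markdown
# Group

```toml
[environment]
target-version = "3.10"
```

## Test overriding the group's target version

```toml
[environment]
target-version = "3.12"
```

```py
x = 1
```
````
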
## Documentation of tests
|
||||
|
||||
Arbitrary Markdown syntax (including of course normal prose paragraphs) is permitted (and ignored by
|
||||
@@ -298,6 +279,30 @@ possible in these files.
|
||||
|
||||
A fenced code block with no language will always be an error.
|
||||
|
||||
### Configuration
|
||||
|
||||
We will add the ability to specify non-default red-knot configurations to use in tests, by including
|
||||
a TOML code block:
|
||||
|
||||
````markdown
|
||||
```toml
|
||||
[tool.knot]
|
||||
warn-on-any = true
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
def f(x: Any): # error: [use-of-any]
|
||||
pass
|
||||
```
|
||||
````
|
||||
|
||||
It should be possible to include a TOML code block in a single test (as shown), or in a grouping
|
||||
section, in which case it applies to all nested tests within that grouping section. Configurations
|
||||
at multiple levels are allowed and merged, with the most-nested (closest to the test) taking
|
||||
precedence.
|
||||
|
||||
### Running just a single test from a suite
|
||||
|
||||
Having each test in a suite always run as a distinct Rust test would require writing our own test
|
||||
@@ -309,11 +314,11 @@ variable.
|
||||
|
||||
### Configuring search paths and kinds
|
||||
|
||||
The red-knot TOML configuration format hasn't been finalized, and we may want to implement
|
||||
The red-knot TOML configuration format hasn't been designed yet, and we may want to implement
|
||||
support in the test framework for configuring search paths before it is designed. If so, we can
|
||||
define some configuration options for now under the `[tests]` namespace. In the future, perhaps
|
||||
some of these can be replaced by real red-knot configuration options; some or all may also be
|
||||
kept long-term as test-specific options.
|
||||
define some configuration options for now under the `[tool.knot.tests]` namespace. In the future,
|
||||
perhaps some of these can be replaced by real red-knot configuration options; some or all may also
|
||||
be kept long-term as test-specific options.
|
||||
|
||||
Some configuration options we will want to provide:
|
||||
|
||||
@@ -331,13 +336,13 @@ non-default value using the `workspace-root` config.
|
||||
|
||||
### Specifying a custom typeshed
|
||||
|
||||
Some tests will need to override the default typeshed with custom files. The `[environment]`
|
||||
configuration option `typeshed-path` can be used to do this:
|
||||
Some tests will need to override the default typeshed with custom files. The `[tool.knot.tests]`
|
||||
configuration option `typeshed-root` should be usable for this:
|
||||
|
||||
````markdown
|
||||
```toml
|
||||
[environment]
|
||||
typeshed-path = "/typeshed"
|
||||
[tool.knot.tests]
|
||||
typeshed-root = "/typeshed"
|
||||
```
|
||||
|
||||
This file is importable as part of our custom typeshed, because it is within `/typeshed`, which we
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
//! TOML-deserializable Red Knot configuration, similar to `knot.toml`, to be able to
//! control some configuration options from Markdown files. For now, this supports the
//! following limited structure:
//!
//! ```toml
//! [environment]
//! target-version = "3.10"
//! ```

use anyhow::Context;
use serde::Deserialize;

#[derive(Deserialize)]
pub(crate) struct MarkdownTestConfig {
    pub(crate) environment: Environment,
}

impl MarkdownTestConfig {
    pub(crate) fn from_str(s: &str) -> anyhow::Result<Self> {
        toml::from_str(s).context("Error while parsing Markdown TOML config")
    }
}

#[derive(Deserialize)]
pub(crate) struct Environment {
    #[serde(rename = "target-version")]
    pub(crate) target_version: String,
}
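
// Illustrative usage sketch of the config type above, using only the items shown here;
// the TOML literal mirrors the module docs. The MAJOR.MINOR split happens later, in the
// test parser's `parse_config`.
fn _example_usage() -> anyhow::Result<()> {
    let toml = "[environment]\ntarget-version = \"3.10\"";
    let config = MarkdownTestConfig::from_str(toml)?;
    assert_eq!(config.environment.target_version, "3.10");
    Ok(())
}
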
@@ -1,7 +1,5 @@
|
||||
use red_knot_python_semantic::lint::RuleSelection;
|
||||
use red_knot_python_semantic::{
|
||||
default_lint_registry, Db as SemanticDb, Program, ProgramSettings, PythonVersion,
|
||||
SearchPathSettings,
|
||||
Db as SemanticDb, Program, ProgramSettings, PythonVersion, SearchPathSettings,
|
||||
};
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::{DbWithTestSystem, System, SystemPath, SystemPathBuf, TestSystem};
|
||||
@@ -15,20 +13,16 @@ pub(crate) struct Db {
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
vendored: VendoredFileSystem,
|
||||
rule_selection: RuleSelection,
|
||||
}
|
||||
|
||||
impl Db {
|
||||
pub(crate) fn setup(workspace_root: SystemPathBuf) -> Self {
|
||||
let rule_selection = RuleSelection::from_registry(&default_lint_registry());
|
||||
|
||||
let db = Self {
|
||||
workspace_root,
|
||||
storage: salsa::Storage::default(),
|
||||
system: TestSystem::default(),
|
||||
vendored: red_knot_vendored::file_system().clone(),
|
||||
files: Files::default(),
|
||||
rule_selection,
|
||||
};
|
||||
|
||||
db.memory_file_system()
|
||||
@@ -91,10 +85,6 @@ impl SemanticDb for Db {
|
||||
fn is_file_open(&self, file: File) -> bool {
|
||||
!file.path(self).is_vendored_path()
|
||||
}
|
||||
|
||||
fn rule_selection(&self) -> &RuleSelection {
|
||||
&self.rule_selection
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
|
||||
@@ -144,7 +144,7 @@ struct DiagnosticWithLine<T> {
|
||||
mod tests {
|
||||
use crate::db::Db;
|
||||
use crate::diagnostic::Diagnostic;
|
||||
use ruff_db::diagnostic::{DiagnosticId, LintName, Severity};
|
||||
use ruff_db::diagnostic::Severity;
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::source::line_index;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
@@ -190,8 +190,8 @@ mod tests {
|
||||
}
|
||||
|
||||
impl Diagnostic for DummyDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
DiagnosticId::Lint(LintName::of("dummy"))
|
||||
fn rule(&self) -> &str {
|
||||
"dummy"
|
||||
}
|
||||
|
||||
fn message(&self) -> Cow<str> {
|
||||
|
||||
@@ -1,51 +1,37 @@
|
||||
use camino::Utf8Path;
|
||||
use colored::Colorize;
|
||||
use parser as test_parser;
|
||||
use red_knot_python_semantic::types::check_types;
|
||||
use red_knot_python_semantic::Program;
|
||||
use ruff_db::diagnostic::{Diagnostic, ParseDiagnostic};
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_source_file::LineIndex;
|
||||
use ruff_text_size::TextSize;
|
||||
use salsa::Setter;
|
||||
use std::path::Path;
|
||||
|
||||
mod assertion;
|
||||
mod config;
|
||||
mod db;
|
||||
mod diagnostic;
|
||||
mod matcher;
|
||||
mod parser;
|
||||
|
||||
const MDTEST_TEST_FILTER: &str = "MDTEST_TEST_FILTER";
|
||||
|
||||
/// Run `path` as a markdown test suite with given `title`.
|
||||
///
|
||||
/// Panic on test failure, and print failure details.
|
||||
#[allow(clippy::print_stdout)]
|
||||
pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str) {
|
||||
pub fn run(path: &Path, long_title: &str, short_title: &str) {
|
||||
let source = std::fs::read_to_string(path).unwrap();
|
||||
let suite = match test_parser::parse(short_title, &source) {
|
||||
Ok(suite) => suite,
|
||||
Err(err) => {
|
||||
panic!("Error parsing `{path}`: {err:?}")
|
||||
panic!("Error parsing `{}`: {err}", path.to_str().unwrap())
|
||||
}
|
||||
};
|
||||
|
||||
let mut db = db::Db::setup(SystemPathBuf::from("/src"));
|
||||
|
||||
let filter = std::env::var(MDTEST_TEST_FILTER).ok();
|
||||
let mut any_failures = false;
|
||||
for test in suite.tests() {
|
||||
if filter.as_ref().is_some_and(|f| !test.name().contains(f)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Program::get(&db)
|
||||
.set_target_version(&mut db)
|
||||
.to(test.target_version());
|
||||
|
||||
// Remove all files so that the db is in a "fresh" state.
|
||||
db.memory_file_system().remove_all();
|
||||
Files::sync_all(&mut db);
|
||||
@@ -68,15 +54,6 @@ pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!(
|
||||
"\nTo rerun this specific test, set the environment variable: {MDTEST_TEST_FILTER}=\"{}\"",
|
||||
test.name()
|
||||
);
|
||||
println!(
|
||||
"{MDTEST_TEST_FILTER}=\"{}\" cargo test -p red_knot_python_semantic --test mdtest -- {test_name}",
|
||||
test.name()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::assertion::{Assertion, ErrorAssertion, InlineFileAssertions};
|
||||
use crate::db::Db;
|
||||
use crate::diagnostic::SortedDiagnostics;
|
||||
use colored::Colorize;
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId};
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::source::{line_index, source_text, SourceText};
|
||||
use ruff_source_file::{LineIndex, OneIndexed};
|
||||
@@ -146,7 +146,7 @@ fn maybe_add_undefined_reveal_clarification<T: Diagnostic>(
|
||||
diagnostic: &T,
|
||||
original: std::fmt::Arguments,
|
||||
) -> String {
|
||||
if diagnostic.id().is_lint_named("undefined-reveal") {
|
||||
if diagnostic.rule() == "undefined-reveal" {
|
||||
format!(
|
||||
"{} add a `# revealed` assertion on this line (original diagnostic: {original})",
|
||||
"used built-in `reveal_type`:".yellow()
|
||||
@@ -163,7 +163,7 @@ where
|
||||
fn unmatched(&self) -> String {
|
||||
maybe_add_undefined_reveal_clarification(
|
||||
self,
|
||||
format_args!(r#"[{}] "{}""#, self.id(), self.message()),
|
||||
format_args!(r#"[{}] "{}""#, self.rule(), self.message()),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -175,7 +175,7 @@ where
|
||||
fn unmatched_with_column(&self, column: OneIndexed) -> String {
|
||||
maybe_add_undefined_reveal_clarification(
|
||||
self,
|
||||
format_args!(r#"{column} [{}] "{}""#, self.id(), self.message()),
|
||||
format_args!(r#"{column} [{}] "{}""#, self.rule(), self.message()),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -270,11 +270,10 @@ impl Matcher {
|
||||
match assertion {
|
||||
Assertion::Error(error) => {
|
||||
let position = unmatched.iter().position(|diagnostic| {
|
||||
!error.rule.is_some_and(|rule| {
|
||||
!(diagnostic.id().is_lint_named(rule) || diagnostic.id().matches(rule))
|
||||
}) && !error
|
||||
.column
|
||||
.is_some_and(|col| col != self.column(*diagnostic))
|
||||
!error.rule.is_some_and(|rule| rule != diagnostic.rule())
|
||||
&& !error
|
||||
.column
|
||||
.is_some_and(|col| col != self.column(*diagnostic))
|
||||
&& !error
|
||||
.message_contains
|
||||
.is_some_and(|needle| !diagnostic.message().contains(needle))
|
||||
@@ -295,12 +294,12 @@ impl Matcher {
|
||||
let expected_reveal_type_message = format!("Revealed type is `{expected_type}`");
|
||||
for (index, diagnostic) in unmatched.iter().enumerate() {
|
||||
if matched_revealed_type.is_none()
|
||||
&& diagnostic.id() == DiagnosticId::RevealedType
|
||||
&& diagnostic.rule() == "revealed-type"
|
||||
&& diagnostic.message() == expected_reveal_type_message
|
||||
{
|
||||
matched_revealed_type = Some(index);
|
||||
} else if matched_undefined_reveal.is_none()
|
||||
&& diagnostic.id().is_lint_named("undefined-reveal")
|
||||
&& diagnostic.rule() == "undefined-reveal"
|
||||
{
|
||||
matched_undefined_reveal = Some(index);
|
||||
}
|
||||
@@ -324,7 +323,7 @@ impl Matcher {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::FailuresByLine;
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
|
||||
use ruff_db::diagnostic::{Diagnostic, Severity};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_python_trivia::textwrap::dedent;
|
||||
@@ -333,16 +332,16 @@ mod tests {
|
||||
use std::borrow::Cow;
|
||||
|
||||
struct ExpectedDiagnostic {
|
||||
id: DiagnosticId,
|
||||
rule: &'static str,
|
||||
message: &'static str,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl ExpectedDiagnostic {
|
||||
fn new(id: DiagnosticId, message: &'static str, offset: usize) -> Self {
|
||||
fn new(rule: &'static str, message: &'static str, offset: usize) -> Self {
|
||||
let offset: u32 = offset.try_into().unwrap();
|
||||
Self {
|
||||
id,
|
||||
rule,
|
||||
message,
|
||||
range: TextRange::new(offset.into(), (offset + 1).into()),
|
||||
}
|
||||
@@ -350,7 +349,7 @@ mod tests {
|
||||
|
||||
fn into_diagnostic(self, file: File) -> TestDiagnostic {
|
||||
TestDiagnostic {
|
||||
id: self.id,
|
||||
rule: self.rule,
|
||||
message: self.message,
|
||||
range: self.range,
|
||||
file,
|
||||
@@ -360,15 +359,15 @@ mod tests {
|
||||
|
||||
#[derive(Debug)]
|
||||
struct TestDiagnostic {
|
||||
id: DiagnosticId,
|
||||
rule: &'static str,
|
||||
message: &'static str,
|
||||
range: TextRange,
|
||||
file: File,
|
||||
}
|
||||
|
||||
impl Diagnostic for TestDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
self.id
|
||||
fn rule(&self) -> &str {
|
||||
self.rule
|
||||
}
|
||||
|
||||
fn message(&self) -> Cow<str> {
|
||||
@@ -438,7 +437,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
"x # revealed: Foo",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::RevealedType,
|
||||
"revealed-type",
|
||||
"Revealed type is `Foo`",
|
||||
0,
|
||||
)],
|
||||
@@ -452,7 +451,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
"x # revealed: Foo",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("not-revealed-type"),
|
||||
"not-revealed-type",
|
||||
"Revealed type is `Foo`",
|
||||
0,
|
||||
)],
|
||||
@@ -464,7 +463,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
"unmatched assertion: revealed: Foo",
|
||||
r#"unexpected error: 1 [lint/not-revealed-type] "Revealed type is `Foo`""#,
|
||||
r#"unexpected error: 1 [not-revealed-type] "Revealed type is `Foo`""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -475,7 +474,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
"x # revealed: Foo",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::RevealedType,
|
||||
"revealed-type",
|
||||
"Something else",
|
||||
0,
|
||||
)],
|
||||
@@ -505,12 +504,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
"x # revealed: Foo",
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::RevealedType, "Revealed type is `Foo`", 0),
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("undefined-reveal"),
|
||||
"Doesn't matter",
|
||||
0,
|
||||
),
|
||||
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Foo`", 0),
|
||||
ExpectedDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -522,7 +517,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
"x # revealed: Foo",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("undefined-reveal"),
|
||||
"undefined-reveal",
|
||||
"Doesn't matter",
|
||||
0,
|
||||
)],
|
||||
@@ -536,12 +531,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
"x # revealed: Foo",
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::RevealedType, "Revealed type is `Bar`", 0),
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("undefined-reveal"),
|
||||
"Doesn't matter",
|
||||
0,
|
||||
),
|
||||
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Bar`", 0),
|
||||
ExpectedDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -562,16 +553,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
"reveal_type(1)",
|
||||
vec![
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("undefined-reveal"),
|
||||
"undefined reveal message",
|
||||
0,
|
||||
),
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::RevealedType,
|
||||
"Revealed type is `Literal[1]`",
|
||||
12,
|
||||
),
|
||||
ExpectedDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
|
||||
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -581,7 +564,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
"used built-in `reveal_type`: add a `# revealed` assertion on this line (\
|
||||
original diagnostic: [lint/undefined-reveal] \"undefined reveal message\")",
|
||||
original diagnostic: [undefined-reveal] \"undefined reveal message\")",
|
||||
r#"unexpected error: [revealed-type] "Revealed type is `Literal[1]`""#,
|
||||
],
|
||||
)],
|
||||
@@ -593,16 +576,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
"reveal_type(1) # error: [something-else]",
|
||||
vec![
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("undefined-reveal"),
|
||||
"undefined reveal message",
|
||||
0,
|
||||
),
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::RevealedType,
|
||||
"Revealed type is `Literal[1]`",
|
||||
12,
|
||||
),
|
||||
ExpectedDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
|
||||
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -613,7 +588,7 @@ mod tests {
|
||||
&[
|
||||
"unmatched assertion: error: [something-else]",
|
||||
"used built-in `reveal_type`: add a `# revealed` assertion on this line (\
|
||||
original diagnostic: 1 [lint/undefined-reveal] \"undefined reveal message\")",
|
||||
original diagnostic: 1 [undefined-reveal] \"undefined reveal message\")",
|
||||
r#"unexpected error: 13 [revealed-type] "Revealed type is `Literal[1]`""#,
|
||||
],
|
||||
)],
|
||||
@@ -631,11 +606,7 @@ mod tests {
|
||||
fn error_match_rule() {
|
||||
let result = get_result(
|
||||
"x # error: [some-rule]",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("some-rule", "Any message", 0)],
|
||||
);
|
||||
|
||||
assert_ok(&result);
|
||||
@@ -645,11 +616,7 @@ mod tests {
|
||||
fn error_wrong_rule() {
|
||||
let result = get_result(
|
||||
"x # error: [some-rule]",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("anything"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("anything", "Any message", 0)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
@@ -658,7 +625,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
"unmatched assertion: error: [some-rule]",
|
||||
r#"unexpected error: 1 [lint/anything] "Any message""#,
|
||||
r#"unexpected error: 1 [anything] "Any message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -669,7 +636,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
r#"x # error: "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("anything"),
|
||||
"anything",
|
||||
"message contains this",
|
||||
0,
|
||||
)],
|
||||
@@ -682,11 +649,7 @@ mod tests {
|
||||
fn error_wrong_message() {
|
||||
let result = get_result(
|
||||
r#"x # error: "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("anything"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("anything", "Any message", 0)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
@@ -695,7 +658,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
r#"unmatched assertion: error: "contains this""#,
|
||||
r#"unexpected error: 1 [lint/anything] "Any message""#,
|
||||
r#"unexpected error: 1 [anything] "Any message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -705,11 +668,7 @@ mod tests {
|
||||
fn error_match_column_and_rule() {
|
||||
let result = get_result(
|
||||
"x # error: 1 [some-rule]",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("some-rule", "Any message", 0)],
|
||||
);
|
||||
|
||||
assert_ok(&result);
|
||||
@@ -719,11 +678,7 @@ mod tests {
|
||||
fn error_wrong_column() {
|
||||
let result = get_result(
|
||||
"x # error: 2 [rule]",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("rule"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("rule", "Any message", 0)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
@@ -732,7 +687,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
"unmatched assertion: error: 2 [rule]",
|
||||
r#"unexpected error: 1 [lint/rule] "Any message""#,
|
||||
r#"unexpected error: 1 [rule] "Any message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -743,7 +698,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
r#"x # error: 1 "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("anything"),
|
||||
"anything",
|
||||
"message contains this",
|
||||
0,
|
||||
)],
|
||||
@@ -757,7 +712,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
r#"x # error: [a-rule] "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("a-rule"),
|
||||
"a-rule",
|
||||
"message contains this",
|
||||
0,
|
||||
)],
|
||||
@@ -771,7 +726,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
r#"x # error: 1 [a-rule] "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("a-rule"),
|
||||
"a-rule",
|
||||
"message contains this",
|
||||
0,
|
||||
)],
|
||||
@@ -785,7 +740,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
r#"x # error: 2 [some-rule] "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some-rule",
|
||||
"message contains this",
|
||||
0,
|
||||
)],
|
||||
@@ -797,7 +752,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
r#"unmatched assertion: error: 2 [some-rule] "contains this""#,
|
||||
r#"unexpected error: 1 [lint/some-rule] "message contains this""#,
|
||||
r#"unexpected error: 1 [some-rule] "message contains this""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -808,7 +763,7 @@ mod tests {
|
||||
let result = get_result(
|
||||
r#"x # error: 1 [some-rule] "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("other-rule"),
|
||||
"other-rule",
|
||||
"message contains this",
|
||||
0,
|
||||
)],
|
||||
@@ -820,7 +775,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
r#"unmatched assertion: error: 1 [some-rule] "contains this""#,
|
||||
r#"unexpected error: 1 [lint/other-rule] "message contains this""#,
|
||||
r#"unexpected error: 1 [other-rule] "message contains this""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -830,11 +785,7 @@ mod tests {
|
||||
fn error_match_all_wrong_message() {
|
||||
let result = get_result(
|
||||
r#"x # error: 1 [some-rule] "contains this""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("some-rule", "Any message", 0)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
@@ -843,7 +794,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
r#"unmatched assertion: error: 1 [some-rule] "contains this""#,
|
||||
r#"unexpected error: 1 [lint/some-rule] "Any message""#,
|
||||
r#"unexpected error: 1 [some-rule] "Any message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -867,9 +818,9 @@ mod tests {
|
||||
let result = get_result(
|
||||
&source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("line-two"), "msg", two),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("line-three"), "msg", three),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("line-five"), "msg", five),
|
||||
ExpectedDiagnostic::new("line-two", "msg", two),
|
||||
ExpectedDiagnostic::new("line-three", "msg", three),
|
||||
ExpectedDiagnostic::new("line-five", "msg", five),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -877,9 +828,9 @@ mod tests {
|
||||
result,
|
||||
&[
|
||||
(1, &["unmatched assertion: error: [line-one]"]),
|
||||
(2, &[r#"unexpected error: [lint/line-two] "msg""#]),
|
||||
(2, &[r#"unexpected error: [line-two] "msg""#]),
|
||||
(4, &["unmatched assertion: error: [line-four]"]),
|
||||
(5, &[r#"unexpected error: [lint/line-five] "msg""#]),
|
||||
(5, &[r#"unexpected error: [line-five] "msg""#]),
|
||||
(6, &["unmatched assertion: error: [line-six]"]),
|
||||
],
|
||||
);
|
||||
@@ -898,15 +849,12 @@ mod tests {
|
||||
let result = get_result(
|
||||
&source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("line-one"), "msg", one),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("line-two"), "msg", two),
|
||||
ExpectedDiagnostic::new("line-one", "msg", one),
|
||||
ExpectedDiagnostic::new("line-two", "msg", two),
|
||||
],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(2, &[r#"unexpected error: [lint/line-two] "msg""#])],
|
||||
);
|
||||
assert_fail(result, &[(2, &[r#"unexpected error: [line-two] "msg""#])]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -922,8 +870,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
&source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("one-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("other-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new("one-rule", "msg", x),
|
||||
ExpectedDiagnostic::new("other-rule", "msg", x),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -943,8 +891,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
&source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("one-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("one-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new("one-rule", "msg", x),
|
||||
ExpectedDiagnostic::new("one-rule", "msg", x),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -964,15 +912,15 @@ mod tests {
|
||||
let result = get_result(
|
||||
&source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("one-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("other-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("third-rule"), "msg", x),
|
||||
ExpectedDiagnostic::new("one-rule", "msg", x),
|
||||
ExpectedDiagnostic::new("other-rule", "msg", x),
|
||||
ExpectedDiagnostic::new("third-rule", "msg", x),
|
||||
],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(3, &[r#"unexpected error: 1 [lint/third-rule] "msg""#])],
|
||||
&[(3, &[r#"unexpected error: 1 [third-rule] "msg""#])],
|
||||
);
|
||||
}
|
||||
|
||||
@@ -990,12 +938,8 @@ mod tests {
|
||||
let result = get_result(
|
||||
&source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("undefined-reveal"), "msg", reveal),
|
||||
ExpectedDiagnostic::new(
|
||||
DiagnosticId::RevealedType,
|
||||
"Revealed type is `Literal[5]`",
|
||||
reveal,
|
||||
),
|
||||
ExpectedDiagnostic::new("undefined-reveal", "msg", reveal),
|
||||
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Literal[5]`", reveal),
|
||||
],
|
||||
);
|
||||
|
||||
@@ -1008,11 +952,7 @@ mod tests {
|
||||
let x = source.find('x').unwrap();
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
x,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("some-rule", "some message", x)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
@@ -1021,7 +961,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: no rule or message text",
|
||||
r#"unexpected error: 1 [lint/some-rule] "some message""#,
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
@@ -1033,11 +973,7 @@ mod tests {
|
||||
let x = source.find('x').unwrap();
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
x,
|
||||
)],
|
||||
vec![ExpectedDiagnostic::new("some-rule", "some message", x)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
@@ -1046,7 +982,7 @@ mod tests {
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: no rule or message text",
|
||||
r#"unexpected error: 1 [lint/some-rule] "some message""#,
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use anyhow::{bail, Context};
|
||||
use memchr::memchr2;
|
||||
use red_knot_python_semantic::PythonVersion;
|
||||
use regex::{Captures, Match, Regex};
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
@@ -10,8 +8,6 @@ use ruff_index::{newtype_index, IndexVec};
|
||||
use ruff_python_trivia::Cursor;
|
||||
use ruff_text_size::{TextLen, TextSize};
|
||||
|
||||
use crate::config::MarkdownTestConfig;
|
||||
|
||||
/// Parse the Markdown `source` as a test suite with given `title`.
|
||||
pub(crate) fn parse<'s>(title: &'s str, source: &'s str) -> anyhow::Result<MarkdownTestSuite<'s>> {
|
||||
let parser = Parser::new(title, source);
|
@@ -73,10 +69,6 @@ impl<'m, 's> MarkdownTest<'m, 's> {
pub(crate) fn files(&self) -> impl Iterator<Item = &'m EmbeddedFile<'s>> {
self.files.iter()
}

pub(crate) fn target_version(&self) -> PythonVersion {
self.section.target_version
}
}

/// Iterator yielding all [`MarkdownTest`]s in a [`MarkdownTestSuite`].
@@ -125,7 +117,6 @@ struct Section<'s> {
title: &'s str,
level: u8,
parent_id: Option<SectionId>,
target_version: PythonVersion,
}

#[newtype_index]
@@ -183,7 +174,7 @@ impl SectionStack {
popped
}

fn top(&mut self) -> SectionId {
fn parent(&mut self) -> SectionId {
*self
.0
.last()
@@ -210,9 +201,6 @@ struct Parser<'s> {

/// Names of embedded files in current active section.
current_section_files: Option<FxHashSet<&'s str>>,

/// Whether or not the current section has a config block.
current_section_has_config: bool,
}

impl<'s> Parser<'s> {
@@ -222,7 +210,6 @@ impl<'s> Parser<'s> {
title,
level: 0,
parent_id: None,
target_version: PythonVersion::default(),
});
Self {
sections,
@@ -231,7 +218,6 @@ impl<'s> Parser<'s> {
source_len: source.text_len(),
stack: SectionStack::new(root_section_id),
current_section_files: None,
current_section_has_config: false,
}
}

@@ -299,13 +285,12 @@ impl<'s> Parser<'s> {

self.pop_sections_to_level(header_level);

let parent = self.stack.top();
let parent = self.stack.parent();

let section = Section {
title,
level: header_level.try_into()?,
parent_id: Some(parent),
target_version: self.sections[parent].target_version,
};

if self.current_section_files.is_some() {
@@ -320,14 +305,13 @@ impl<'s> Parser<'s> {
self.stack.push(section_id);

self.current_section_files = None;
self.current_section_has_config = false;

Ok(())
}

fn parse_code_block(&mut self, captures: &Captures<'s>) -> anyhow::Result<()> {
// We never pop the implicit root section.
let section = self.stack.top();
let parent = self.stack.parent();

let mut config: FxHashMap<&'s str, &'s str> = FxHashMap::default();

@@ -349,24 +333,16 @@ impl<'s> Parser<'s> {

let path = config.get("path").copied().unwrap_or("test.py");

// CODE_RE can't match without matches for 'lang' and 'code'.
let lang = captures
.name("lang")
.as_ref()
.map(Match::as_str)
.unwrap_or_default();
let code = captures.name("code").unwrap().into();

if lang == "toml" {
return self.parse_config(code);
}

self.files.push(EmbeddedFile {
path,
section,
lang,

code,
section: parent,
lang: captures
.name("lang")
.as_ref()
.map(Match::as_str)
.unwrap_or_default(),
// CODE_RE can't match without matches for 'lang' and 'code'.
code: captures.name("code").unwrap().into(),

md_offset: self.offset(),
});
@@ -378,12 +354,12 @@ impl<'s> Parser<'s> {
"Test `{}` has duplicate files named `{path}`. \
(This is the default filename; \
consider giving some files an explicit name with `path=...`.)",
self.sections[section].title
self.sections[parent].title
));
}
return Err(anyhow::anyhow!(
"Test `{}` has duplicate files named `{path}`.",
self.sections[section].title
self.sections[parent].title
));
};
} else {
@@ -393,36 +369,8 @@ impl<'s> Parser<'s> {
Ok(())
}

fn parse_config(&mut self, code: &str) -> anyhow::Result<()> {
if self.current_section_has_config {
bail!("Multiple TOML configuration blocks in the same section are not allowed.");
}

let config = MarkdownTestConfig::from_str(code)?;
let target_version = config.environment.target_version;

let parts = target_version
.split('.')
.map(str::parse)
.collect::<Result<Vec<_>, _>>()
.context(format!(
"Invalid 'target-version' component: '{target_version}'"
))?;

if parts.len() != 2 {
bail!("Invalid 'target-version': expected MAJOR.MINOR, got '{target_version}'.",);
}

let current_section = &mut self.sections[self.stack.top()];
current_section.target_version = PythonVersion::from((parts[0], parts[1]));

self.current_section_has_config = true;

Ok(())
}

fn pop_sections_to_level(&mut self, level: usize) {
while level <= self.sections[self.stack.top()].level.into() {
while level <= self.sections[self.stack.parent()].level.into() {
self.stack.pop();
// We would have errored before pushing a child section if there were files, so we know
// no parent section can have files.

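For readers unfamiliar with the mdtest config format, here is a minimal Python sketch (not part of the diff; the function name and messages are illustrative) of the same MAJOR.MINOR validation rule that parse_config enforces above:

def parse_target_version(target_version: str) -> tuple[int, int]:
    # Every dot-separated component must be an integer, and there must be exactly two of them.
    parts = []
    for component in target_version.split("."):
        try:
            parts.append(int(component))
        except ValueError:
            raise ValueError(f"Invalid 'target-version' component: '{target_version}'")
    if len(parts) != 2:
        raise ValueError(f"Invalid 'target-version': expected MAJOR.MINOR, got '{target_version}'.")
    return parts[0], parts[1]

assert parse_target_version("3.12") == (3, 12)
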
@@ -1 +1 @@
0a2da01946a406ede42e9c66f416a7e7758991d6
5052fa2f18db4493892e0f2775030683c9d06531

@@ -33,7 +33,6 @@ _contextvars: 3.7-
_csv: 3.0-
_ctypes: 3.0-
_curses: 3.0-
_curses_panel: 3.0-
_dbm: 3.0-
_decimal: 3.3-
_dummy_thread: 3.0-3.8
@@ -41,7 +40,6 @@ _dummy_threading: 3.0-3.8
_frozen_importlib: 3.0-
_frozen_importlib_external: 3.5-
_gdbm: 3.0-
_hashlib: 3.0-
_heapq: 3.0-
_imp: 3.0-
_interpchannels: 3.13-
@@ -54,7 +52,6 @@ _lsprof: 3.0-
_lzma: 3.3-
_markupbase: 3.0-
_msi: 3.0-3.12
_multibytecodec: 3.0-
_operator: 3.4-
_osx_support: 3.0-
_posixsubprocess: 3.2-
@@ -142,12 +139,6 @@ doctest: 3.0-
dummy_threading: 3.0-3.8
email: 3.0-
encodings: 3.0-
encodings.cp1125: 3.4-
encodings.cp273: 3.4-
encodings.cp858: 3.2-
encodings.koi8_t: 3.5-
encodings.kz1048: 3.5-
encodings.mac_centeuro: 3.0-3.8
ensurepip: 3.0-
enum: 3.4-
errno: 3.0-

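The lines above follow typeshed's VERSIONS format: each entry is "module: MIN-" or "module: MIN-MAX". A minimal, illustrative parser for that line format (not part of the diff; the function name is hypothetical):

def parse_versions_line(line: str) -> tuple[str, str, str | None]:
    # "_csv: 3.0-"            -> ("_csv", "3.0", None)
    # "_dummy_thread: 3.0-3.8" -> ("_dummy_thread", "3.0", "3.8")
    name, _, span = line.partition(":")
    added, _, removed = span.strip().partition("-")
    return name.strip(), added, removed or None

assert parse_versions_line("_dummy_thread: 3.0-3.8") == ("_dummy_thread", "3.0", "3.8")
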
@@ -2,13 +2,10 @@ import codecs
import sys
from _typeshed import ReadableBuffer
from collections.abc import Callable
from typing import Literal, final, overload, type_check_only
from typing import Literal, overload
from typing_extensions import TypeAlias

# This type is not exposed; it is defined in unicodeobject.c
# At runtime it calls itself builtins.EncodingMap
@final
@type_check_only
class _EncodingMap:
def size(self) -> int: ...

@@ -73,7 +73,6 @@ _VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.
@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
def __eq__(self, value: object, /) -> bool: ...
def __reversed__(self) -> Iterator[_KT_co]: ...
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
if sys.version_info >= (3, 10):
@@ -82,7 +81,6 @@ class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented

@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
def __reversed__(self) -> Iterator[_VT_co]: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@@ -90,7 +88,6 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
@final
class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
def __eq__(self, value: object, /) -> bool: ...
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
if sys.version_info >= (3, 10):

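As a quick illustration of the runtime behaviour these dict-view stubs describe (not part of the diff; requires Python 3.10+ for .mapping):

d = {"a": 1, "b": 2}
# dict views are reversible, matching the __reversed__ entries above
print(list(reversed(d.keys())))   # ['b', 'a']
# On Python 3.10 or newer, every view exposes a read-only .mapping proxy
print(d.items().mapping)          # mappingproxy({'a': 1, 'b': 2})
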
@@ -1,9 +1,9 @@
import csv
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable
from typing import Any, Final, type_check_only
from typing_extensions import Self, TypeAlias
from collections.abc import Iterable, Iterator
from typing import Any, Final
from typing_extensions import TypeAlias

__version__: Final[str]

@@ -45,47 +45,17 @@ class Dialect:
strict: bool = False,
) -> None: ...

if sys.version_info >= (3, 10):
# This class calls itself _csv.reader.
class Reader:
@property
def dialect(self) -> Dialect: ...
line_num: int
def __iter__(self) -> Self: ...
def __next__(self) -> list[str]: ...
class _reader(Iterator[list[str]]):
@property
def dialect(self) -> Dialect: ...
line_num: int
def __next__(self) -> list[str]: ...

# This class calls itself _csv.writer.
class Writer:
@property
def dialect(self) -> Dialect: ...
if sys.version_info >= (3, 13):
def writerow(self, row: Iterable[Any], /) -> Any: ...
def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: ...
else:
def writerow(self, row: Iterable[Any]) -> Any: ...
def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...

# For the return types below.
# These aliases can be removed when typeshed drops support for 3.9.
_reader = Reader
_writer = Writer
else:
# This class is not exposed. It calls itself _csv.reader.
@type_check_only
class _reader:
@property
def dialect(self) -> Dialect: ...
line_num: int
def __iter__(self) -> Self: ...
def __next__(self) -> list[str]: ...

# This class is not exposed. It calls itself _csv.writer.
@type_check_only
class _writer:
@property
def dialect(self) -> Dialect: ...
def writerow(self, row: Iterable[Any]) -> Any: ...
def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...
class _writer:
@property
def dialect(self) -> Dialect: ...
def writerow(self, row: Iterable[Any]) -> Any: ...
def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...

def writer(
csvfile: SupportsWrite[str],

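Whichever spelling the stub uses, at runtime the reader object's type still calls itself _csv.reader, which is easy to observe (illustrative usage, not part of the diff):

import csv, io

rows = csv.reader(io.StringIO("a,b\n1,2\n"))
print(type(rows))       # <class '_csv.reader'> -- the class these stubs model
print(next(rows))       # ['a', 'b']
print(rows.line_num)    # 1
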
@@ -1,10 +1,9 @@
import _typeshed
import sys
from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer
from _typeshed import ReadableBuffer, WriteableBuffer
from abc import abstractmethod
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from ctypes import CDLL, ArgumentError as ArgumentError, c_void_p
from typing import Any, ClassVar, Generic, TypeVar, final, overload, type_check_only
from typing import Any, ClassVar, Generic, TypeVar, overload
from typing_extensions import Self, TypeAlias

if sys.version_info >= (3, 9):
@@ -48,79 +47,46 @@ if sys.platform == "win32":
def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ...
def FreeLibrary(handle: int, /) -> None: ...

else:
def dlclose(handle: int, /) -> None: ...
# The default for flag is RTLD_GLOBAL|RTLD_LOCAL, which is platform dependent.
def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: ...
def dlsym(handle: int, name: str, /) -> int: ...
class _CDataMeta(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this can never actually happen, because the only class that
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

if sys.version_info >= (3, 13):
# This class is not exposed. It calls itself _ctypes.CType_Type.
@type_check_only
class _CType_Type(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this doesn't happen because this is only a
# metaclass for subclasses of _CData.
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

_CTypeBaseType = _CType_Type

else:
_CTypeBaseType = type

# This class is not exposed.
@type_check_only
class _CData:
class _CData(metaclass=_CDataMeta):
_b_base_: int
_b_needsfree_: bool
_objects: Mapping[Any, int] | None
# At runtime the following classmethods are available only on classes, not
# on instances. This can't be reflected properly in the type system:
#
#   Structure.from_buffer(...)  # valid at runtime
#   Structure(...).from_buffer(...)  # invalid at runtime
#
@classmethod
def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
@classmethod
def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ...
@classmethod
def from_address(cls, address: int) -> Self: ...
@classmethod
def from_param(cls, value: Any, /) -> Self | _CArgObject: ...
@classmethod
def in_dll(cls, library: CDLL, name: str) -> Self: ...
def __buffer__(self, flags: int, /) -> memoryview: ...
def __ctypes_from_outparam__(self, /) -> Self: ...
def __release_buffer__(self, buffer: memoryview, /) -> None: ...

# this is a union of all the subclasses of _CData, which is useful because of
# the methods that are present on each of those subclasses which are not present
# on _CData itself.
_CDataType: TypeAlias = _SimpleCData[Any] | _Pointer[Any] | CFuncPtr | Union | Structure | Array[Any]

# This class is not exposed. It calls itself _ctypes.PyCSimpleType.
@type_check_only
class _PyCSimpleType(_CTypeBaseType):
def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ...
def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ...
def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ...
if sys.version_info < (3, 13):
# Inherited from CType_Type starting on 3.13
def __mul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

class _SimpleCData(_CData, Generic[_T], metaclass=_PyCSimpleType):
class _SimpleCData(_CData, Generic[_T]):
value: _T
# The TypeVar can be unsolved here,
# but we can't use overloads without creating many, many mypy false-positive errors
def __init__(self, value: _T = ...) -> None: ...  # pyright: ignore[reportInvalidTypeVarUse]
def __ctypes_from_outparam__(self, /) -> _T: ...  # type: ignore[override]

class _CanCastTo(_CData): ...
class _PointerLike(_CanCastTo): ...

# This type is not exposed. It calls itself _ctypes.PyCPointerType.
@type_check_only
class _PyCPointerType(_CTypeBaseType):
def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ...
def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ...
def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ...
def set_type(self, type: Any, /) -> None: ...
if sys.version_info < (3, 13):
# Inherited from CType_Type starting on 3.13
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType):
class _Pointer(_PointerLike, _CData, Generic[_CT]):
_type_: type[_CT]
contents: _CT
@overload
@@ -139,32 +105,16 @@ def POINTER(type: None, /) -> type[c_void_p]: ...
def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ...
def pointer(obj: _CT, /) -> _Pointer[_CT]: ...

# This class is not exposed. It calls itself _ctypes.CArgObject.
@final
@type_check_only
class _CArgObject: ...

def byref(obj: _CData | _CDataType, offset: int = ...) -> _CArgObject: ...
def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...

_ECT: TypeAlias = Callable[[_CData | _CDataType | None, CFuncPtr, tuple[_CData | _CDataType, ...]], _CDataType]
_ECT: TypeAlias = Callable[[_CData | None, CFuncPtr, tuple[_CData, ...]], _CData]
_PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any]

# This class is not exposed. It calls itself _ctypes.PyCFuncPtrType.
@type_check_only
class _PyCFuncPtrType(_CTypeBaseType):
def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ...
def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ...
def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ...
if sys.version_info < (3, 13):
# Inherited from CType_Type starting on 3.13
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType):
restype: type[_CDataType] | Callable[[int], Any] | None
argtypes: Sequence[type[_CDataType]]
class CFuncPtr(_PointerLike, _CData):
restype: type[_CData] | Callable[[int], Any] | None
argtypes: Sequence[type[_CData]]
errcheck: _ECT
# Abstract attribute that must be defined on subclasses
_flags_: ClassVar[int]
@@ -179,7 +129,7 @@ class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType):
if sys.platform == "win32":
@overload
def __init__(
self, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | _CDataType | None = ..., /
self, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | None = ..., /
) -> None: ...

def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
@@ -187,95 +137,30 @@ class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType):
_GetT = TypeVar("_GetT")
_SetT = TypeVar("_SetT")

# This class is not exposed. It calls itself _ctypes.CField.
@final
@type_check_only
class _CField(Generic[_CT, _GetT, _SetT]):
offset: int
size: int
if sys.version_info >= (3, 10):
@overload
def __get__(self, instance: None, owner: type[Any] | None = None, /) -> Self: ...
@overload
def __get__(self, instance: Any, owner: type[Any] | None = None, /) -> _GetT: ...
else:
@overload
def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ...
@overload
def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ...

@overload
def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ...
@overload
def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ...
def __set__(self, instance: Any, value: _SetT, /) -> None: ...

# This class is not exposed. It calls itself _ctypes.UnionType.
@type_check_only
class _UnionType(_CTypeBaseType):
def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ...
def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ...
def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ...
# At runtime, various attributes are created on a Union subclass based
# on its _fields_. This method doesn't exist, but represents those
# dynamically created attributes.
class _StructUnionMeta(_CDataMeta):
_fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]]
_pack_: int
_anonymous_: Sequence[str]
def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...
if sys.version_info < (3, 13):
# Inherited from CType_Type starting on 3.13
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

class Union(_CData, metaclass=_UnionType):
_fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]]
_pack_: ClassVar[int]
_anonymous_: ClassVar[Sequence[str]]
if sys.version_info >= (3, 13):
_align_: ClassVar[int]

class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
def __init__(self, *args: Any, **kw: Any) -> None: ...
def __getattr__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...

# This class is not exposed. It calls itself _ctypes.PyCStructType.
@type_check_only
class _PyCStructType(_CTypeBaseType):
def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ...
def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ...
def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ...
# At runtime, various attributes are created on a Structure subclass based
# on its _fields_. This method doesn't exist, but represents those
# dynamically created attributes.
def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...
if sys.version_info < (3, 13):
# Inherited from CType_Type starting on 3.13
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
class Union(_StructUnionBase): ...
class Structure(_StructUnionBase): ...

class Structure(_CData, metaclass=_PyCStructType):
_fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]]
_pack_: ClassVar[int]
_anonymous_: ClassVar[Sequence[str]]
if sys.version_info >= (3, 13):
_align_: ClassVar[int]

def __init__(self, *args: Any, **kw: Any) -> None: ...
def __getattr__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...

# This class is not exposed. It calls itself _ctypes.PyCArrayType.
@type_check_only
class _PyCArrayType(_CTypeBaseType):
def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ...
def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ...
def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ...
def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ...
if sys.version_info < (3, 13):
# Inherited from CType_Type starting on 3.13
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

class Array(_CData, Generic[_CT], metaclass=_PyCArrayType):
class Array(_CData, Generic[_CT]):
@property
@abstractmethod
def _length_(self) -> int: ...
@@ -320,15 +205,9 @@ class Array(_CData, Generic[_CT], metaclass=_PyCArrayType):
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...

def addressof(obj: _CData | _CDataType, /) -> int: ...
def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ...
def addressof(obj: _CData, /) -> int: ...
def alignment(obj_or_type: _CData | type[_CData], /) -> int: ...
def get_errno() -> int: ...
def resize(obj: _CData | _CDataType, size: int, /) -> None: ...
def resize(obj: _CData, size: int, /) -> None: ...
def set_errno(value: int, /) -> int: ...
def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ...
def PyObj_FromPtr(address: int, /) -> Any: ...
def Py_DECREF(o: _T, /) -> _T: ...
def Py_INCREF(o: _T, /) -> _T: ...
def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: ...
def call_cdeclfunction(address: int, arguments: tuple[Any, ...], /) -> Any: ...
def call_function(address: int, arguments: tuple[Any, ...], /) -> Any: ...
def sizeof(obj_or_type: _CData | type[_CData], /) -> int: ...

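The repeated __mul__/__rmul__ entries on the metaclasses above describe one concrete runtime behaviour: multiplying a ctypes type by an int produces an array type. A short illustration (not part of the diff):

import ctypes

IntArray4 = ctypes.c_int * 4           # type-level __mul__ -> an Array subclass
buf = IntArray4(1, 2, 3, 4)
print(ctypes.sizeof(buf))              # 4 * sizeof(c_int), typically 16
print(ctypes.addressof(buf) != 0)      # addressof/byref accept any _CData instance
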
@@ -1,7 +1,6 @@
import sys
from _typeshed import ReadOnlyBuffer, SupportsRead
from curses import _ncurses_version
from typing import IO, Any, final, overload
from typing import IO, Any, NamedTuple, final, overload
from typing_extensions import TypeAlias

# NOTE: This module is ordinarily only available on Unix, but the windows-curses
@@ -550,4 +549,9 @@ class window:  # undocumented
@overload
def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...

class _ncurses_version(NamedTuple):
major: int
minor: int
patch: int

ncurses_version: _ncurses_version

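However the stub spells it, the named tuple is observable at runtime as curses.ncurses_version (illustrative only; curses is ordinarily Unix-only, as the NOTE above says, and the exact numbers depend on the installed ncurses):

import curses

v = curses.ncurses_version
print(v.major, v.minor, v.patch)   # e.g. 6 1 20180127 on one system; values vary
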
@@ -1,27 +0,0 @@
from _curses import window
from typing import final

__version__: str
version: str

class error(Exception): ...

@final
class panel:
def above(self) -> panel: ...
def below(self) -> panel: ...
def bottom(self) -> None: ...
def hidden(self) -> bool: ...
def hide(self) -> None: ...
def move(self, y: int, x: int, /) -> None: ...
def replace(self, win: window, /) -> None: ...
def set_userptr(self, obj: object, /) -> None: ...
def show(self) -> None: ...
def top(self) -> None: ...
def userptr(self) -> object: ...
def window(self) -> window: ...

def bottom_panel() -> panel: ...
def new_panel(win: window, /) -> panel: ...
def top_panel() -> panel: ...
def update_panels() -> panel: ...
@@ -1,7 +1,7 @@
import sys
from _typeshed import ReadOnlyBuffer, StrOrBytesPath
from types import TracebackType
from typing import TypeVar, final, overload, type_check_only
from typing import TypeVar, overload
from typing_extensions import Self, TypeAlias

if sys.platform != "win32":
@@ -13,8 +13,6 @@ if sys.platform != "win32":
library: str

# Actual typename dbm, not exposed by the implementation
@final
@type_check_only
class _dbm:
def close(self) -> None: ...
if sys.version_info >= (3, 13):
@@ -24,17 +22,18 @@ if sys.platform != "win32":
def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
def __delitem__(self, key: _KeyType) -> None: ...
def __len__(self) -> int: ...
def __del__(self) -> None: ...
def __enter__(self) -> Self: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> None: ...
@overload
def get(self, k: _KeyType, /) -> bytes | None: ...
def get(self, k: _KeyType) -> bytes | None: ...
@overload
def get(self, k: _KeyType, default: _T, /) -> bytes | _T: ...
def get(self, k: _KeyType, default: _T) -> bytes | _T: ...
def keys(self) -> list[bytes]: ...
def setdefault(self, k: _KeyType, default: _ValueType = ..., /) -> bytes: ...
# This isn't true, but the class can't be instantiated. See #13024
def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ...
# Don't exist at runtime
__new__: None  # type: ignore[assignment]
__init__: None  # type: ignore[assignment]

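The get() overloads and the context-manager methods above correspond to everyday dbm usage through the public wrapper (illustrative only; the database path is made up, and dbm.ndbm is Unix-only):

import dbm.ndbm

with dbm.ndbm.open("/tmp/example-db", "c") as db:
    db[b"key"] = b"value"
    print(db.get(b"key"))             # b'value'
    print(db.get(b"missing", b""))    # default argument, as in the get() overloads
    print(db.keys())                  # [b'key']
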
@@ -17,13 +17,20 @@ from decimal import (
Rounded as Rounded,
Subnormal as Subnormal,
Underflow as Underflow,
_ContextManager,
)
from types import TracebackType
from typing import Final
from typing_extensions import TypeAlias

_TrapType: TypeAlias = type[DecimalException]

class _ContextManager:
new_context: Context
saved_context: Context
def __init__(self, new_context: Context) -> None: ...
def __enter__(self) -> Context: ...
def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...

__version__: Final[str]
__libmpdec_version__: Final[str]

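The _ContextManager described above is what decimal.localcontext() hands back at runtime, which is easiest to see with a quick example (not part of the diff):

import decimal

with decimal.localcontext() as ctx:
    ctx.prec = 6
    print(decimal.Decimal(1) / decimal.Decimal(7))   # 0.142857
print(decimal.Decimal(1) / decimal.Decimal(7))       # back to the default 28 digits
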
@@ -1,80 +0,0 @@
import sys
from _typeshed import ReadableBuffer
from collections.abc import Callable
from types import ModuleType
from typing import AnyStr, final, overload
from typing_extensions import Self, TypeAlias

_DigestMod: TypeAlias = str | Callable[[], HASH] | ModuleType | None

openssl_md_meth_names: frozenset[str]

class HASH:
@property
def digest_size(self) -> int: ...
@property
def block_size(self) -> int: ...
@property
def name(self) -> str: ...
def copy(self) -> Self: ...
def digest(self) -> bytes: ...
def hexdigest(self) -> str: ...
def update(self, obj: ReadableBuffer, /) -> None: ...

if sys.version_info >= (3, 10):
class UnsupportedDigestmodError(ValueError): ...

if sys.version_info >= (3, 9):
class HASHXOF(HASH):
def digest(self, length: int) -> bytes: ...  # type: ignore[override]
def hexdigest(self, length: int) -> str: ...  # type: ignore[override]

@final
class HMAC:
@property
def digest_size(self) -> int: ...
@property
def block_size(self) -> int: ...
@property
def name(self) -> str: ...
def copy(self) -> Self: ...
def digest(self) -> bytes: ...
def hexdigest(self) -> str: ...
def update(self, msg: ReadableBuffer) -> None: ...

@overload
def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ...
@overload
def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...
def get_fips_mode() -> int: ...
def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: ...
def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ...
def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ...
def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ...

else:
def new(name: str, string: ReadableBuffer = b"") -> HASH: ...
def openssl_md5(string: ReadableBuffer = b"") -> HASH: ...
def openssl_sha1(string: ReadableBuffer = b"") -> HASH: ...
def openssl_sha224(string: ReadableBuffer = b"") -> HASH: ...
def openssl_sha256(string: ReadableBuffer = b"") -> HASH: ...
def openssl_sha384(string: ReadableBuffer = b"") -> HASH: ...
def openssl_sha512(string: ReadableBuffer = b"") -> HASH: ...

def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: ...
def pbkdf2_hmac(
hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None
) -> bytes: ...
def scrypt(
password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64
) -> bytes: ...
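These _hashlib functions sit behind the public hashlib module, where the same signatures surface directly (illustrative usage, not part of the diff; usedforsecurity requires Python 3.9+):

import hashlib

h = hashlib.new("sha256", b"hello", usedforsecurity=True)
print(h.hexdigest())
dk = hashlib.pbkdf2_hmac("sha256", b"password", b"salt", 100_000)
print(dk.hex()[:16])
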
@@ -45,6 +45,5 @@ class make_scanner:
def __init__(self, context: make_scanner) -> None: ...
def __call__(self, string: str, index: int) -> tuple[Any, int]: ...

def encode_basestring(s: str, /) -> str: ...
def encode_basestring_ascii(s: str, /) -> str: ...
def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...

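These C accelerators back the helpers that json.encoder exposes at module level, which is a convenient way to observe them (illustrative only, not part of the diff):

import json.encoder

print(json.encoder.encode_basestring('a"b'))          # "a\"b"
print(json.encoder.encode_basestring_ascii("café"))   # "caf\u00e9"
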
@@ -1,44 +0,0 @@
from _typeshed import ReadableBuffer
from codecs import _ReadableStream, _WritableStream
from collections.abc import Iterable
from typing import final, type_check_only

# This class is not exposed. It calls itself _multibytecodec.MultibyteCodec.
@final
@type_check_only
class _MultibyteCodec:
def decode(self, input: ReadableBuffer, errors: str | None = None) -> str: ...
def encode(self, input: str, errors: str | None = None) -> bytes: ...

class MultibyteIncrementalDecoder:
errors: str
def __init__(self, errors: str = "strict") -> None: ...
def decode(self, input: ReadableBuffer, final: bool = False) -> str: ...
def getstate(self) -> tuple[bytes, int]: ...
def reset(self) -> None: ...
def setstate(self, state: tuple[bytes, int], /) -> None: ...

class MultibyteIncrementalEncoder:
errors: str
def __init__(self, errors: str = "strict") -> None: ...
def encode(self, input: str, final: bool = False) -> bytes: ...
def getstate(self) -> int: ...
def reset(self) -> None: ...
def setstate(self, state: int, /) -> None: ...

class MultibyteStreamReader:
errors: str
stream: _ReadableStream
def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ...
def read(self, sizeobj: int | None = None, /) -> str: ...
def readline(self, sizeobj: int | None = None, /) -> str: ...
def readlines(self, sizehintobj: int | None = None, /) -> list[str]: ...
def reset(self) -> None: ...

class MultibyteStreamWriter:
errors: str
stream: _WritableStream
def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ...
def reset(self) -> None: ...
def write(self, strobj: str, /) -> None: ...
def writelines(self, lines: Iterable[str], /) -> None: ...
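The CJK codecs in the stdlib build on these incremental classes; the stateful decode(..., final=...) interface is visible through codecs.getincrementaldecoder (illustrative only, not part of the diff; byte values assume the EUC-JP encoding):

import codecs

dec = codecs.getincrementaldecoder("euc-jp")()
# Feed a multi-byte character split across two chunks; the decoder keeps state
print(dec.decode(b"\xa4", final=False))   # '' (incomplete sequence buffered)
print(dec.decode(b"\xb3", final=True))    # the completed character
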
@@ -4,6 +4,7 @@ from collections.abc import Callable, Sequence
from typing import SupportsIndex

if sys.platform != "win32":
def cloexec_pipe() -> tuple[int, int]: ...
def fork_exec(
args: Sequence[StrOrBytesPath] | None,
executable_list: Sequence[bytes],

@@ -3,7 +3,7 @@ from _typeshed import ReadableBuffer, WriteableBuffer
from collections.abc import Iterable
from socket import error as error, gaierror as gaierror, herror as herror, timeout as timeout
from typing import Any, SupportsIndex, overload
from typing_extensions import CapsuleType, TypeAlias
from typing_extensions import TypeAlias

_CMSG: TypeAlias = tuple[int, int, bytes]
_CMSGArg: TypeAlias = tuple[int, int, ReadableBuffer]
@@ -72,8 +72,7 @@ SO_SNDBUF: int
SO_SNDLOWAT: int
SO_SNDTIMEO: int
SO_TYPE: int
if sys.platform != "linux":
SO_USELOOPBACK: int
SO_USELOOPBACK: int
if sys.platform == "win32":
SO_EXCLUSIVEADDRUSE: int
if sys.platform != "win32":
@@ -88,10 +87,7 @@ if sys.platform != "win32" and sys.platform != "darwin":
SO_PEERSEC: int
SO_PRIORITY: int
SO_PROTOCOL: int
if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux":
SO_SETFIB: int
if sys.platform == "linux" and sys.version_info >= (3, 13):
SO_BINDTOIFINDEX: int

SOMAXCONN: int

@@ -103,32 +99,27 @@ MSG_TRUNC: int
MSG_WAITALL: int
if sys.platform != "win32":
MSG_DONTWAIT: int
MSG_EOF: int
MSG_EOR: int
MSG_NOSIGNAL: int  # Sometimes this exists on darwin, sometimes not
if sys.platform != "darwin":
MSG_ERRQUEUE: int
if sys.platform == "win32":
MSG_BCAST: int
MSG_ERRQUEUE: int
MSG_MCAST: int
if sys.platform != "win32" and sys.platform != "darwin":
MSG_BTAG: int
MSG_CMSG_CLOEXEC: int
MSG_CONFIRM: int
MSG_ETAG: int
MSG_FASTOPEN: int
MSG_MORE: int
if sys.platform != "win32" and sys.platform != "linux":
MSG_EOF: int
if sys.platform != "win32" and sys.platform != "linux" and sys.platform != "darwin":
MSG_NOTIFICATION: int
MSG_BTAG: int  # Not FreeBSD either
MSG_ETAG: int  # Not FreeBSD either

SOL_IP: int
SOL_SOCKET: int
SOL_TCP: int
SOL_UDP: int
if sys.platform != "win32" and sys.platform != "darwin":
# Defined in socket.h for Linux, but these aren't always present for
# some reason.
SOL_ATALK: int
SOL_AX25: int
SOL_HCI: int
@@ -137,11 +128,10 @@ if sys.platform != "win32" and sys.platform != "darwin":
SOL_ROSE: int

if sys.platform != "win32":
SCM_CREDS: int
SCM_RIGHTS: int
if sys.platform != "win32" and sys.platform != "darwin":
SCM_CREDENTIALS: int
if sys.platform != "win32" and sys.platform != "linux":
SCM_CREDS: int

IPPROTO_ICMP: int
IPPROTO_IP: int
@@ -153,22 +143,21 @@ IPPROTO_DSTOPTS: int
IPPROTO_EGP: int
IPPROTO_ESP: int
IPPROTO_FRAGMENT: int
IPPROTO_GGP: int
IPPROTO_HOPOPTS: int
IPPROTO_ICMPV6: int
IPPROTO_IDP: int
IPPROTO_IGMP: int
IPPROTO_IPV4: int
IPPROTO_IPV6: int
IPPROTO_MAX: int
IPPROTO_ND: int
IPPROTO_NONE: int
IPPROTO_PIM: int
IPPROTO_PUP: int
IPPROTO_ROUTING: int
IPPROTO_SCTP: int
if sys.platform != "linux":
IPPROTO_GGP: int
IPPROTO_IPV4: int
IPPROTO_MAX: int
IPPROTO_ND: int
if sys.platform == "win32":
if sys.platform != "darwin":
IPPROTO_CBT: int
IPPROTO_ICLFXBM: int
IPPROTO_IGP: int
@@ -177,19 +166,18 @@ if sys.platform == "win32":
IPPROTO_RDP: int
IPPROTO_ST: int
if sys.platform != "win32":
IPPROTO_EON: int
IPPROTO_GRE: int
IPPROTO_HELLO: int
IPPROTO_IPCOMP: int
IPPROTO_IPIP: int
IPPROTO_RSVP: int
IPPROTO_TP: int
if sys.platform != "win32" and sys.platform != "linux":
IPPROTO_EON: int
IPPROTO_HELLO: int
IPPROTO_IPCOMP: int
IPPROTO_XTP: int
if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux":
IPPROTO_BIP: int  # Not FreeBSD either
IPPROTO_MOBILE: int  # Not FreeBSD either
IPPROTO_VRRP: int  # Not FreeBSD either
if sys.platform != "win32" and sys.platform != "darwin":
IPPROTO_BIP: int
IPPROTO_MOBILE: int
IPPROTO_VRRP: int
if sys.version_info >= (3, 9) and sys.platform == "linux":
# Availability: Linux >= 2.6.20, FreeBSD >= 10.1
IPPROTO_UDPLITE: int
@@ -214,10 +202,11 @@ IP_MULTICAST_IF: int
IP_MULTICAST_LOOP: int
IP_MULTICAST_TTL: int
IP_OPTIONS: int
if sys.platform != "linux":
IP_RECVDSTADDR: int
IP_RECVDSTADDR: int
if sys.version_info >= (3, 10):
IP_RECVTOS: int
elif sys.platform != "win32" and sys.platform != "darwin":
IP_RECVTOS: int
IP_TOS: int
IP_TTL: int
if sys.platform != "win32":
@@ -229,7 +218,6 @@ if sys.platform != "win32":
IP_RETOPTS: int
if sys.platform != "win32" and sys.platform != "darwin":
IP_TRANSPARENT: int
if sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 11):
IP_BIND_ADDRESS_NO_PORT: int
if sys.version_info >= (3, 12):
IP_ADD_SOURCE_MEMBERSHIP: int
@@ -267,9 +255,6 @@ if sys.platform != "win32":
IPV6_RECVPATHMTU: int
IPV6_RECVPKTINFO: int
IPV6_RTHDRDSTOPTS: int

if sys.platform != "win32" and sys.platform != "linux":
if sys.version_info >= (3, 9) or sys.platform != "darwin":
IPV6_USE_MIN_MTU: int

EAI_AGAIN: int
@@ -283,12 +268,11 @@ EAI_SERVICE: int
EAI_SOCKTYPE: int
if sys.platform != "win32":
EAI_ADDRFAMILY: int
EAI_OVERFLOW: int
EAI_SYSTEM: int
if sys.platform != "win32" and sys.platform != "linux":
EAI_BADHINTS: int
EAI_MAX: int
EAI_OVERFLOW: int
EAI_PROTOCOL: int
EAI_SYSTEM: int

AI_ADDRCONFIG: int
AI_ALL: int
@@ -297,7 +281,7 @@ AI_NUMERICHOST: int
AI_NUMERICSERV: int
AI_PASSIVE: int
AI_V4MAPPED: int
if sys.platform != "win32" and sys.platform != "linux":
if sys.platform != "win32":
AI_DEFAULT: int
AI_MASK: int
AI_V4MAPPED_CFG: int
@@ -309,8 +293,6 @@ NI_NAMEREQD: int
NI_NOFQDN: int
NI_NUMERICHOST: int
NI_NUMERICSERV: int
if sys.platform == "linux" and sys.version_info >= (3, 13):
NI_IDN: int

TCP_FASTOPEN: int
TCP_KEEPCNT: int
@@ -336,27 +318,6 @@ if sys.platform != "win32" and sys.platform != "darwin":
TCP_SYNCNT: int
TCP_USER_TIMEOUT: int
TCP_WINDOW_CLAMP: int
if sys.platform == "linux" and sys.version_info >= (3, 12):
TCP_CC_INFO: int
TCP_FASTOPEN_CONNECT: int
TCP_FASTOPEN_KEY: int
TCP_FASTOPEN_NO_COOKIE: int
TCP_INQ: int
TCP_MD5SIG: int
TCP_MD5SIG_EXT: int
TCP_QUEUE_SEQ: int
TCP_REPAIR: int
TCP_REPAIR_OPTIONS: int
TCP_REPAIR_QUEUE: int
TCP_REPAIR_WINDOW: int
TCP_SAVED_SYN: int
TCP_SAVE_SYN: int
TCP_THIN_DUPACK: int
TCP_THIN_LINEAR_TIMEOUTS: int
TCP_TIMESTAMP: int
TCP_TX_DELAY: int
TCP_ULP: int
TCP_ZEROCOPY_RECEIVE: int

# --------------------
# Specifically documented constants
@@ -373,13 +334,12 @@ if sys.platform == "linux":
CAN_ERR_FLAG: int
CAN_ERR_MASK: int
CAN_RAW: int
CAN_RAW_ERR_FILTER: int
CAN_RAW_FILTER: int
CAN_RAW_LOOPBACK: int
CAN_RAW_RECV_OWN_MSGS: int
CAN_RTR_FLAG: int
CAN_SFF_MASK: int
if sys.version_info < (3, 11):
CAN_RAW_ERR_FILTER: int

if sys.platform == "linux":
# Availability: Linux >= 2.6.25
@@ -477,13 +437,12 @@ if sys.platform == "linux":
AF_RDS: int
PF_RDS: int
SOL_RDS: int
# These are present in include/linux/rds.h but don't always show up
# here.
RDS_CANCEL_SENT_TO: int
RDS_CMSG_RDMA_ARGS: int
RDS_CMSG_RDMA_DEST: int
RDS_CMSG_RDMA_MAP: int
RDS_CMSG_RDMA_STATUS: int
RDS_CMSG_RDMA_UPDATE: int
RDS_CONG_MONITOR: int
RDS_FREE_MR: int
RDS_GET_MR: int
@@ -497,10 +456,6 @@ if sys.platform == "linux":
RDS_RDMA_USE_ONCE: int
RDS_RECVERR: int

# This is supported by CPython but doesn't seem to be a real thing.
# The closest existing constant in rds.h is RDS_CMSG_CONG_UPDATE
# RDS_CMSG_RDMA_UPDATE: int

if sys.platform == "win32":
SIO_RCVALL: int
SIO_KEEPALIVE_VALS: int
@@ -567,17 +522,16 @@ if sys.platform == "linux":
if sys.platform != "win32" or sys.version_info >= (3, 9):
# Documented as only available on BSD, macOS, but empirically sometimes
# available on Windows
if sys.platform != "linux":
AF_LINK: int
AF_LINK: int

has_ipv6: bool

if sys.platform != "darwin" and sys.platform != "linux":
if sys.platform != "darwin":
if sys.platform != "win32" or sys.version_info >= (3, 9):
BDADDR_ANY: str
BDADDR_LOCAL: str

if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux":
if sys.platform != "win32" and sys.platform != "darwin":
HCI_FILTER: int  # not in NetBSD or DragonFlyBSD
HCI_TIME_STAMP: int  # not in FreeBSD, NetBSD, or DragonFlyBSD
HCI_DATA_DIR: int  # not in FreeBSD, NetBSD, or DragonFlyBSD
@@ -626,37 +580,36 @@ if sys.version_info >= (3, 12):
if sys.platform == "linux":
# Netlink is defined by Linux
AF_NETLINK: int
NETLINK_ARPD: int
NETLINK_CRYPTO: int
NETLINK_DNRTMSG: int
NETLINK_FIREWALL: int
NETLINK_IP6_FW: int
NETLINK_NFLOG: int
NETLINK_ROUTE6: int
NETLINK_ROUTE: int
NETLINK_SKIP: int
NETLINK_TAPBASE: int
NETLINK_TCPDIAG: int
NETLINK_USERSOCK: int
NETLINK_W1: int
NETLINK_XFRM: int
# Technically still supported by CPython
# NETLINK_ARPD: int  # linux 2.0 to 2.6.12 (EOL August 2005)
# NETLINK_ROUTE6: int  # linux 2.2 to 2.6.12 (EOL August 2005)
# NETLINK_SKIP: int  # linux 2.0 to 2.6.12 (EOL August 2005)
# NETLINK_TAPBASE: int  # linux 2.2 to 2.6.12 (EOL August 2005)
# NETLINK_TCPDIAG: int  # linux 2.6.0 to 2.6.13 (EOL December 2005)
# NETLINK_W1: int  # linux 2.6.13 to 2.6.17 (EOL October 2006)

if sys.platform == "darwin":
PF_SYSTEM: int
SYSPROTO_CONTROL: int

if sys.platform != "darwin" and sys.platform != "linux":
if sys.platform != "darwin":
if sys.version_info >= (3, 9) or sys.platform != "win32":
AF_BLUETOOTH: int

if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux":
if sys.platform != "win32" and sys.platform != "darwin":
# Linux and some BSD support is explicit in the docs
# Windows and macOS do not support in practice
BTPROTO_HCI: int
BTPROTO_L2CAP: int
BTPROTO_SCO: int  # not in FreeBSD
if sys.platform != "darwin" and sys.platform != "linux":
if sys.platform != "darwin":
if sys.version_info >= (3, 9) or sys.platform != "win32":
BTPROTO_RFCOMM: int

@@ -683,14 +636,13 @@ AF_SNA: int

if sys.platform != "win32":
AF_ROUTE: int

if sys.platform == "darwin":
AF_SYSTEM: int

if sys.platform != "darwin":
AF_IRDA: int

if sys.platform != "win32" and sys.platform != "darwin":
AF_AAL5: int
AF_ASH: int
AF_ATMPVC: int
AF_ATMSVC: int
@@ -709,12 +661,10 @@ if sys.platform != "win32" and sys.platform != "darwin":

# Miscellaneous undocumented

if sys.platform != "win32" and sys.platform != "linux":
if sys.platform != "win32":
LOCAL_PEERCRED: int

if sys.platform != "win32" and sys.platform != "darwin":
# Defined in linux socket.h, but this isn't always present for
# some reason.
IPX_TYPE: int

# ===== Classes =====
@@ -842,4 +792,4 @@ def if_nameindex() -> list[tuple[int, str]]: ...
def if_nametoindex(oname: str, /) -> int: ...
def if_indextoname(index: int, /) -> str: ...

CAPI: CapsuleType
CAPI: object

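All of the constant stubs above are guarded by sys.platform checks; runtime code has to guard the same way, since the attribute simply does not exist on other platforms. A small illustration (not part of the diff):

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, "SO_REUSEPORT"):      # absent on Windows and on older kernels
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
s.close()
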
@@ -44,12 +44,6 @@ def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpa
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...

# Obsolete synonym for start_new_thread()
@overload
def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ...
@overload
def start_new(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...

if sys.version_info >= (3, 10):
def interrupt_main(signum: signal.Signals = ..., /) -> None: ...

@@ -57,9 +51,7 @@ else:
def interrupt_main() -> None: ...

def exit() -> NoReturn: ...
def exit_thread() -> NoReturn: ...  # Obsolete synonym for exit()
def allocate_lock() -> LockType: ...
def allocate() -> LockType: ...  # Obsolete synonym for allocate_lock()
def get_ident() -> int: ...
def stack_size(size: int = 0, /) -> int: ...

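For reference, the non-obsolete entry points above are used like this (illustrative only, not part of the diff; most code should prefer the higher-level threading module):

import _thread

done = _thread.allocate_lock()            # allocate_lock(), per the stub above
done.acquire()

def worker(n):
    print("worker got", n)
    done.release()

_thread.start_new_thread(worker, (42,))   # start_new_thread(function, args)
done.acquire()                            # block until the worker releases the lock
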
Some files were not shown because too many files have changed in this diff.