Compare commits
44 Commits
micha/try-...micha/thin
| Author | SHA1 | Date |
|---|---|---|
|  | 3e7cfca47c |  |
|  | b8d2037373 |  |
|  | 1714d5c771 |  |
|  | 1fd9103e81 |  |
|  | ee2759b365 |  |
|  | 35f33d9bf5 |  |
|  | 5d78b3117a |  |
|  | c2a05b4825 |  |
|  | fae0b5c89e |  |
|  | cbe94b094b |  |
|  | 029de784f1 |  |
|  | ff94fe7447 |  |
|  | b2501b45e0 |  |
|  | 291699b375 |  |
|  | 64ac7d7dbf |  |
|  | 5f2e855c29 |  |
|  | 3b4667ec32 |  |
|  | 893f5727e5 |  |
|  | b8dddd514f |  |
|  | e73a8ba571 |  |
|  | a1edb69ea5 |  |
|  | a0d4e1f854 |  |
|  | c0d04f2d56 |  |
|  | 8d7d02193e |  |
|  | 78dfc8af0f |  |
|  | 0c84652cc5 |  |
|  | 560ae04346 |  |
|  | a357a68fc9 |  |
|  | 00e7d1ffd6 |  |
|  | f4d0273532 |  |
|  | e9cac3684a |  |
|  | 92a302e291 |  |
|  | 7b8161e80d |  |
|  | e9b0c33703 |  |
|  | 82391b5675 |  |
|  | 464144f1c6 |  |
|  | 002f9057db |  |
|  | f3a27406c9 |  |
|  | 2c9da80985 |  |
|  | 8e61da740a |  |
|  | e506296cec |  |
|  | 966cc9d6e9 |  |
|  | 7b27fe966e |  |
|  | 966fd6f57a |  |
179 .github/workflows/sync_typeshed.yaml (vendored)
@@ -1,5 +1,25 @@
name: Sync typeshed

# How this works:
#
# 1. A Linux worker:
# a. Checks out Ruff and typeshed
# b. Deletes the vendored typeshed stdlib stubs from Ruff
# c. Copies the latest versions of the stubs from typeshed
# d. Uses docstring-adder to sync all docstrings available on Linux
# e. Creates a new branch on the upstream astral-sh/ruff repository
# f. Commits the changes it's made and pushes them to the new upstream branch
# 2. Once the Linux worker is done, a Windows worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on Windows that are not available on Linux
# c. Commits the changes and pushes them to the same upstream branch
# 3. Once the Windows worker is done, a MacOS worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
# c. Commits the changes and pushes them to the same upstream branch
# d. Creates a PR against the `main` branch using the branch all three workers have pushed to
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository

on:
workflow_dispatch:
schedule:
@@ -10,7 +30,13 @@ env:
FORCE_COLOR: 1
GH_TOKEN: ${{ github.token }}

# The name of the upstream branch that the first worker creates,
# and which all three workers push to.
UPSTREAM_BRANCH: typeshedbot/sync-typeshed

jobs:
# Sync typeshed stubs, and sync all docstrings available on Linux.
# Push the changes to a new branch on the upstream repository.
sync:
name: Sync typeshed
runs-on: ubuntu-latest
@@ -19,7 +45,6 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
@@ -37,67 +62,129 @@ jobs:
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Sync typeshed
id: sync
- name: Sync typeshed stubs
run: |
docstring_adder="git+https://github.com/astral-sh/docstring-adder.git@6de51c5f44aea11fe8c8f2d30f9ee0683682c3d2"
rm -rf ruff/crates/ty_vendored/vendor/typeshed
mkdir ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed

# Run with the full matrix of Python versions supported by typeshed,
# so that we codemod in docstrings that only exist on certain versions.
#
# The codemod will only add docstrings to functions/classes that do not
# already have docstrings. We run with Python 3.14 before running with
# any other Python version so that we get the Python 3.14 version of the
# docstring for a definition that exists on all Python versions: if we
# ran with Python 3.9 first, then the later runs with Python 3.10+ would
# not modify the docstring that had already been added using the old version of Python.
#
# TODO: In order to add docstrings for platform-specific APIs, we would also
# need to run the codemod on Windows. We get the runtime docstrings by inspecting
# the docstrings at runtime, so if an API doesn't exist at runtime (because e.g.
# it's Windows-specific and we're running on Linux), then we won't add a docstring to it.
#
uvx --python=3.14 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.13 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.12 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.11 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.10 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.9 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
# The pyproject.toml file is needed by a later job for the black configuration.
# It's deleted before creating the PR.
cp typeshed/pyproject.toml ruff/crates/ty_vendored/vendor/typeshed

cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
cd ruff
git checkout -b typeshedbot/sync-typeshed
git add .
git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
- name: Sync Linux docstrings
if: ${{ success() }}
run: |
cd ruff
./scripts/codemod_docstrings.sh
git commit -am "Sync Linux docstrings" --allow-empty
- name: Push the changes
id: commit
if: ${{ success() }}
run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"

# Checkout the branch created by the sync job,
# and sync all docstrings available on Windows that are not available on Linux.
# Commit the changes and push them to the same branch.
docstrings-windows:
runs-on: windows-latest
timeout-minutes: 20
needs: [sync]

# Don't run the cron job on forks.
# The job will also be skipped if the sync job failed, because it's specified in `needs` above,
# and we haven't used `always()` in the `if` condition here
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}

permissions:
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync Windows docstrings
id: docstrings
shell: bash
run: ./scripts/codemod_docstrings.sh
- name: Commit the changes
if: ${{ steps.docstrings.outcome == 'success' }}
run: |
git commit -am "Sync Windows docstrings" --allow-empty
git push

# Checkout the branch created by the sync job,
# and sync all docstrings available on macOS that are not available on Linux or Windows.
# Push the changes to the same branch and create a PR against the `main` branch using that branch.
docstrings-macos-and-pr:
runs-on: macos-latest
timeout-minutes: 20
needs: [sync, docstrings-windows]

# Don't run the cron job on forks.
# The job will also be skipped if the sync or docstrings-windows jobs failed,
# because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}

permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync macOS docstrings
run: ./scripts/codemod_docstrings.sh
- name: Commit and push the changes
if: ${{ success() }}
run: |
git commit -am "Sync macOS docstrings" --allow-empty

# Here we just reformat the codemodded stubs so that they are
# consistent with the other typeshed stubs around them.
# Typeshed formats code using black in their CI, so we just invoke
# black on the stubs the same way that typeshed does.
uvx --directory=typeshed pre-commit run -a black || true
uvx black crates/ty_vendored/vendor/typeshed/stdlib --config crates/ty_vendored/vendor/typeshed/pyproject.toml || true
git commit -am "Format codemodded docstrings" --allow-empty

rm -rf ruff/crates/ty_vendored/vendor/typeshed
mkdir ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}
run: |
cd ruff
git checkout -b typeshedbot/sync-typeshed
git add .
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
rm crates/ty_vendored/vendor/typeshed/pyproject.toml
git commit -am "Remove pyproject.toml file"

git push
- name: Create a PR
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
if: ${{ success() }}
run: |
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

create-issue-on-failure:
name: Create an issue if the typeshed sync failed
runs-on: ubuntu-latest
needs: [sync]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
needs: [sync, docstrings-windows, docstrings-macos-and-pr]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
permissions:
issues: write
steps:
76 .github/workflows/ty-ecosystem-report.yaml (vendored, Normal file)
@@ -0,0 +1,76 @@
name: ty ecosystem-report

permissions: {}

on:
workflow_dispatch:
schedule:
# Run every Wednesday at 5:00 UTC:
- cron: 0 5 * * 3

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}

jobs:
ty-ecosystem-report:
name: Create ecosystem report
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"

- name: Install Rust toolchain
run: rustup show

- name: Create report
shell: bash
run: |
cd ruff

echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
mkdir -p ~/.config/ty
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"

ecosystem-analyzer \
--verbose \
--repository ruff \
analyze \
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
--output ecosystem-diagnostics.json

mkdir dist

ecosystem-analyzer \
generate-report \
--max-diagnostics-per-project=1200 \
ecosystem-diagnostics.json \
--output dist/index.html

- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}
1 .github/zizmor.yml (vendored)
@@ -11,6 +11,7 @@ rules:
- build-docker.yml
- publish-playground.yml
- ty-ecosystem-analyzer.yaml
- ty-ecosystem-report.yaml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...

@@ -128,5 +128,10 @@ repos:
# but the integration only works if shellcheck is installed
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"

- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck

ci:
skip: [cargo-fmt, dev-generate-all]
31 CHANGELOG.md
@@ -1,5 +1,36 @@
# Changelog

## 0.12.4

### Preview features

- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))

### Bug fixes

- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))

### Rule changes

- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))

### Documentation

- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))

## 0.12.3

### Preview features

@@ -266,6 +266,13 @@ Finally, regenerate the documentation and generated code with `cargo dev generate-all`:

## MkDocs

> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
8 Cargo.lock (generated)
@@ -2711,7 +2711,7 @@ dependencies = [

[[package]]
name = "ruff"
version = "0.12.3"
version = "0.12.4"
dependencies = [
"anyhow",
"argfile",
@@ -2962,7 +2962,7 @@ dependencies = [

[[package]]
name = "ruff_linter"
version = "0.12.3"
version = "0.12.4"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3295,7 +3295,7 @@ dependencies = [

[[package]]
name = "ruff_wasm"
version = "0.12.3"
version = "0.12.4"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -4287,6 +4287,7 @@ dependencies = [
"strum_macros",
"tempfile",
"test-case",
"thin-vec",
"thiserror 2.0.12",
"tracing",
"ty_python_semantic",
@@ -4300,6 +4301,7 @@ name = "ty_server"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"crossbeam",
"jod-thread",
"libc",

@@ -163,6 +163,7 @@ strum_macros = { version = "0.27.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thin-vec = { version = "0.2.14" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.9.0" }

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.4/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.3
rev: v0.12.4
hooks:
# Run the linter.
- id: ruff-check

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.3"
version = "0.12.4"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -16,7 +16,7 @@ use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, JunitEmitter,
PylintEmitter, RdjsonEmitter, SarifEmitter, TextEmitter,
SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -238,7 +238,11 @@ impl Printer {
write!(writer, "{value}")?;
}
OutputFormat::Rdjson => {
RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Rdjson)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::JsonLines => {
let config = DisplayDiagnosticConfig::default()
@@ -290,7 +294,11 @@ impl Printer {
GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Pylint => {
PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Pylint)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Azure => {
let config = DisplayDiagnosticConfig::default()
@@ -993,6 +993,7 @@ fn value_given_to_table_key_is_not_inline_table_2() {
- `lint.exclude`
- `lint.preview`
- `lint.typing-extensions`
- `lint.future-annotations`

For more information, try '--help'.
");
@@ -5717,8 +5718,11 @@ match 42: # invalid-syntax

let snapshot = format!("output_format_{output_format}");

let project_dir = dunce::canonicalize(tempdir.path())?;

insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
@@ -5744,3 +5748,25 @@ match 42: # invalid-syntax

Ok(())
}

#[test]
fn future_annotations_preview_warning() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "lint.future-annotations = true"])
.args(["--select", "F"])
.arg("--no-preview")
.arg("-")
.pass_stdin("1"),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!

----- stderr -----
warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
",
);
}

@@ -18,6 +18,6 @@ exit_code: 1

----- stdout -----
input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y`
input.py:3: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----

@@ -75,8 +75,7 @@ exit_code: 1
},
{
"code": {
"url": null,
"value": null
"value": "invalid-syntax"
},
"location": {
"path": "[TMP]/input.py",
@@ -94,7 +93,7 @@ exit_code: 1
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
}
],
"severity": "warning",
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
@@ -2,6 +2,7 @@
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};

use std::fmt::Write;
use std::ops::Range;

use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
@@ -441,6 +442,37 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
});
}

fn benchmark_many_enum_members(criterion: &mut Criterion) {
const NUM_ENUM_MEMBERS: usize = 512;

setup_rayon();

let mut code = String::new();
writeln!(&mut code, "from enum import Enum").ok();

writeln!(&mut code, "class E(Enum):").ok();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, "    m{i} = {i}").ok();
}
writeln!(&mut code).ok();

for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, "print(E.m{i})").ok();
}

criterion.bench_function("ty_micro[many_enum_members]", |b| {
b.iter_batched_ref(
|| setup_micro_case(&code),
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}

struct ProjectBenchmark<'a> {
project: InstalledProject<'a>,
fs: MemoryFileSystem,
@@ -591,6 +623,7 @@ criterion_group!(
benchmark_many_tuple_assignments,
benchmark_complex_constrained_attributes_1,
benchmark_complex_constrained_attributes_2,
benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_main!(check_file, micro, project);
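For reference, a standalone sketch (not part of the diff above) of the Python source that the new `many_enum_members` micro-benchmark generates, using a tiny member count instead of the benchmark's 512 so the input shape is easy to see:

```rust
use std::fmt::Write;

// Illustrative only: rebuild the benchmark's generated Python module with 4
// enum members instead of 512 and print it.
fn main() {
    const NUM_ENUM_MEMBERS: usize = 4;
    let mut code = String::new();
    writeln!(&mut code, "from enum import Enum").ok();
    writeln!(&mut code, "class E(Enum):").ok();
    for i in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut code, "    m{i} = {i}").ok();
    }
    writeln!(&mut code).ok();
    for i in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut code, "print(E.m{i})").ok();
    }
    print!("{code}");
}
```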
@@ -1,4 +1,4 @@
use std::{fmt::Formatter, sync::Arc};
use std::{fmt::Formatter, path::Path, sync::Arc};

use ruff_diagnostics::Fix;
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
@@ -308,6 +308,10 @@ impl Diagnostic {

/// Set the fix for this diagnostic.
pub fn set_fix(&mut self, fix: Fix) {
debug_assert!(
self.primary_span().is_some(),
"Expected a source file for a diagnostic with a fix"
);
Arc::make_mut(&mut self.inner).fix = Some(fix);
}

@@ -1008,6 +1012,18 @@ impl UnifiedFile {
}
}

/// Return the file's path relative to the current working directory.
pub fn relative_path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a Path {
let cwd = resolver.current_directory();
let path = Path::new(self.path(resolver));

if let Ok(path) = path.strip_prefix(cwd) {
return path;
}

path
}

fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
match self {
UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
@@ -1259,6 +1275,13 @@ pub enum DiagnosticFormat {
/// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
#[cfg(feature = "serde")]
JsonLines,
/// Print diagnostics in the JSON format expected by [reviewdog].
///
/// [reviewdog]: https://github.com/reviewdog/reviewdog
#[cfg(feature = "serde")]
Rdjson,
/// Print diagnostics in the format emitted by Pylint.
Pylint,
}

/// A representation of the kinds of messages inside a diagnostic.
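The new `relative_path` helper above strips the current working directory from a path and falls back to the original path when the file lies outside it. A minimal standalone sketch of that `strip_prefix` behavior (paths here are made up for illustration):

```rust
use std::path::Path;

// Mirrors the fallback in `relative_path`: keep the original path if it is not
// under the current working directory.
fn relative_to<'a>(path: &'a Path, cwd: &Path) -> &'a Path {
    path.strip_prefix(cwd).unwrap_or(path)
}

fn main() {
    let cwd = Path::new("/home/user/project");
    assert_eq!(
        relative_to(Path::new("/home/user/project/src/lib.rs"), cwd),
        Path::new("src/lib.rs")
    );
    // A path outside the cwd is returned unchanged.
    assert_eq!(relative_to(Path::new("/tmp/other.rs"), cwd), Path::new("/tmp/other.rs"));
    println!("ok");
}
```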
@@ -1,4 +1,5 @@
use std::collections::BTreeMap;
use std::path::Path;

use ruff_annotate_snippets::{
Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
@@ -22,12 +23,16 @@ use super::{
};

use azure::AzureRenderer;
use pylint::PylintRenderer;

mod azure;
#[cfg(feature = "serde")]
mod json;
#[cfg(feature = "serde")]
mod json_lines;
mod pylint;
#[cfg(feature = "serde")]
mod rdjson;

/// A type that implements `std::fmt::Display` for diagnostic rendering.
///
@@ -184,6 +189,13 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
json_lines::JsonLinesRenderer::new(self.resolver, self.config)
.render(f, self.diagnostics)?;
}
#[cfg(feature = "serde")]
DiagnosticFormat::Rdjson => {
rdjson::RdjsonRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
DiagnosticFormat::Pylint => {
PylintRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
}

Ok(())
@@ -705,6 +717,9 @@ pub trait FileResolver {

/// Returns whether the file given is a Jupyter notebook.
fn is_notebook(&self, file: &UnifiedFile) -> bool;

/// Returns the current working directory.
fn current_directory(&self) -> &Path;
}

impl<T> FileResolver for T
@@ -740,6 +755,10 @@ where
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}

fn current_directory(&self) -> &Path {
self.system().current_directory().as_std_path()
}
}

impl FileResolver for &dyn Db {
@@ -772,6 +791,10 @@ impl FileResolver for &dyn Db {
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}

fn current_directory(&self) -> &Path {
self.system().current_directory().as_std_path()
}
}

/// An abstraction over a unit of user input.
@@ -262,9 +262,6 @@ struct JsonEdit<'a> {

#[cfg(test)]
mod tests {
use ruff_diagnostics::{Edit, Fix};
use ruff_text_size::TextSize;

use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
@@ -297,13 +294,7 @@ mod tests {
env.format(DiagnosticFormat::Json);
env.preview(false);

let diag = env
.err()
.fix(Fix::safe_edit(Edit::insertion(
"edit".to_string(),
TextSize::from(0),
)))
.build();
let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@@ -317,23 +308,7 @@ mod tests {
"row": 1
},
"filename": "",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "edit",
"end_location": {
"column": 1,
"row": 1
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": null
},
"fix": null,
"location": {
"column": 1,
"row": 1
@@ -353,13 +328,7 @@ mod tests {
env.format(DiagnosticFormat::Json);
env.preview(true);

let diag = env
.err()
.fix(Fix::safe_edit(Edit::insertion(
"edit".to_string(),
TextSize::from(0),
)))
.build();
let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@@ -370,17 +339,7 @@ mod tests {
"code": null,
"end_location": null,
"filename": null,
"fix": {
"applicability": "safe",
"edits": [
{
"content": "edit",
"end_location": null,
"location": null
}
],
"message": null
},
"fix": null,
"location": null,
"message": "main diagnostic message",
"noqa_row": null,
97 crates/ruff_db/src/diagnostic/render/pylint.rs (Normal file)
@@ -0,0 +1,97 @@
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// Generate violations in Pylint format.
///
/// The format is given by this string:
///
/// ```python
/// "%(path)s:%(row)d: [%(code)s] %(text)s"
/// ```
///
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
pub(super) struct PylintRenderer<'a> {
resolver: &'a dyn FileResolver,
}

impl<'a> PylintRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
}

impl PylintRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diagnostic in diagnostics {
let (filename, row) = diagnostic
.primary_span_ref()
.map(|span| {
let file = span.file();

let row = span
.range()
.filter(|_| !self.resolver.is_notebook(file))
.map(|range| {
file.diagnostic_source(self.resolver)
.as_source_code()
.line_column(range.start())
.line
});

(file.relative_path(self.resolver).to_string_lossy(), row)
})
.unwrap_or_default();

let code = diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), SecondaryCode::as_str);

let row = row.unwrap_or_default();

writeln!(
f,
"{path}:{row}: [{code}] {body}",
path = filename,
body = diagnostic.body()
)?;
}

Ok(())
}
}

#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Pylint);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Pylint);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Pylint);

let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@":1: [test-diagnostic] main diagnostic message",
);
}
}
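A minimal sketch of the text format the `PylintRenderer` above emits (`%(path)s:%(row)d: [%(code)s] %(text)s`); the helper function and values here are illustrative, not part of the crate:

```rust
use std::fmt::Write;

// Illustrative formatter for the flake8 "pylint" style shown above.
// A diagnostic without a file falls back to an empty path (see the
// `missing_file` test in the new module).
fn pylint_line(path: &str, row: usize, code: &str, body: &str) -> String {
    let mut out = String::new();
    write!(out, "{path}:{row}: [{code}] {body}").unwrap();
    out
}

fn main() {
    assert_eq!(
        pylint_line("fib.py", 1, "F401", "`os` imported but unused"),
        "fib.py:1: [F401] `os` imported but unused"
    );
    println!("{}", pylint_line("", 1, "test-diagnostic", "main diagnostic message"));
}
```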
235 crates/ruff_db/src/diagnostic/render/rdjson.rs (Normal file)
@@ -0,0 +1,235 @@
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};

use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;

use crate::diagnostic::Diagnostic;

use super::FileResolver;

pub struct RdjsonRenderer<'a> {
resolver: &'a dyn FileResolver,
}

impl<'a> RdjsonRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}

pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
write!(
f,
"{:#}",
serde_json::json!(RdjsonDiagnostics::new(diagnostics, self.resolver))
)
}
}

struct ExpandedDiagnostics<'a> {
resolver: &'a dyn FileResolver,
diagnostics: &'a [Diagnostic],
}

impl Serialize for ExpandedDiagnostics<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;

for diagnostic in self.diagnostics {
let value = diagnostic_to_rdjson(diagnostic, self.resolver);
s.serialize_element(&value)?;
}

s.end()
}
}

fn diagnostic_to_rdjson<'a>(
diagnostic: &'a Diagnostic,
resolver: &'a dyn FileResolver,
) -> RdjsonDiagnostic<'a> {
let span = diagnostic.primary_span_ref();
let source_file = span.map(|span| {
let file = span.file();
(file.path(resolver), file.diagnostic_source(resolver))
});

let location = source_file.as_ref().map(|(path, source)| {
let range = diagnostic.range().map(|range| {
let source_code = source.as_source_code();
let start = source_code.line_column(range.start());
let end = source_code.line_column(range.end());
RdjsonRange::new(start, end)
});

RdjsonLocation { path, range }
});

let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();

RdjsonDiagnostic {
message: diagnostic.body(),
location,
code: RdjsonCode {
value: diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), |code| code.as_str()),
url: diagnostic.to_ruff_url(),
},
suggestions: rdjson_suggestions(
edits,
source_file
.as_ref()
.map(|(_, source)| source.as_source_code()),
),
}
}

fn rdjson_suggestions<'a>(
edits: &'a [Edit],
source_code: Option<SourceCode>,
) -> Vec<RdjsonSuggestion<'a>> {
if edits.is_empty() {
return Vec::new();
}

let Some(source_code) = source_code else {
debug_assert!(false, "Expected a source file for a diagnostic with a fix");
return Vec::new();
};

edits
.iter()
.map(|edit| {
let start = source_code.line_column(edit.start());
let end = source_code.line_column(edit.end());
let range = RdjsonRange::new(start, end);

RdjsonSuggestion {
range,
text: edit.content().unwrap_or_default(),
}
})
.collect()
}

#[derive(Serialize)]
struct RdjsonDiagnostics<'a> {
diagnostics: ExpandedDiagnostics<'a>,
severity: &'static str,
source: RdjsonSource,
}

impl<'a> RdjsonDiagnostics<'a> {
fn new(diagnostics: &'a [Diagnostic], resolver: &'a dyn FileResolver) -> Self {
Self {
source: RdjsonSource {
name: "ruff",
url: env!("CARGO_PKG_HOMEPAGE"),
},
severity: "WARNING",
diagnostics: ExpandedDiagnostics {
diagnostics,
resolver,
},
}
}
}

#[derive(Serialize)]
struct RdjsonSource {
name: &'static str,
url: &'static str,
}

#[derive(Serialize)]
struct RdjsonDiagnostic<'a> {
code: RdjsonCode<'a>,
#[serde(skip_serializing_if = "Option::is_none")]
location: Option<RdjsonLocation<'a>>,
message: &'a str,
#[serde(skip_serializing_if = "Vec::is_empty")]
suggestions: Vec<RdjsonSuggestion<'a>>,
}

#[derive(Serialize)]
struct RdjsonLocation<'a> {
path: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
range: Option<RdjsonRange>,
}

#[derive(Default, Serialize)]
struct RdjsonRange {
end: LineColumn,
start: LineColumn,
}

impl RdjsonRange {
fn new(start: LineColumn, end: LineColumn) -> Self {
Self { start, end }
}
}

#[derive(Serialize)]
struct RdjsonCode<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
url: Option<String>,
value: &'a str,
}

#[derive(Serialize)]
struct RdjsonSuggestion<'a> {
range: RdjsonRange,
text: &'a str,
}

#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Rdjson);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Rdjson);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn missing_file_stable() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Rdjson);
env.preview(false);

let diag = env.err().build();

insta::assert_snapshot!(env.render(&diag));
}

#[test]
fn missing_file_preview() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Rdjson);
env.preview(true);

let diag = env.err().build();

insta::assert_snapshot!(env.render(&diag));
}
}
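For orientation, a sketch of the reviewdog "rdjson" envelope the renderer above produces, built here directly with `serde_json`; the diagnostic values are illustrative, and the exact field layout is what the snapshots below show:

```rust
// Illustrative only: the rdjson envelope is a "source", a "severity", and a
// list of diagnostics, each with a code, an optional location, and optional
// suggestions (fix edits). Range/suggestion fields are omitted here.
use serde_json::json;

fn main() {
    let payload = json!({
        "diagnostics": [{
            "code": {
                "url": "https://docs.astral.sh/ruff/rules/unused-import",
                "value": "F401"
            },
            "location": { "path": "fib.py" },
            "message": "`os` imported but unused"
        }],
        "severity": "WARNING",
        "source": { "name": "ruff", "url": "https://docs.astral.sh/ruff" }
    });
    // The renderer writes the same structure with `write!(f, "{:#}", json!(...))`.
    println!("{payload:#}");
}
```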
@@ -1,7 +1,6 @@
---
source: crates/ruff_linter/src/message/pylint.rs
expression: content
snapshot_kind: text
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
fib.py:1: [F401] `os` imported but unused
fib.py:6: [F841] Local variable `x` is assigned to but never used
@@ -0,0 +1,6 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline
@@ -0,0 +1,20 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
"value": "test-diagnostic"
},
"message": "main diagnostic message"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}
@@ -0,0 +1,20 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
"value": "test-diagnostic"
},
"message": "main diagnostic message"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}
@@ -1,7 +1,6 @@
---
source: crates/ruff_linter/src/message/rdjson.rs
expression: content
snapshot_kind: text
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render_diagnostics(&diagnostics)
---
{
"diagnostics": [
@@ -96,7 +95,7 @@ snapshot_kind: text
"message": "Undefined name `a`"
}
],
"severity": "warning",
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
@@ -1,14 +1,12 @@
---
source: crates/ruff_linter/src/message/rdjson.rs
expression: content
snapshot_kind: text
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render_diagnostics(&diagnostics)
---
{
"diagnostics": [
{
"code": {
"url": null,
"value": null
"value": "invalid-syntax"
},
"location": {
"path": "syntax_errors.py",
@@ -27,8 +25,7 @@ snapshot_kind: text
},
{
"code": {
"url": null,
"value": null
"value": "invalid-syntax"
},
"location": {
"path": "syntax_errors.py",
@@ -46,7 +43,7 @@ snapshot_kind: text
"message": "SyntaxError: Expected ')', found newline"
}
],
"severity": "warning",
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.3"
version = "0.12.4"
publish = false
authors = { workspace = true }
edition = { workspace = true }
10 crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TC001-3_future.py (vendored, Normal file)
@@ -0,0 +1,10 @@
from collections import Counter

from elsewhere import third_party

from . import first_party


def f(x: first_party.foo): ...
def g(x: third_party.bar): ...
def h(x: Counter): ...
68 crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TC001_future.py (vendored, Normal file)
@@ -0,0 +1,68 @@
def f():
from . import first_party

def f(x: first_party.foo): ...


# Type parameter bounds
def g():
from . import foo

class C[T: foo.Ty]: ...


def h():
from . import foo

def f[T: foo.Ty](x: T): ...


def i():
from . import foo

type Alias[T: foo.Ty] = list[T]


# Type parameter defaults
def j():
from . import foo

class C[T = foo.Ty]: ...


def k():
from . import foo

def f[T = foo.Ty](x: T): ...


def l():
from . import foo

type Alias[T = foo.Ty] = list[T]


# non-generic type alias
def m():
from . import foo

type Alias = foo.Ty


# unions
from typing import Union


def n():
from . import foo

def f(x: Union[foo.Ty, int]): ...
def g(x: foo.Ty | int): ...


# runtime and typing usage
def o():
from . import foo

def f(x: foo.Ty):
return foo.Ty()
6 crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TC001_future_present.py (vendored, Normal file)
@@ -0,0 +1,6 @@
from __future__ import annotations

from . import first_party


def f(x: first_party.foo): ...
@@ -54,6 +54,13 @@ windows_path.with_suffix(r"s")
windows_path.with_suffix(u'' "json")
windows_path.with_suffix(suffix="js")

Path().with_suffix(".")
Path().with_suffix("py")
PosixPath().with_suffix("py")
PurePath().with_suffix("py")
PurePosixPath().with_suffix("py")
PureWindowsPath().with_suffix("py")
WindowsPath().with_suffix("py")

### No errors
path.with_suffix()
26 crates/ruff_linter/resources/test/fixtures/flake8_use_pathlib/PTH210_2.py (vendored, Normal file)
@@ -0,0 +1,26 @@
from pathlib import (
Path,
PosixPath,
PurePath,
PurePosixPath,
PureWindowsPath,
WindowsPath,
)
import pathlib


path = Path()
posix_path: pathlib.PosixPath = PosixPath()
pure_path: PurePath = PurePath()
pure_posix_path = pathlib.PurePosixPath()
pure_windows_path: PureWindowsPath = pathlib.PureWindowsPath()
windows_path: pathlib.WindowsPath = pathlib.WindowsPath()


### No Errors
path.with_suffix(".")
posix_path.with_suffix(".")
pure_path.with_suffix(".")
pure_posix_path.with_suffix(".")
pure_windows_path.with_suffix(".")
windows_path.with_suffix(".")
@@ -104,3 +104,6 @@ os.chmod(x)
os.replace("src", "dst", src_dir_fd=1, dst_dir_fd=2)
os.replace("src", "dst", src_dir_fd=1)
os.replace("src", "dst", dst_dir_fd=2)

os.getcwd()
os.getcwdb()
5 crates/ruff_linter/resources/test/fixtures/isort/required_imports/whitespace.py (vendored, Normal file)
@@ -0,0 +1,5 @@
# This is a regression test for https://github.com/astral-sh/ruff/issues/19310
# there is a (potentially invisible) unicode formfeed character (000C) between "docstring" and the semicolon
"docstring"; print(
f"{__doc__=}",
)
@@ -189,3 +189,18 @@ f"{ham[lower + 1 :, "columnname"]}"
#: Okay: https://github.com/astral-sh/ruff/issues/12023
f"{x = :.2f}"
f"{(x) = :.2f}"

# t-strings
t"{ {'a': 1} }"
t"{[ { {'a': 1} } ]}"
t"normal { {t"{ { [1, 2] } }" } } normal"

t"{x = :.2f}"
t"{(x) = :.2f}"

#: Okay
t"{ham[lower +1 :, "columnname"]}"

#: E203:1:13
t"{ham[lower + 1 :, "columnname"]}"


@@ -142,3 +142,20 @@ class PEP696GoodWithEmptyBases[A: object="foo"[::-1], B: object =[[["foo", "bar"

class PEP696GoodWithNonEmptyBases[A: object="foo"[::-1], B: object =[[["foo", "bar"]]], C: object= bytes](object, something_dynamic[x::-1]):
pass

# E231
t"{(a,b)}"

# Okay because it's hard to differentiate between the usages of a colon in a t-string
t"{a:=1}"
t"{ {'a':1} }"
t"{a:.3f}"
t"{(a:=1)}"
t"{(lambda x:x)}"
t"normal{t"{a:.3f}"}normal"

#: Okay
snapshot.file_uri[len(t's3://{self.s3_bucket_name}/'):]

#: E231
{len(t's3://{self.s3_bucket_name}/'):1}

@@ -722,3 +722,10 @@ def inconsistent_indent_byte_size():

Returns:
"""


def line_continuation_chars():\

"""No fix should be offered for D201/D202 because of the line continuation chars."""\

...

Binary file not shown.
@@ -91,9 +91,16 @@ Path("foo.txt").write_text(text, encoding="utf-8")
Path("foo.txt").write_text(text, *args)
Path("foo.txt").write_text(text, **kwargs)

# Violation but not detectable
# https://github.com/astral-sh/ruff/issues/19294
x = Path("foo.txt")
x.open()

# https://github.com/astral-sh/ruff/issues/18107
codecs.open("plw1514.py", "r", "utf-8").close()  # this is fine

# function argument annotated as Path
from pathlib import Path

def format_file(file: Path):
with file.open() as f:
contents = f.read()

@@ -27,7 +27,24 @@ _ = Decimal.from_float(float(" -inF\n \t"))
_ = Decimal.from_float(float(" InfinIty \n\t "))
_ = Decimal.from_float(float(" -InfinIty\n \t"))

# OK
# Cases with keyword arguments - should produce unsafe fixes
_ = Fraction.from_decimal(dec=Decimal("4.2"))
_ = Decimal.from_float(f=4.2)

# Cases with invalid argument counts - should not get fixes
_ = Fraction.from_decimal(Decimal("4.2"), 1)
_ = Decimal.from_float(4.2, None)

# Cases with wrong keyword arguments - should not get fixes
_ = Fraction.from_decimal(numerator=Decimal("4.2"))
_ = Decimal.from_float(value=4.2)

# Cases with type validation issues - should produce unsafe fixes
_ = Decimal.from_float("4.2")  # Invalid type for from_float
_ = Fraction.from_decimal(4.2)  # Invalid type for from_decimal
_ = Fraction.from_float("4.2")  # Invalid type for from_float

# OK - should not trigger the rule
_ = Fraction(0.1)
_ = Fraction(-0.5)
_ = Fraction(5.0)

16 crates/ruff_linter/resources/test/fixtures/ruff/RUF058_2.py (vendored, Normal file)
@@ -0,0 +1,16 @@
from itertools import starmap
import itertools

# Errors in Python 3.14+
starmap(func, zip(a, b, c, strict=True))
starmap(func, zip(a, b, c, strict=False))
starmap(func, zip(a, b, c, strict=strict))


# No errors

starmap(func)
starmap(func, zip(a, b, c, **kwargs))
starmap(func, zip(a, b, c), foo)
starmap(func, zip(a, b, c, lorem=ipsum))
starmap(func, zip(a, b, c), lorem=ipsum)
@@ -71,7 +71,7 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
flake8_type_checking::helpers::is_valid_runtime_import(
binding,
&checker.semantic,
&checker.settings().flake8_type_checking,
checker.settings(),
)
})
.collect()

@@ -1044,7 +1044,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
Rule::OsMakedirs,
Rule::OsRename,
Rule::OsReplace,
Rule::OsGetcwd,
Rule::OsStat,
Rule::OsPathJoin,
Rule::OsPathSamefile,
@@ -1110,6 +1109,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if checker.is_rule_enabled(Rule::OsReadlink) {
flake8_use_pathlib::rules::os_readlink(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsGetcwd) {
flake8_use_pathlib::rules::os_getcwd(checker, call, segments);
}
if checker.is_rule_enabled(Rule::PathConstructorCurrentDirectory) {
flake8_use_pathlib::rules::path_constructor_current_directory(
checker, call, segments,

@@ -2770,11 +2770,10 @@ impl<'a> Checker<'a> {

self.semantic.restore(snapshot);

if self.semantic.in_typing_only_annotation() {
if self.is_rule_enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, annotation, range);
}
if self.is_rule_enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, annotation, range);
}

if self.source_type.is_stub() {
if self.is_rule_enabled(Rule::QuotedAnnotationInStub) {
flake8_pyi::rules::quoted_annotation_in_stub(

@@ -928,7 +928,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8UsePathlib, "106") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRmdir),
(Flake8UsePathlib, "107") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRemove),
(Flake8UsePathlib, "108") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsUnlink),
(Flake8UsePathlib, "109") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsGetcwd),
(Flake8UsePathlib, "109") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsGetcwd),
(Flake8UsePathlib, "110") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathExists),
(Flake8UsePathlib, "111") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathExpanduser),
(Flake8UsePathlib, "112") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathIsdir),

@@ -288,7 +288,7 @@ fn match_docstring_end(body: &[Stmt]) -> Option<TextSize> {
fn match_semicolon(s: &str) -> Option<TextSize> {
for (offset, c) in s.char_indices() {
match c {
' ' | '\t' => continue,
_ if is_python_whitespace(c) => continue,
';' => return Some(TextSize::try_from(offset).unwrap()),
_ => break,
}
|
||||
|
||||
@@ -527,6 +527,17 @@ impl<'a> Importer<'a> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a `from __future__ import annotations` import.
|
||||
pub(crate) fn add_future_import(&self) -> Edit {
|
||||
let import = &NameImport::ImportFrom(MemberNameImport::member(
|
||||
"__future__".to_string(),
|
||||
"annotations".to_string(),
|
||||
));
|
||||
// Note that `TextSize::default` should ensure that the import is added at the very
|
||||
// beginning of the file via `Insertion::start_of_file`.
|
||||
self.add_import(import, TextSize::default())
|
||||
}
|
||||
}
|
||||
|
||||
/// An edit to the top-level of a module, making it available at runtime.
|
||||
|
||||
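To make the new `add_future_import` helper concrete: it returns an edit whose insertion offset is `TextSize::default()` (offset zero), so the future import is prepended to the module source. A rough model of that edit in plain Python (the file contents are illustrative only):

```python
# Minimal model of what add_future_import() emits: an insertion at offset 0
# (Insertion::start_of_file), i.e. the text is prepended to the module.
source = "from pathlib import Path\n\ndef format_file(file: Path): ...\n"
edit_text = "from __future__ import annotations\n"
edit_offset = 0  # corresponds to TextSize::default()

patched = source[:edit_offset] + edit_text + source[edit_offset:]
print(patched)
```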
@@ -15,8 +15,6 @@ pub use github::GithubEmitter;
|
||||
pub use gitlab::GitlabEmitter;
|
||||
pub use grouped::GroupedEmitter;
|
||||
pub use junit::JunitEmitter;
|
||||
pub use pylint::PylintEmitter;
|
||||
pub use rdjson::RdjsonEmitter;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, SourceFile};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
@@ -31,8 +29,6 @@ mod github;
|
||||
mod gitlab;
|
||||
mod grouped;
|
||||
mod junit;
|
||||
mod pylint;
|
||||
mod rdjson;
|
||||
mod sarif;
|
||||
mod text;
|
||||
|
||||
@@ -80,6 +76,13 @@ where
|
||||
body,
|
||||
);
|
||||
|
||||
let span = Span::from(file).with_range(range);
|
||||
let mut annotation = Annotation::primary(span);
|
||||
if let Some(suggestion) = suggestion {
|
||||
annotation = annotation.message(suggestion);
|
||||
}
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
if let Some(fix) = fix {
|
||||
diagnostic.set_fix(fix);
|
||||
}
|
||||
@@ -92,13 +95,6 @@ where
|
||||
diagnostic.set_noqa_offset(noqa_offset);
|
||||
}
|
||||
|
||||
let span = Span::from(file).with_range(range);
|
||||
let mut annotation = Annotation::primary(span);
|
||||
if let Some(suggestion) = suggestion {
|
||||
annotation = annotation.message(suggestion);
|
||||
}
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
diagnostic.set_secondary_code(SecondaryCode::new(rule.noqa_code().to_string()));
|
||||
|
||||
diagnostic
|
||||
@@ -130,6 +126,10 @@ impl FileResolver for EmitterContext<'_> {
|
||||
UnifiedFile::Ruff(file) => self.notebook_indexes.get(file.name()).is_some(),
|
||||
}
|
||||
}
|
||||
|
||||
fn current_directory(&self) -> &std::path::Path {
|
||||
crate::fs::get_cwd()
|
||||
}
|
||||
}
|
||||
|
||||
struct MessageWithLocation<'a> {
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
|
||||
/// Generate violations in Pylint format.
|
||||
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
|
||||
#[derive(Default)]
|
||||
pub struct PylintEmitter;
|
||||
|
||||
impl Emitter for PylintEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
let filename = diagnostic.expect_ruff_filename();
|
||||
let row = if context.is_notebook(&filename) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
OneIndexed::from_zero_indexed(0)
|
||||
} else {
|
||||
diagnostic.expect_ruff_start_location().line
|
||||
};
|
||||
|
||||
let body = if let Some(code) = diagnostic.secondary_code() {
|
||||
format!("[{code}] {body}", body = diagnostic.body())
|
||||
} else {
|
||||
diagnostic.body().to_string()
|
||||
};
|
||||
|
||||
writeln!(
|
||||
writer,
|
||||
"{path}:{row}: {body}",
|
||||
path = relativize_path(&filename),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::message::PylintEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = PylintEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let mut emitter = PylintEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
}
|
||||
@@ -1,143 +0,0 @@
|
||||
use std::io::Write;
|
||||
|
||||
use serde::ser::SerializeSeq;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::SourceCode;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::Edit;
|
||||
use crate::message::{Emitter, EmitterContext, LineColumn};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct RdjsonEmitter;
|
||||
|
||||
impl Emitter for RdjsonEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
_context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
serde_json::to_writer_pretty(
|
||||
writer,
|
||||
&json!({
|
||||
"source": {
|
||||
"name": "ruff",
|
||||
"url": "https://docs.astral.sh/ruff",
|
||||
},
|
||||
"severity": "warning",
|
||||
"diagnostics": &ExpandedMessages{ diagnostics }
|
||||
}),
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct ExpandedMessages<'a> {
|
||||
diagnostics: &'a [Diagnostic],
|
||||
}
|
||||
|
||||
impl Serialize for ExpandedMessages<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
|
||||
|
||||
for message in self.diagnostics {
|
||||
let value = message_to_rdjson_value(message);
|
||||
s.serialize_element(&value)?;
|
||||
}
|
||||
|
||||
s.end()
|
||||
}
|
||||
}
|
||||
|
||||
fn message_to_rdjson_value(message: &Diagnostic) -> Value {
|
||||
let source_file = message.expect_ruff_source_file();
|
||||
let source_code = source_file.to_source_code();
|
||||
|
||||
let start_location = source_code.line_column(message.expect_range().start());
|
||||
let end_location = source_code.line_column(message.expect_range().end());
|
||||
|
||||
if let Some(fix) = message.fix() {
|
||||
json!({
|
||||
"message": message.body(),
|
||||
"location": {
|
||||
"path": message.expect_ruff_filename(),
|
||||
"range": rdjson_range(start_location, end_location),
|
||||
},
|
||||
"code": {
|
||||
"value": message.secondary_code(),
|
||||
"url": message.to_ruff_url(),
|
||||
},
|
||||
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
|
||||
})
|
||||
} else {
|
||||
json!({
|
||||
"message": message.body(),
|
||||
"location": {
|
||||
"path": message.expect_ruff_filename(),
|
||||
"range": rdjson_range(start_location, end_location),
|
||||
},
|
||||
"code": {
|
||||
"value": message.secondary_code(),
|
||||
"url": message.to_ruff_url(),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn rdjson_suggestions(edits: &[Edit], source_code: &SourceCode) -> Value {
|
||||
Value::Array(
|
||||
edits
|
||||
.iter()
|
||||
.map(|edit| {
|
||||
let location = source_code.line_column(edit.start());
|
||||
let end_location = source_code.line_column(edit.end());
|
||||
|
||||
json!({
|
||||
"range": rdjson_range(location, end_location),
|
||||
"text": edit.content().unwrap_or_default(),
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
fn rdjson_range(start: LineColumn, end: LineColumn) -> Value {
|
||||
json!({
|
||||
"start": start,
|
||||
"end": end,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::message::RdjsonEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = RdjsonEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let mut emitter = RdjsonEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/message/pylint.rs
|
||||
expression: content
|
||||
snapshot_kind: text
|
||||
---
|
||||
syntax_errors.py:1: SyntaxError: Expected one or more symbol names after import
|
||||
syntax_errors.py:3: SyntaxError: Expected ')', found newline
|
||||
@@ -134,6 +134,11 @@ pub(crate) const fn is_fix_os_path_dirname_enabled(settings: &LinterSettings) ->
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19245
|
||||
pub(crate) const fn is_fix_os_getcwd_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/11436
|
||||
// https://github.com/astral-sh/ruff/pull/11168
|
||||
pub(crate) const fn is_dunder_init_fix_unused_import_enabled(settings: &LinterSettings) -> bool {
|
||||
@@ -195,3 +200,8 @@ pub(crate) const fn is_safe_super_call_with_parameters_fix_enabled(
|
||||
pub(crate) const fn is_assert_raises_exception_call_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19100
|
||||
pub(crate) const fn is_add_future_annotations_imports_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
@@ -2,8 +2,7 @@ use std::fmt;
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_semantic::{MemberNameImport, NameImport};
|
||||
use ruff_text_size::{Ranged, TextSize};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::{AlwaysFixableViolation, Fix};
|
||||
@@ -85,15 +84,7 @@ impl AlwaysFixableViolation for FutureRequiredTypeAnnotation {
|
||||
|
||||
/// FA102
|
||||
pub(crate) fn future_required_type_annotation(checker: &Checker, expr: &Expr, reason: Reason) {
|
||||
let mut diagnostic =
|
||||
checker.report_diagnostic(FutureRequiredTypeAnnotation { reason }, expr.range());
|
||||
let required_import = NameImport::ImportFrom(MemberNameImport::member(
|
||||
"__future__".to_string(),
|
||||
"annotations".to_string(),
|
||||
));
|
||||
diagnostic.set_fix(Fix::unsafe_edit(
|
||||
checker
|
||||
.importer()
|
||||
.add_import(&required_import, TextSize::default()),
|
||||
));
|
||||
checker
|
||||
.report_diagnostic(FutureRequiredTypeAnnotation { reason }, expr.range())
|
||||
.set_fix(Fix::unsafe_edit(checker.importer().add_future_import()));
|
||||
}
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
use ruff_diagnostics::Fix;
|
||||
use ruff_python_ast::Expr;
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_semantic::{MemberNameImport, NameImport};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::AlwaysFixableViolation;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::{AlwaysFixableViolation, Fix};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for missing `from __future__ import annotations` imports upon
|
||||
@@ -95,15 +93,7 @@ pub(crate) fn future_rewritable_type_annotation(checker: &Checker, expr: &Expr)
|
||||
|
||||
let Some(name) = name else { return };
|
||||
|
||||
let import = &NameImport::ImportFrom(MemberNameImport::member(
|
||||
"__future__".to_string(),
|
||||
"annotations".to_string(),
|
||||
));
|
||||
checker
|
||||
.report_diagnostic(FutureRewritableTypeAnnotation { name }, expr.range())
|
||||
.set_fix(Fix::unsafe_edit(
|
||||
checker
|
||||
.importer()
|
||||
.add_import(import, ruff_text_size::TextSize::default()),
|
||||
));
|
||||
.set_fix(Fix::unsafe_edit(checker.importer().add_future_import()));
|
||||
}
|
||||
|
||||
@@ -8,41 +8,110 @@ use ruff_python_ast::{self as ast, Decorator, Expr, StringLiteralFlags};
|
||||
use ruff_python_codegen::{Generator, Stylist};
|
||||
use ruff_python_parser::typing::parse_type_annotation;
|
||||
use ruff_python_semantic::{
|
||||
Binding, BindingKind, Modules, NodeId, ResolvedReference, ScopeKind, SemanticModel, analyze,
|
||||
Binding, BindingKind, Modules, NodeId, ScopeKind, SemanticModel, analyze,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::Edit;
|
||||
use crate::Locator;
|
||||
use crate::rules::flake8_type_checking::settings::Settings;
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
/// Returns `true` if the [`ResolvedReference`] is in a typing-only context _or_ a runtime-evaluated
|
||||
/// context (with quoting enabled).
|
||||
pub(crate) fn is_typing_reference(reference: &ResolvedReference, settings: &Settings) -> bool {
|
||||
reference.in_type_checking_block()
|
||||
// if we're not in a type checking block, we necessarily need to be within a
|
||||
// type definition to be considered a typing reference
|
||||
|| (reference.in_type_definition()
|
||||
&& (reference.in_typing_only_annotation()
|
||||
|| reference.in_string_type_definition()
|
||||
|| (settings.quote_annotations && reference.in_runtime_evaluated_annotation())))
|
||||
/// Represents the kind of an existing or potential typing-only annotation.
|
||||
///
|
||||
/// Note that the order of variants is important here. `Runtime` has the highest precedence when
|
||||
/// calling [`TypingReference::combine`] on two references, followed by `Future`, `Quote`, and
|
||||
/// `TypingOnly` with the lowest precedence.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub(crate) enum TypingReference {
|
||||
/// The reference is in a runtime-evaluated context.
|
||||
Runtime,
|
||||
/// The reference is in a runtime-evaluated context, but the
|
||||
/// `lint.future-annotations` setting is enabled.
|
||||
///
|
||||
/// This takes precedence if both quoting and future imports are enabled.
|
||||
Future,
|
||||
/// The reference is in a runtime-evaluated context, but the
|
||||
/// `lint.flake8-type-checking.quote-annotations` setting is enabled.
|
||||
Quote,
|
||||
/// The reference is in a typing-only context.
|
||||
TypingOnly,
|
||||
}
|
||||
|
||||
impl TypingReference {
|
||||
/// Determine the kind of [`TypingReference`] for all references to a binding.
|
||||
pub(crate) fn from_references(
|
||||
binding: &Binding,
|
||||
semantic: &SemanticModel,
|
||||
settings: &LinterSettings,
|
||||
) -> Self {
|
||||
let references = binding
|
||||
.references()
|
||||
.map(|reference_id| semantic.reference(reference_id));
|
||||
let mut kind = Self::TypingOnly;
|
||||
for reference in references {
|
||||
if reference.in_type_checking_block() {
|
||||
kind = kind.combine(Self::TypingOnly);
|
||||
continue;
|
||||
}
|
||||
|
||||
// if we're not in a type checking block, we necessarily need to be within a
|
||||
// type definition to be considered a typing reference
|
||||
if !reference.in_type_definition() {
|
||||
return Self::Runtime;
|
||||
}
|
||||
|
||||
if reference.in_typing_only_annotation() || reference.in_string_type_definition() {
|
||||
kind = kind.combine(Self::TypingOnly);
|
||||
continue;
|
||||
}
|
||||
|
||||
// prefer `from __future__ import annotations` to quoting
|
||||
if settings.future_annotations()
|
||||
&& !reference.in_typing_only_annotation()
|
||||
&& reference.in_runtime_evaluated_annotation()
|
||||
{
|
||||
kind = kind.combine(Self::Future);
|
||||
continue;
|
||||
}
|
||||
|
||||
if settings.flake8_type_checking.quote_annotations
|
||||
&& reference.in_runtime_evaluated_annotation()
|
||||
{
|
||||
kind = kind.combine(Self::Quote);
|
||||
continue;
|
||||
}
|
||||
|
||||
return Self::Runtime;
|
||||
}
|
||||
|
||||
kind
|
||||
}
|
||||
|
||||
/// Logically combine two `TypingReference`s into one.
|
||||
///
|
||||
/// `TypingReference::Runtime` has the highest precedence, followed by
|
||||
/// `TypingReference::Future`, `TypingReference::Quote`, and then `TypingReference::TypingOnly`.
|
||||
fn combine(self, other: TypingReference) -> TypingReference {
|
||||
self.min(other)
|
||||
}
|
||||
|
||||
fn is_runtime(self) -> bool {
|
||||
matches!(self, Self::Runtime)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the [`Binding`] represents a runtime-required import.
|
||||
pub(crate) fn is_valid_runtime_import(
|
||||
binding: &Binding,
|
||||
semantic: &SemanticModel,
|
||||
settings: &Settings,
|
||||
settings: &LinterSettings,
|
||||
) -> bool {
|
||||
if matches!(
|
||||
binding.kind,
|
||||
BindingKind::Import(..) | BindingKind::FromImport(..) | BindingKind::SubmoduleImport(..)
|
||||
) {
|
||||
binding.context.is_runtime()
|
||||
&& binding
|
||||
.references()
|
||||
.map(|reference_id| semantic.reference(reference_id))
|
||||
.any(|reference| !is_typing_reference(reference, settings))
|
||||
&& TypingReference::from_references(binding, semantic, settings).is_runtime()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
||||
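As a rough illustration of the contexts the new `TypingReference` variants describe (a hand-written Python sketch, not output of the rule; the comment on `Config` describes a hypothetical configuration, not code shown here):

```python
from decimal import Decimal
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections import Counter  # only referenced in typing-only positions below


def tally(values: "Counter") -> int:  # TypingOnly: quoted annotation, never evaluated
    return sum(values.values())


class Config:
    # Imagine Config used a base class listed under
    # lint.flake8-type-checking.runtime-evaluated-base-classes: the annotation
    # below would then be runtime-evaluated, i.e. classified as Future when
    # lint.future-annotations is enabled, Quote when quote-annotations is
    # enabled, and Runtime when neither is.
    precision: Decimal


limit = Decimal("10")  # Runtime: a plain runtime use; Runtime wins in combine()
```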
@@ -13,6 +13,8 @@ pub(crate) struct ImportBinding<'a> {
|
||||
pub(crate) range: TextRange,
|
||||
/// The range of the import's parent statement.
|
||||
pub(crate) parent_range: Option<TextRange>,
|
||||
/// Whether the binding needs `from __future__ import annotations` to be imported.
|
||||
pub(crate) needs_future_import: bool,
|
||||
}
|
||||
|
||||
impl Ranged for ImportBinding<'_> {
|
||||
|
||||
@@ -9,10 +9,12 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use ruff_python_ast::PythonVersion;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::registry::{Linter, Rule};
|
||||
use crate::settings::types::PreviewMode;
|
||||
use crate::test::{test_path, test_snippet};
|
||||
use crate::{assert_diagnostics, settings};
|
||||
|
||||
@@ -64,6 +66,40 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(&[Rule::TypingOnlyFirstPartyImport], Path::new("TC001.py"))]
|
||||
#[test_case(&[Rule::TypingOnlyThirdPartyImport], Path::new("TC002.py"))]
|
||||
#[test_case(&[Rule::TypingOnlyStandardLibraryImport], Path::new("TC003.py"))]
|
||||
#[test_case(
|
||||
&[
|
||||
Rule::TypingOnlyFirstPartyImport,
|
||||
Rule::TypingOnlyThirdPartyImport,
|
||||
Rule::TypingOnlyStandardLibraryImport,
|
||||
],
|
||||
Path::new("TC001-3_future.py")
|
||||
)]
|
||||
#[test_case(&[Rule::TypingOnlyFirstPartyImport], Path::new("TC001_future.py"))]
|
||||
#[test_case(&[Rule::TypingOnlyFirstPartyImport], Path::new("TC001_future_present.py"))]
|
||||
fn add_future_import(rules: &[Rule], path: &Path) -> Result<()> {
|
||||
let name = rules.iter().map(Rule::noqa_code).join("-");
|
||||
let snapshot = format!("add_future_import__{}_{}", name, path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("flake8_type_checking").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
future_annotations: true,
|
||||
preview: PreviewMode::Enabled,
|
||||
// also enable quoting annotations to check the interaction. the future import
|
||||
// should take precedence.
|
||||
flake8_type_checking: super::settings::Settings {
|
||||
quote_annotations: true,
|
||||
..Default::default()
|
||||
},
|
||||
..settings::LinterSettings::for_rules(rules.iter().copied())
|
||||
},
|
||||
)?;
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// we test these rules as a pair, since they're opposites of one another
|
||||
// so we want to make sure their fixes are not going around in circles.
|
||||
#[test_case(Rule::UnquotedTypeAlias, Path::new("TC007.py"))]
|
||||
|
||||
@@ -139,6 +139,7 @@ pub(crate) fn runtime_import_in_type_checking_block(checker: &Checker, scope: &S
|
||||
binding,
|
||||
range: binding.range(),
|
||||
parent_range: binding.parent_range(checker.semantic()),
|
||||
needs_future_import: false, // TODO(brent) See #19359.
|
||||
};
|
||||
|
||||
if checker.rule_is_ignored(Rule::RuntimeImportInTypeCheckingBlock, import.start())
|
||||
|
||||
@@ -23,17 +23,28 @@ use crate::checkers::ast::Checker;
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// var: str | "int"
|
||||
/// var: "Foo" | None
|
||||
///
|
||||
///
|
||||
/// class Foo: ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// var: str | int
|
||||
/// from __future__ import annotations
|
||||
///
|
||||
/// var: Foo | None
|
||||
///
|
||||
///
|
||||
/// class Foo: ...
|
||||
/// ```
|
||||
///
|
||||
/// Or, extend the quotes to include the entire union:
|
||||
/// ```python
|
||||
/// var: "str | int"
|
||||
/// var: "Foo | None"
|
||||
///
|
||||
///
|
||||
/// class Foo: ...
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
|
||||
@@ -13,7 +13,7 @@ use crate::fix;
|
||||
use crate::importer::ImportedMembers;
|
||||
use crate::preview::is_full_path_match_source_strategy_enabled;
|
||||
use crate::rules::flake8_type_checking::helpers::{
|
||||
filter_contained, is_typing_reference, quote_annotation,
|
||||
TypingReference, filter_contained, quote_annotation,
|
||||
};
|
||||
use crate::rules::flake8_type_checking::imports::ImportBinding;
|
||||
use crate::rules::isort::categorize::MatchSourceStrategy;
|
||||
@@ -71,12 +71,19 @@ use crate::{Fix, FixAvailability, Violation};
|
||||
/// the criterion for determining whether an import is first-party
|
||||
/// is stricter, which could affect whether this lint is triggered vs [`TC002`](https://docs.astral.sh/ruff/rules/typing-only-third-party-import/). See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.
|
||||
///
|
||||
/// If [`lint.future-annotations`] is set to `true`, `from __future__ import
|
||||
/// annotations` will be added if doing so would enable an import to be moved into an `if
|
||||
/// TYPE_CHECKING:` block. This takes precedence over the
|
||||
/// [`lint.flake8-type-checking.quote-annotations`] setting described above if both settings are
|
||||
/// enabled.
|
||||
///
|
||||
/// ## Options
|
||||
/// - `lint.flake8-type-checking.quote-annotations`
|
||||
/// - `lint.flake8-type-checking.runtime-evaluated-base-classes`
|
||||
/// - `lint.flake8-type-checking.runtime-evaluated-decorators`
|
||||
/// - `lint.flake8-type-checking.strict`
|
||||
/// - `lint.typing-modules`
|
||||
/// - `lint.future-annotations`
|
||||
///
|
||||
/// ## References
|
||||
/// - [PEP 563: Runtime annotation resolution and `TYPE_CHECKING`](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||
@@ -151,12 +158,19 @@ impl Violation for TypingOnlyFirstPartyImport {
|
||||
/// the criterion for determining whether an import is first-party
|
||||
/// is stricter, which could affect whether this lint is triggered vs [`TC001`](https://docs.astral.sh/ruff/rules/typing-only-first-party-import/). See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.
|
||||
///
|
||||
/// If [`lint.future-annotations`] is set to `true`, `from __future__ import
|
||||
/// annotations` will be added if doing so would enable an import to be moved into an `if
|
||||
/// TYPE_CHECKING:` block. This takes precedence over the
|
||||
/// [`lint.flake8-type-checking.quote-annotations`] setting described above if both settings are
|
||||
/// enabled.
|
||||
///
|
||||
/// ## Options
|
||||
/// - `lint.flake8-type-checking.quote-annotations`
|
||||
/// - `lint.flake8-type-checking.runtime-evaluated-base-classes`
|
||||
/// - `lint.flake8-type-checking.runtime-evaluated-decorators`
|
||||
/// - `lint.flake8-type-checking.strict`
|
||||
/// - `lint.typing-modules`
|
||||
/// - `lint.future-annotations`
|
||||
///
|
||||
/// ## References
|
||||
/// - [PEP 563: Runtime annotation resolution and `TYPE_CHECKING`](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||
@@ -226,12 +240,22 @@ impl Violation for TypingOnlyThirdPartyImport {
|
||||
/// return str(path)
|
||||
/// ```
|
||||
///
|
||||
/// ## Preview
|
||||
///
|
||||
/// When [preview](https://docs.astral.sh/ruff/preview/) is enabled, if
|
||||
/// [`lint.future-annotations`] is set to `true`, `from __future__ import
|
||||
/// annotations` will be added if doing so would enable an import to be moved into an `if
|
||||
/// TYPE_CHECKING:` block. This takes precedence over the
|
||||
/// [`lint.flake8-type-checking.quote-annotations`] setting described above if both settings are
|
||||
/// enabled.
|
||||
///
|
||||
/// ## Options
|
||||
/// - `lint.flake8-type-checking.quote-annotations`
|
||||
/// - `lint.flake8-type-checking.runtime-evaluated-base-classes`
|
||||
/// - `lint.flake8-type-checking.runtime-evaluated-decorators`
|
||||
/// - `lint.flake8-type-checking.strict`
|
||||
/// - `lint.typing-modules`
|
||||
/// - `lint.future-annotations`
|
||||
///
|
||||
/// ## References
|
||||
/// - [PEP 563: Runtime annotation resolution and `TYPE_CHECKING`](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||
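The three rule docs above describe the same preview behaviour; a compact before/after sketch of it as applied to a hypothetical TC002 case (`pandas` is only used as the conventional third-party example and is never actually imported here):

```python
# Hypothetical TC002 scenario with preview and lint.future-annotations enabled.
before = '''\
import pandas as pd

def summarize(frame: pd.DataFrame) -> str: ...
'''

# With future-annotations, the import moves under TYPE_CHECKING and a
# __future__ import is added; the annotation itself stays unquoted, which is
# the precedence over quote-annotations described above.
after = '''\
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import pandas as pd

def summarize(frame: pd.DataFrame) -> str: ...
'''

print(after)
```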
@@ -271,9 +295,10 @@ pub(crate) fn typing_only_runtime_import(
|
||||
for binding_id in scope.binding_ids() {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
|
||||
// If we're in un-strict mode, don't flag typing-only imports that are
|
||||
// implicitly loaded by way of a valid runtime import.
|
||||
if !checker.settings().flake8_type_checking.strict
|
||||
// If we can't add a `__future__` import and in un-strict mode, don't flag typing-only
|
||||
// imports that are implicitly loaded by way of a valid runtime import.
|
||||
if !checker.settings().future_annotations()
|
||||
&& !checker.settings().flake8_type_checking.strict
|
||||
&& runtime_imports
|
||||
.iter()
|
||||
.any(|import| is_implicit_import(binding, import))
|
||||
@@ -289,95 +314,102 @@ pub(crate) fn typing_only_runtime_import(
|
||||
continue;
|
||||
};
|
||||
|
||||
if binding.context.is_runtime()
|
||||
&& binding
|
||||
.references()
|
||||
.map(|reference_id| checker.semantic().reference(reference_id))
|
||||
.all(|reference| {
|
||||
is_typing_reference(reference, &checker.settings().flake8_type_checking)
|
||||
})
|
||||
{
|
||||
let qualified_name = import.qualified_name();
|
||||
if !binding.context.is_runtime() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if is_exempt(
|
||||
&qualified_name.to_string(),
|
||||
&checker
|
||||
.settings()
|
||||
.flake8_type_checking
|
||||
.exempt_modules
|
||||
.iter()
|
||||
.map(String::as_str)
|
||||
.collect::<Vec<_>>(),
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
let typing_reference =
|
||||
TypingReference::from_references(binding, checker.semantic(), checker.settings());
|
||||
|
||||
let source_name = import.source_name().join(".");
|
||||
let needs_future_import = match typing_reference {
|
||||
TypingReference::Runtime => continue,
|
||||
// We can only get the `Future` variant if `future_annotations` is
|
||||
// enabled, so we can unconditionally set this here.
|
||||
TypingReference::Future => true,
|
||||
TypingReference::TypingOnly | TypingReference::Quote => false,
|
||||
};
|
||||
|
||||
// Categorize the import, using coarse-grained categorization.
|
||||
let match_source_strategy =
|
||||
if is_full_path_match_source_strategy_enabled(checker.settings()) {
|
||||
MatchSourceStrategy::FullPath
|
||||
} else {
|
||||
MatchSourceStrategy::Root
|
||||
};
|
||||
let qualified_name = import.qualified_name();
|
||||
|
||||
let import_type = match categorize(
|
||||
&source_name,
|
||||
qualified_name.is_unresolved_import(),
|
||||
&checker.settings().src,
|
||||
checker.package(),
|
||||
checker.settings().isort.detect_same_package,
|
||||
&checker.settings().isort.known_modules,
|
||||
checker.target_version(),
|
||||
checker.settings().isort.no_sections,
|
||||
&checker.settings().isort.section_order,
|
||||
&checker.settings().isort.default_section,
|
||||
match_source_strategy,
|
||||
) {
|
||||
ImportSection::Known(ImportType::LocalFolder | ImportType::FirstParty) => {
|
||||
ImportType::FirstParty
|
||||
}
|
||||
ImportSection::Known(ImportType::ThirdParty) | ImportSection::UserDefined(_) => {
|
||||
ImportType::ThirdParty
|
||||
}
|
||||
ImportSection::Known(ImportType::StandardLibrary) => ImportType::StandardLibrary,
|
||||
ImportSection::Known(ImportType::Future) => {
|
||||
continue;
|
||||
}
|
||||
};
|
||||
if is_exempt(
|
||||
&qualified_name.to_string(),
|
||||
&checker
|
||||
.settings()
|
||||
.flake8_type_checking
|
||||
.exempt_modules
|
||||
.iter()
|
||||
.map(String::as_str)
|
||||
.collect::<Vec<_>>(),
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if !checker.is_rule_enabled(rule_for(import_type)) {
|
||||
continue;
|
||||
}
|
||||
let source_name = import.source_name().join(".");
|
||||
|
||||
let Some(node_id) = binding.source else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let import = ImportBinding {
|
||||
import,
|
||||
reference_id,
|
||||
binding,
|
||||
range: binding.range(),
|
||||
parent_range: binding.parent_range(checker.semantic()),
|
||||
};
|
||||
|
||||
if checker.rule_is_ignored(rule_for(import_type), import.start())
|
||||
|| import.parent_range.is_some_and(|parent_range| {
|
||||
checker.rule_is_ignored(rule_for(import_type), parent_range.start())
|
||||
})
|
||||
{
|
||||
ignores_by_statement
|
||||
.entry((node_id, import_type))
|
||||
.or_default()
|
||||
.push(import);
|
||||
// Categorize the import, using coarse-grained categorization.
|
||||
let match_source_strategy =
|
||||
if is_full_path_match_source_strategy_enabled(checker.settings()) {
|
||||
MatchSourceStrategy::FullPath
|
||||
} else {
|
||||
errors_by_statement
|
||||
.entry((node_id, import_type))
|
||||
.or_default()
|
||||
.push(import);
|
||||
MatchSourceStrategy::Root
|
||||
};
|
||||
|
||||
let import_type = match categorize(
|
||||
&source_name,
|
||||
qualified_name.is_unresolved_import(),
|
||||
&checker.settings().src,
|
||||
checker.package(),
|
||||
checker.settings().isort.detect_same_package,
|
||||
&checker.settings().isort.known_modules,
|
||||
checker.target_version(),
|
||||
checker.settings().isort.no_sections,
|
||||
&checker.settings().isort.section_order,
|
||||
&checker.settings().isort.default_section,
|
||||
match_source_strategy,
|
||||
) {
|
||||
ImportSection::Known(ImportType::LocalFolder | ImportType::FirstParty) => {
|
||||
ImportType::FirstParty
|
||||
}
|
||||
ImportSection::Known(ImportType::ThirdParty) | ImportSection::UserDefined(_) => {
|
||||
ImportType::ThirdParty
|
||||
}
|
||||
ImportSection::Known(ImportType::StandardLibrary) => ImportType::StandardLibrary,
|
||||
ImportSection::Known(ImportType::Future) => {
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
if !checker.is_rule_enabled(rule_for(import_type)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Some(node_id) = binding.source else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let import = ImportBinding {
|
||||
import,
|
||||
reference_id,
|
||||
binding,
|
||||
range: binding.range(),
|
||||
parent_range: binding.parent_range(checker.semantic()),
|
||||
needs_future_import,
|
||||
};
|
||||
|
||||
if checker.rule_is_ignored(rule_for(import_type), import.start())
|
||||
|| import.parent_range.is_some_and(|parent_range| {
|
||||
checker.rule_is_ignored(rule_for(import_type), parent_range.start())
|
||||
})
|
||||
{
|
||||
ignores_by_statement
|
||||
.entry((node_id, import_type))
|
||||
.or_default()
|
||||
.push(import);
|
||||
} else {
|
||||
errors_by_statement
|
||||
.entry((node_id, import_type))
|
||||
.or_default()
|
||||
.push(import);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -509,6 +541,8 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) ->
|
||||
.min()
|
||||
.expect("Expected at least one import");
|
||||
|
||||
let add_future_import = imports.iter().any(|binding| binding.needs_future_import);
|
||||
|
||||
// Step 1) Remove the import.
|
||||
let remove_import_edit = fix::edits::remove_unused_imports(
|
||||
member_names.iter().map(AsRef::as_ref),
|
||||
@@ -532,37 +566,52 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) ->
|
||||
)?
|
||||
.into_edits();
|
||||
|
||||
// Step 3) Quote any runtime usages of the referenced symbol.
|
||||
let quote_reference_edits = filter_contained(
|
||||
imports
|
||||
.iter()
|
||||
.flat_map(|ImportBinding { binding, .. }| {
|
||||
binding.references.iter().filter_map(|reference_id| {
|
||||
let reference = checker.semantic().reference(*reference_id);
|
||||
if reference.in_runtime_context() {
|
||||
Some(quote_annotation(
|
||||
reference.expression_id()?,
|
||||
checker.semantic(),
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
checker.default_string_flags(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
// Step 3) Either add a `__future__` import or quote any runtime usages of the referenced
|
||||
// symbol.
|
||||
let fix = if add_future_import {
|
||||
let future_import = checker.importer().add_future_import();
|
||||
|
||||
Ok(Fix::unsafe_edits(
|
||||
type_checking_edit,
|
||||
add_import_edit
|
||||
.into_iter()
|
||||
.chain(std::iter::once(remove_import_edit))
|
||||
.chain(quote_reference_edits),
|
||||
)
|
||||
.isolate(Checker::isolation(
|
||||
// The order here is very important. We first need to add the `__future__` import, if
|
||||
// needed, since it's a syntax error to come later. Then `type_checking_edit` imports
|
||||
// `TYPE_CHECKING`, if available. Then we can add and/or remove existing imports.
|
||||
Fix::unsafe_edits(
|
||||
future_import,
|
||||
std::iter::once(type_checking_edit)
|
||||
.chain(add_import_edit)
|
||||
.chain(std::iter::once(remove_import_edit)),
|
||||
)
|
||||
} else {
|
||||
let quote_reference_edits = filter_contained(
|
||||
imports
|
||||
.iter()
|
||||
.flat_map(|ImportBinding { binding, .. }| {
|
||||
binding.references.iter().filter_map(|reference_id| {
|
||||
let reference = checker.semantic().reference(*reference_id);
|
||||
if reference.in_runtime_context() {
|
||||
Some(quote_annotation(
|
||||
reference.expression_id()?,
|
||||
checker.semantic(),
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
checker.default_string_flags(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
Fix::unsafe_edits(
|
||||
type_checking_edit,
|
||||
add_import_edit
|
||||
.into_iter()
|
||||
.chain(std::iter::once(remove_import_edit))
|
||||
.chain(quote_reference_edits),
|
||||
)
|
||||
};
|
||||
|
||||
Ok(fix.isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)))
|
||||
}
|
||||
|
||||
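The ordering comment in the fix construction above exists because a `from __future__` import is only legal before any other statements; emitting it after the other edits would produce invalid syntax. A quick check of that constraint:

```python
# A __future__ import is only valid at the very top of a module (after an
# optional docstring and comments); anywhere else it is a SyntaxError.
good = "from __future__ import annotations\nimport os\n"
bad = "import os\nfrom __future__ import annotations\n"

compile(good, "<good>", "exec")  # compiles fine

try:
    compile(bad, "<bad>", "exec")
except SyntaxError as err:
    print(f"SyntaxError as expected: {err.msg}")
```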
@@ -0,0 +1,76 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
TC001-3_future.py:1:25: TC003 [*] Move standard library import `collections.Counter` into a type-checking block
|
||||
|
|
||||
1 | from collections import Counter
|
||||
| ^^^^^^^ TC003
|
||||
2 |
|
||||
3 | from elsewhere import third_party
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |-from collections import Counter
|
||||
1 |+from __future__ import annotations
|
||||
2 2 |
|
||||
3 3 | from elsewhere import third_party
|
||||
4 4 |
|
||||
5 5 | from . import first_party
|
||||
6 |+from typing import TYPE_CHECKING
|
||||
7 |+
|
||||
8 |+if TYPE_CHECKING:
|
||||
9 |+ from collections import Counter
|
||||
6 10 |
|
||||
7 11 |
|
||||
8 12 | def f(x: first_party.foo): ...
|
||||
|
||||
TC001-3_future.py:3:23: TC002 [*] Move third-party import `elsewhere.third_party` into a type-checking block
|
||||
|
|
||||
1 | from collections import Counter
|
||||
2 |
|
||||
3 | from elsewhere import third_party
|
||||
| ^^^^^^^^^^^ TC002
|
||||
4 |
|
||||
5 | from . import first_party
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | from collections import Counter
|
||||
2 3 |
|
||||
3 |-from elsewhere import third_party
|
||||
4 4 |
|
||||
5 5 | from . import first_party
|
||||
6 |+from typing import TYPE_CHECKING
|
||||
7 |+
|
||||
8 |+if TYPE_CHECKING:
|
||||
9 |+ from elsewhere import third_party
|
||||
6 10 |
|
||||
7 11 |
|
||||
8 12 | def f(x: first_party.foo): ...
|
||||
|
||||
TC001-3_future.py:5:15: TC001 [*] Move application import `.first_party` into a type-checking block
|
||||
|
|
||||
3 | from elsewhere import third_party
|
||||
4 |
|
||||
5 | from . import first_party
|
||||
| ^^^^^^^^^^^ TC001
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | from collections import Counter
|
||||
2 3 |
|
||||
3 4 | from elsewhere import third_party
|
||||
4 5 |
|
||||
5 |-from . import first_party
|
||||
6 |+from typing import TYPE_CHECKING
|
||||
7 |+
|
||||
8 |+if TYPE_CHECKING:
|
||||
9 |+ from . import first_party
|
||||
6 10 |
|
||||
7 11 |
|
||||
8 12 | def f(x: first_party.foo): ...
|
||||
@@ -0,0 +1,32 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
TC001.py:20:19: TC001 [*] Move application import `.TYP001` into a type-checking block
|
||||
|
|
||||
19 | def f():
|
||||
20 | from . import TYP001
|
||||
| ^^^^^^ TC001
|
||||
21 |
|
||||
22 | x: TYP001
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 |
|
||||
3 3 | For typing-only import detection tests, see `TC002.py`.
|
||||
4 4 | """
|
||||
5 |+from typing import TYPE_CHECKING
|
||||
6 |+
|
||||
7 |+if TYPE_CHECKING:
|
||||
8 |+ from . import TYP001
|
||||
5 9 |
|
||||
6 10 |
|
||||
7 11 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
17 21 |
|
||||
18 22 |
|
||||
19 23 | def f():
|
||||
20 |- from . import TYP001
|
||||
21 24 |
|
||||
22 25 | x: TYP001
|
||||
23 26 |
|
||||
@@ -0,0 +1,56 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
TC001_future.py:2:19: TC001 [*] Move application import `.first_party` into a type-checking block
|
||||
|
|
||||
1 | def f():
|
||||
2 | from . import first_party
|
||||
| ^^^^^^^^^^^ TC001
|
||||
3 |
|
||||
4 | def f(x: first_party.foo): ...
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |-def f():
|
||||
1 |+from __future__ import annotations
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
2 5 | from . import first_party
|
||||
6 |+def f():
|
||||
3 7 |
|
||||
4 8 | def f(x: first_party.foo): ...
|
||||
5 9 |
|
||||
|
||||
TC001_future.py:57:19: TC001 [*] Move application import `.foo` into a type-checking block
|
||||
|
|
||||
56 | def n():
|
||||
57 | from . import foo
|
||||
| ^^^ TC001
|
||||
58 |
|
||||
59 | def f(x: Union[foo.Ty, int]): ...
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def f():
|
||||
2 3 | from . import first_party
|
||||
3 4 |
|
||||
--------------------------------------------------------------------------------
|
||||
50 51 |
|
||||
51 52 |
|
||||
52 53 | # unions
|
||||
53 |-from typing import Union
|
||||
54 |+from typing import Union, TYPE_CHECKING
|
||||
54 55 |
|
||||
56 |+if TYPE_CHECKING:
|
||||
57 |+ from . import foo
|
||||
58 |+
|
||||
55 59 |
|
||||
56 60 | def n():
|
||||
57 |- from . import foo
|
||||
58 61 |
|
||||
59 62 | def f(x: Union[foo.Ty, int]): ...
|
||||
60 63 | def g(x: foo.Ty | int): ...
|
||||
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
TC001_future_present.py:3:15: TC001 [*] Move application import `.first_party` into a type-checking block
|
||||
|
|
||||
1 | from __future__ import annotations
|
||||
2 |
|
||||
3 | from . import first_party
|
||||
| ^^^^^^^^^^^ TC001
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | from __future__ import annotations
|
||||
2 2 |
|
||||
3 |-from . import first_party
|
||||
3 |+from typing import TYPE_CHECKING
|
||||
4 |+
|
||||
5 |+if TYPE_CHECKING:
|
||||
6 |+ from . import first_party
|
||||
4 7 |
|
||||
5 8 |
|
||||
6 9 | def f(x: first_party.foo): ...
|
||||
@@ -0,0 +1,251 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
TC002.py:5:22: TC002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
4 | def f():
|
||||
5 | import pandas as pd # TC002
|
||||
| ^^ TC002
|
||||
6 |
|
||||
7 | x: pd.DataFrame
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ import pandas as pd
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
5 |- import pandas as pd # TC002
|
||||
6 9 |
|
||||
7 10 | x: pd.DataFrame
|
||||
8 11 |
|
||||
|
||||
TC002.py:11:24: TC002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
10 | def f():
|
||||
11 | from pandas import DataFrame # TC002
|
||||
| ^^^^^^^^^ TC002
|
||||
12 |
|
||||
13 | x: DataFrame
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ from pandas import DataFrame
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
8 12 |
|
||||
9 13 |
|
||||
10 14 | def f():
|
||||
11 |- from pandas import DataFrame # TC002
|
||||
12 15 |
|
||||
13 16 | x: DataFrame
|
||||
14 17 |
|
||||
|
||||
TC002.py:17:37: TC002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
16 | def f():
|
||||
17 | from pandas import DataFrame as df # TC002
|
||||
| ^^ TC002
|
||||
18 |
|
||||
19 | x: df
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ from pandas import DataFrame as df
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
14 18 |
|
||||
15 19 |
|
||||
16 20 | def f():
|
||||
17 |- from pandas import DataFrame as df # TC002
|
||||
18 21 |
|
||||
19 22 | x: df
|
||||
20 23 |
|
||||
|
||||
TC002.py:23:22: TC002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
22 | def f():
|
||||
23 | import pandas as pd # TC002
|
||||
| ^^ TC002
|
||||
24 |
|
||||
25 | x: pd.DataFrame = 1
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ import pandas as pd
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
20 24 |
|
||||
21 25 |
|
||||
22 26 | def f():
|
||||
23 |- import pandas as pd # TC002
|
||||
24 27 |
|
||||
25 28 | x: pd.DataFrame = 1
|
||||
26 29 |
|
||||
|
||||
TC002.py:29:24: TC002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
28 | def f():
|
||||
29 | from pandas import DataFrame # TC002
|
||||
| ^^^^^^^^^ TC002
|
||||
30 |
|
||||
31 | x: DataFrame = 2
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ from pandas import DataFrame
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
26 30 |
|
||||
27 31 |
|
||||
28 32 | def f():
|
||||
29 |- from pandas import DataFrame # TC002
|
||||
30 33 |
|
||||
31 34 | x: DataFrame = 2
|
||||
32 35 |
|
||||
|
||||
TC002.py:35:37: TC002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
34 | def f():
|
||||
35 | from pandas import DataFrame as df # TC002
|
||||
| ^^ TC002
|
||||
36 |
|
||||
37 | x: df = 3
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ from pandas import DataFrame as df
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
32 36 |
|
||||
33 37 |
|
||||
34 38 | def f():
|
||||
35 |- from pandas import DataFrame as df # TC002
|
||||
36 39 |
|
||||
37 40 | x: df = 3
|
||||
38 41 |
|
||||
|
||||
TC002.py:41:22: TC002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
40 | def f():
|
||||
41 | import pandas as pd # TC002
|
||||
| ^^ TC002
|
||||
42 |
|
||||
43 | x: "pd.DataFrame" = 1
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ import pandas as pd
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
38 42 |
|
||||
39 43 |
|
||||
40 44 | def f():
|
||||
41 |- import pandas as pd # TC002
|
||||
42 45 |
|
||||
43 46 | x: "pd.DataFrame" = 1
|
||||
44 47 |
|
||||
|
||||
TC002.py:47:22: TC002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
46 | def f():
|
||||
47 | import pandas as pd # TC002
|
||||
| ^^ TC002
|
||||
48 |
|
||||
49 | x = dict["pd.DataFrame", "pd.DataFrame"]
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ import pandas as pd
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
44 48 |
|
||||
45 49 |
|
||||
46 50 | def f():
|
||||
47 |- import pandas as pd # TC002
|
||||
48 51 |
|
||||
49 52 | x = dict["pd.DataFrame", "pd.DataFrame"]
|
||||
50 53 |
|
||||
|
||||
TC002.py:172:24: TC002 [*] Move third-party import `module.Member` into a type-checking block
|
||||
|
|
||||
170 | global Member
|
||||
171 |
|
||||
172 | from module import Member
|
||||
| ^^^^^^ TC002
|
||||
173 |
|
||||
174 | x: Member = 1
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | """Tests to determine accurate detection of typing-only imports."""
|
||||
2 |+from typing import TYPE_CHECKING
|
||||
3 |+
|
||||
4 |+if TYPE_CHECKING:
|
||||
5 |+ from module import Member
|
||||
2 6 |
|
||||
3 7 |
|
||||
4 8 | def f():
|
||||
--------------------------------------------------------------------------------
|
||||
169 173 | def f():
|
||||
170 174 | global Member
|
||||
171 175 |
|
||||
172 |- from module import Member
|
||||
173 176 |
|
||||
174 177 | x: Member = 1
|
||||
175 178 |
|
||||
@@ -0,0 +1,28 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
TC003.py:8:12: TC003 [*] Move standard library import `os` into a type-checking block
|
||||
|
|
||||
7 | def f():
|
||||
8 | import os
|
||||
| ^^ TC003
|
||||
9 |
|
||||
10 | x: os
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 |
|
||||
3 3 | For typing-only import detection tests, see `TC002.py`.
|
||||
4 4 | """
|
||||
5 |+from typing import TYPE_CHECKING
|
||||
6 |+
|
||||
7 |+if TYPE_CHECKING:
|
||||
8 |+ import os
|
||||
5 9 |
|
||||
6 10 |
|
||||
7 11 | def f():
|
||||
8 |- import os
|
||||
9 12 |
|
||||
10 13 | x: os
|
||||
11 14 |
|
||||
@@ -8,6 +8,7 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use ruff_python_ast::PythonVersion;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::assert_diagnostics;
|
||||
@@ -143,4 +144,22 @@ mod tests {
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::InvalidPathlibWithSuffix, Path::new("PTH210_2.py"))]
|
||||
fn pathlib_with_suffix_py314(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"py314__{}_{}",
|
||||
rule_code.noqa_code(),
|
||||
path.to_string_lossy()
|
||||
);
|
||||
let diagnostics = test_path(
|
||||
Path::new("flake8_use_pathlib").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
unresolved_target_version: PythonVersion::PY314.into(),
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
)?;
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +1,21 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::{Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::{self as ast, StringFlags};
|
||||
use ruff_python_ast::{self as ast, PythonVersion, StringFlags};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_semantic::analyze::typing;
|
||||
use ruff_python_semantic::analyze::typing::{self, PathlibPathChecker, TypeChecker};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for `pathlib.Path.with_suffix()` calls where
|
||||
/// the given suffix does not have a leading dot
|
||||
/// or the given suffix is a single dot `"."`.
|
||||
/// or the given suffix is a single dot `"."` and the
|
||||
/// Python version is less than 3.14.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `Path.with_suffix()` will raise an error at runtime
|
||||
/// if the given suffix is not prefixed with a dot
|
||||
/// or it is a single dot `"."`.
|
||||
/// or, in versions prior to Python 3.14, if it is a single dot `"."`.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
@@ -57,9 +58,6 @@ use ruff_text_size::Ranged;
|
||||
/// No fix is offered if the suffix `"."` is given, since the intent is unclear.
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct InvalidPathlibWithSuffix {
|
||||
// TODO: Since "." is a correct suffix in Python 3.14,
|
||||
// we will need to update this rule and documentation
|
||||
// once Ruff supports Python 3.14.
|
||||
single_dot: bool,
|
||||
}
|
||||
|
||||
@@ -116,6 +114,13 @@ pub(crate) fn invalid_pathlib_with_suffix(checker: &Checker, call: &ast::ExprCal
|
||||
};
|
||||
|
||||
let single_dot = string_value == ".";
|
||||
|
||||
// As of Python 3.14, a single dot is considered a valid suffix.
|
||||
// https://docs.python.org/3.14/library/pathlib.html#pathlib.PurePath.with_suffix
|
||||
if single_dot && checker.target_version() >= PythonVersion::PY314 {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut diagnostic =
|
||||
checker.report_diagnostic(InvalidPathlibWithSuffix { single_dot }, call.range);
|
||||
if !single_dot {
|
||||
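A small sketch of the behaviour difference the new version gate accounts for; the exact 3.14 return value is not asserted here, only that the call no longer raises:

```python
import sys
from pathlib import PurePath

p = PurePath("archive.tar.gz")

if sys.version_info >= (3, 14):
    # Accepted on 3.14+, which is why the rule skips single-dot suffixes
    # when the target version is 3.14 or newer.
    print(p.with_suffix("."))
else:
    try:
        p.with_suffix(".")
    except ValueError as err:
        # Older versions reject a bare "." suffix.
        print(err)
```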
@@ -136,12 +141,13 @@ fn is_path_with_suffix_call(semantic: &SemanticModel, func: &ast::Expr) -> bool
|
||||
return false;
|
||||
}
|
||||
|
||||
let ast::Expr::Name(name) = &**value else {
|
||||
return false;
|
||||
};
|
||||
let Some(binding) = semantic.only_binding(name).map(|id| semantic.binding(id)) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
typing::is_pathlib_path(binding, semantic)
|
||||
match &**value {
|
||||
ast::Expr::Name(name) => {
|
||||
let Some(binding) = semantic.only_binding(name).map(|id| semantic.binding(id)) else {
|
||||
return false;
|
||||
};
|
||||
typing::is_pathlib_path(binding, semantic)
|
||||
}
|
||||
expr => PathlibPathChecker::match_initializer(expr, semantic),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
pub(crate) use glob_rule::*;
|
||||
pub(crate) use invalid_pathlib_with_suffix::*;
|
||||
pub(crate) use os_getcwd::*;
|
||||
pub(crate) use os_path_abspath::*;
|
||||
pub(crate) use os_path_basename::*;
|
||||
pub(crate) use os_path_dirname::*;
|
||||
@@ -23,6 +24,7 @@ pub(crate) use replaceable_by_pathlib::*;
|
||||
|
||||
mod glob_rule;
|
||||
mod invalid_pathlib_with_suffix;
|
||||
mod os_getcwd;
|
||||
mod os_path_abspath;
|
||||
mod os_path_basename;
|
||||
mod os_path_dirname;
|
||||
|
||||
@@ -0,0 +1,100 @@
use crate::checkers::ast::Checker;
use crate::importer::ImportRequest;
use crate::preview::is_fix_os_getcwd_enabled;
use crate::{FixAvailability, Violation};
use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::ExprCall;
use ruff_text_size::Ranged;

/// ## What it does
/// Checks for uses of `os.getcwd` and `os.getcwdb`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.cwd()` can improve readability over the `os`
/// module's counterparts (e.g., `os.getcwd()`).
///
/// ## Examples
/// ```python
/// import os
///
/// cwd = os.getcwd()
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// cwd = Path.cwd()
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## Fix Safety
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
///
/// ## References
/// - [Python documentation: `Path.cwd`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.cwd)
/// - [Python documentation: `os.getcwd`](https://docs.python.org/3/library/os.html#os.getcwd)
/// - [Python documentation: `os.getcwdb`](https://docs.python.org/3/library/os.html#os.getcwdb)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsGetcwd;

impl Violation for OsGetcwd {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
#[derive_message_formats]
fn message(&self) -> String {
"`os.getcwd()` should be replaced by `Path.cwd()`".to_string()
}

fn fix_title(&self) -> Option<String> {
Some("Replace with `Path.cwd()`".to_string())
}
}

/// PTH109
pub(crate) fn os_getcwd(checker: &Checker, call: &ExprCall, segments: &[&str]) {
if !matches!(segments, ["os", "getcwd" | "getcwdb"]) {
return;
}

let range = call.range();
let mut diagnostic = checker.report_diagnostic(OsGetcwd, call.func.range());

if !call.arguments.is_empty() {
return;
}

if is_fix_os_getcwd_enabled(checker.settings()) {
diagnostic.try_set_fix(|| {
let (import_edit, binding) = checker.importer().get_or_import_symbol(
&ImportRequest::import("pathlib", "Path"),
call.start(),
checker.semantic(),
)?;

let applicability = if checker.comment_ranges().intersects(range) {
Applicability::Unsafe
} else {
Applicability::Safe
};

let replacement = format!("{binding}.cwd()");

Ok(Fix::applicable_edits(
Edit::range_replacement(replacement, range),
[import_edit],
applicability,
))
});
}
}
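A before/after sketch of what the preview fix produces, mirroring the `full_name.py` snapshots further down in this diff (when no `Path` binding is in scope, the importer adds a `pathlib` import and spells the replacement through it):

```python
# Before (flagged by PTH109):
import os

cwd = os.getcwd()

# After the preview fix:
import os
import pathlib

cwd = pathlib.Path.cwd()
```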
@@ -7,8 +7,8 @@ use crate::checkers::ast::Checker;
use crate::rules::flake8_use_pathlib::helpers::is_keyword_only_argument_non_default;
use crate::rules::flake8_use_pathlib::rules::Glob;
use crate::rules::flake8_use_pathlib::violations::{
BuiltinOpen, Joiner, OsChmod, OsGetcwd, OsListdir, OsMakedirs, OsMkdir, OsPathJoin,
OsPathSamefile, OsPathSplitext, OsRename, OsReplace, OsStat, OsSymlink, PyPath,
BuiltinOpen, Joiner, OsChmod, OsListdir, OsMakedirs, OsMkdir, OsPathJoin, OsPathSamefile,
OsPathSplitext, OsRename, OsReplace, OsStat, OsSymlink, PyPath,
};

pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
@@ -83,10 +83,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
}
checker.report_diagnostic_if_enabled(OsReplace, range)
}
// PTH109
["os", "getcwd"] => checker.report_diagnostic_if_enabled(OsGetcwd, range),
["os", "getcwdb"] => checker.report_diagnostic_if_enabled(OsGetcwd, range),

// PTH116
["os", "stat"] => {
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
@@ -536,7 +536,7 @@ PTH210.py:54:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
54 |+windows_path.with_suffix(u'.' "json")
55 55 | windows_path.with_suffix(suffix="js")
56 56 |
57 57 |
57 57 | Path().with_suffix(".")

PTH210.py:55:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
@@ -544,6 +544,8 @@ PTH210.py:55:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
54 | windows_path.with_suffix(u'' "json")
55 | windows_path.with_suffix(suffix="js")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
56 |
57 | Path().with_suffix(".")
|
= help: Add a leading dot

@@ -554,5 +556,140 @@ PTH210.py:55:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
55 |-windows_path.with_suffix(suffix="js")
55 |+windows_path.with_suffix(suffix=".js")
56 56 |
57 57 |
58 58 | ### No errors
57 57 | Path().with_suffix(".")
58 58 | Path().with_suffix("py")

PTH210.py:57:1: PTH210 Invalid suffix passed to `.with_suffix()`
|
55 | windows_path.with_suffix(suffix="js")
56 |
57 | Path().with_suffix(".")
| ^^^^^^^^^^^^^^^^^^^^^^^ PTH210
58 | Path().with_suffix("py")
59 | PosixPath().with_suffix("py")
|
= help: Remove "." or extend to valid suffix

PTH210.py:58:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
57 | Path().with_suffix(".")
58 | Path().with_suffix("py")
| ^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
59 | PosixPath().with_suffix("py")
60 | PurePath().with_suffix("py")
|
= help: Add a leading dot

ℹ Unsafe fix
55 55 | windows_path.with_suffix(suffix="js")
56 56 |
57 57 | Path().with_suffix(".")
58 |-Path().with_suffix("py")
58 |+Path().with_suffix(".py")
59 59 | PosixPath().with_suffix("py")
60 60 | PurePath().with_suffix("py")
61 61 | PurePosixPath().with_suffix("py")

PTH210.py:59:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
57 | Path().with_suffix(".")
58 | Path().with_suffix("py")
59 | PosixPath().with_suffix("py")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
60 | PurePath().with_suffix("py")
61 | PurePosixPath().with_suffix("py")
|
= help: Add a leading dot

ℹ Unsafe fix
56 56 |
57 57 | Path().with_suffix(".")
58 58 | Path().with_suffix("py")
59 |-PosixPath().with_suffix("py")
59 |+PosixPath().with_suffix(".py")
60 60 | PurePath().with_suffix("py")
61 61 | PurePosixPath().with_suffix("py")
62 62 | PureWindowsPath().with_suffix("py")

PTH210.py:60:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
58 | Path().with_suffix("py")
59 | PosixPath().with_suffix("py")
60 | PurePath().with_suffix("py")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
61 | PurePosixPath().with_suffix("py")
62 | PureWindowsPath().with_suffix("py")
|
= help: Add a leading dot

ℹ Unsafe fix
57 57 | Path().with_suffix(".")
58 58 | Path().with_suffix("py")
59 59 | PosixPath().with_suffix("py")
60 |-PurePath().with_suffix("py")
60 |+PurePath().with_suffix(".py")
61 61 | PurePosixPath().with_suffix("py")
62 62 | PureWindowsPath().with_suffix("py")
63 63 | WindowsPath().with_suffix("py")

PTH210.py:61:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
59 | PosixPath().with_suffix("py")
60 | PurePath().with_suffix("py")
61 | PurePosixPath().with_suffix("py")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
62 | PureWindowsPath().with_suffix("py")
63 | WindowsPath().with_suffix("py")
|
= help: Add a leading dot

ℹ Unsafe fix
58 58 | Path().with_suffix("py")
59 59 | PosixPath().with_suffix("py")
60 60 | PurePath().with_suffix("py")
61 |-PurePosixPath().with_suffix("py")
61 |+PurePosixPath().with_suffix(".py")
62 62 | PureWindowsPath().with_suffix("py")
63 63 | WindowsPath().with_suffix("py")
64 64 |

PTH210.py:62:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
60 | PurePath().with_suffix("py")
61 | PurePosixPath().with_suffix("py")
62 | PureWindowsPath().with_suffix("py")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
63 | WindowsPath().with_suffix("py")
|
= help: Add a leading dot

ℹ Unsafe fix
59 59 | PosixPath().with_suffix("py")
60 60 | PurePath().with_suffix("py")
61 61 | PurePosixPath().with_suffix("py")
62 |-PureWindowsPath().with_suffix("py")
62 |+PureWindowsPath().with_suffix(".py")
63 63 | WindowsPath().with_suffix("py")
64 64 |
65 65 | ### No errors

PTH210.py:63:1: PTH210 [*] Dotless suffix passed to `.with_suffix()`
|
61 | PurePosixPath().with_suffix("py")
62 | PureWindowsPath().with_suffix("py")
63 | WindowsPath().with_suffix("py")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH210
64 |
65 | ### No errors
|
= help: Add a leading dot

ℹ Unsafe fix
60 60 | PurePath().with_suffix("py")
61 61 | PurePosixPath().with_suffix("py")
62 62 | PureWindowsPath().with_suffix("py")
63 |-WindowsPath().with_suffix("py")
63 |+WindowsPath().with_suffix(".py")
64 64 |
65 65 | ### No errors
66 66 | path.with_suffix()
@@ -103,6 +103,7 @@ full_name.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = os.path.exists(p)
18 | bb = os.path.expanduser(p)
|
= help: Replace with `Path.cwd()`

full_name.py:17:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|
@@ -292,6 +293,7 @@ full_name.py:35:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
36 | os.path.join(p, *q)
37 | os.sep.join(p, *q)
|
= help: Replace with `Path.cwd()`

full_name.py:36:1: PTH118 `os.path.join()` should be replaced by `Path.joinpath()`
|
@@ -360,3 +362,21 @@ full_name.py:71:1: PTH123 `open()` should be replaced by `Path.open()`
72 |
73 | # https://github.com/astral-sh/ruff/issues/17693
|

full_name.py:108:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
|
106 | os.replace("src", "dst", dst_dir_fd=2)
107 |
108 | os.getcwd()
| ^^^^^^^^^ PTH109
109 | os.getcwdb()
|
= help: Replace with `Path.cwd()`

full_name.py:109:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
|
108 | os.getcwd()
109 | os.getcwdb()
| ^^^^^^^^^^ PTH109
|
= help: Replace with `Path.cwd()`

@@ -103,6 +103,7 @@ import_as.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = foo_p.exists(p)
18 | bb = foo_p.expanduser(p)
|
= help: Replace with `Path.cwd()`

import_as.py:17:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|

@@ -103,6 +103,7 @@ import_from.py:18:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
19 | b = exists(p)
20 | bb = expanduser(p)
|
= help: Replace with `Path.cwd()`

import_from.py:19:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|

@@ -103,6 +103,7 @@ import_from_as.py:23:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
24 | b = xexists(p)
25 | bb = xexpanduser(p)
|
= help: Replace with `Path.cwd()`

import_from_as.py:24:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|

@@ -168,6 +168,7 @@ full_name.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = os.path.exists(p)
18 | bb = os.path.expanduser(p)
|
= help: Replace with `Path.cwd()`

full_name.py:17:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|
@@ -510,6 +511,7 @@ full_name.py:35:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
36 | os.path.join(p, *q)
37 | os.sep.join(p, *q)
|
= help: Replace with `Path.cwd()`

full_name.py:36:1: PTH118 `os.path.join()` should be replaced by `Path.joinpath()`
|
@@ -578,3 +580,50 @@ full_name.py:71:1: PTH123 `open()` should be replaced by `Path.open()`
72 |
73 | # https://github.com/astral-sh/ruff/issues/17693
|

full_name.py:108:1: PTH109 [*] `os.getcwd()` should be replaced by `Path.cwd()`
|
106 | os.replace("src", "dst", dst_dir_fd=2)
107 |
108 | os.getcwd()
| ^^^^^^^^^ PTH109
109 | os.getcwdb()
|
= help: Replace with `Path.cwd()`

ℹ Safe fix
1 1 | import os
2 2 | import os.path
3 |+import pathlib
3 4 |
4 5 | p = "/foo"
5 6 | q = "bar"
--------------------------------------------------------------------------------
105 106 | os.replace("src", "dst", src_dir_fd=1)
106 107 | os.replace("src", "dst", dst_dir_fd=2)
107 108 |
108 |-os.getcwd()
109 |+pathlib.Path.cwd()
109 110 | os.getcwdb()

full_name.py:109:1: PTH109 [*] `os.getcwd()` should be replaced by `Path.cwd()`
|
108 | os.getcwd()
109 | os.getcwdb()
| ^^^^^^^^^^ PTH109
|
= help: Replace with `Path.cwd()`

ℹ Safe fix
1 1 | import os
2 2 | import os.path
3 |+import pathlib
3 4 |
4 5 | p = "/foo"
5 6 | q = "bar"
--------------------------------------------------------------------------------
106 107 | os.replace("src", "dst", dst_dir_fd=2)
107 108 |
108 109 | os.getcwd()
109 |-os.getcwdb()
110 |+pathlib.Path.cwd()

@@ -168,6 +168,7 @@ import_as.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = foo_p.exists(p)
18 | bb = foo_p.expanduser(p)
|
= help: Replace with `Path.cwd()`

import_as.py:17:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|

@@ -172,6 +172,7 @@ import_from.py:18:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
19 | b = exists(p)
20 | bb = expanduser(p)
|
= help: Replace with `Path.cwd()`

import_from.py:19:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|

@@ -172,6 +172,7 @@ import_from_as.py:23:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
24 | b = xexists(p)
25 | bb = xexpanduser(p)
|
= help: Replace with `Path.cwd()`

import_from_as.py:24:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|
@@ -0,0 +1,4 @@
---
source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
---
@@ -230,52 +230,6 @@ impl Violation for OsReplace {
}
}

/// ## What it does
/// Checks for uses of `os.getcwd` and `os.getcwdb`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.cwd()` can improve readability over the `os`
/// module's counterparts (e.g., `os.getcwd()`).
///
/// ## Examples
/// ```python
/// import os
///
/// cwd = os.getcwd()
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// cwd = Path.cwd()
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.cwd`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.cwd)
/// - [Python documentation: `os.getcwd`](https://docs.python.org/3/library/os.html#os.getcwd)
/// - [Python documentation: `os.getcwdb`](https://docs.python.org/3/library/os.html#os.getcwdb)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsGetcwd;

impl Violation for OsGetcwd {
#[derive_message_formats]
fn message(&self) -> String {
"`os.getcwd()` should be replaced by `Path.cwd()`".to_string()
}
}

/// ## What it does
/// Checks for uses of `os.stat`.
///
@@ -801,6 +801,7 @@ mod tests {
#[test_case(Path::new("existing_import.py"))]
#[test_case(Path::new("multiline_docstring.py"))]
#[test_case(Path::new("off.py"))]
#[test_case(Path::new("whitespace.py"))]
fn required_import(path: &Path) -> Result<()> {
let snapshot = format!("required_import_{}", path.to_string_lossy());
let diagnostics = test_path(
@@ -0,0 +1,11 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---
whitespace.py:1:1: I002 [*] Missing required import: `from __future__ import annotations`
ℹ Safe fix
1 1 | # This is a regression test for https://github.com/astral-sh/ruff/issues/19310
2 2 | # there is a (potentially invisible) unicode formfeed character (000C) between "docstring" and the semicolon
3 |-"docstring"; print(
3 |+"docstring"; from __future__ import annotations; print(
4 4 | f"{__doc__=}",
5 5 | )
@@ -126,7 +126,7 @@ impl AlwaysFixableViolation for WhitespaceBeforePunctuation {

/// E201, E202, E203
pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
let mut fstrings = 0u32;
let mut interpolated_strings = 0u32;
let mut brackets = vec![];
let mut prev_token = None;
let mut iter = line.tokens().iter().peekable();
@@ -134,8 +134,10 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
while let Some(token) = iter.next() {
let kind = token.kind();
match kind {
TokenKind::FStringStart => fstrings += 1,
TokenKind::FStringEnd => fstrings = fstrings.saturating_sub(1),
TokenKind::FStringStart | TokenKind::TStringStart => interpolated_strings += 1,
TokenKind::FStringEnd | TokenKind::TStringEnd => {
interpolated_strings = interpolated_strings.saturating_sub(1);
}
TokenKind::Lsqb => {
brackets.push(kind);
}
@@ -161,7 +163,9 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
// Here, `{{` / `}} would be interpreted as a single raw `{` / `}`
// character.
match symbol {
BracketOrPunctuation::OpenBracket(symbol) if symbol != '{' || fstrings == 0 => {
BracketOrPunctuation::OpenBracket(symbol)
if symbol != '{' || interpolated_strings == 0 =>
{
let (trailing, trailing_len) = line.trailing_whitespace(token);
if !matches!(trailing, Whitespace::None) {
if let Some(mut diagnostic) = context.report_diagnostic_if_enabled(
@@ -173,7 +177,9 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
}
}
}
BracketOrPunctuation::CloseBracket(symbol) if symbol != '}' || fstrings == 0 => {
BracketOrPunctuation::CloseBracket(symbol)
if symbol != '}' || interpolated_strings == 0 =>
{
if !matches!(prev_token, Some(TokenKind::Comma)) {
if let (Whitespace::Single | Whitespace::Many | Whitespace::Tab, offset) =
line.leading_whitespace(token)
@@ -286,7 +292,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
}
}
} else {
if fstrings > 0
if interpolated_strings > 0
&& symbol == ':'
&& matches!(prev_token, Some(TokenKind::Equal))
{
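The switch from `fstrings` to `interpolated_strings` extends the existing f-string exemptions for E201/E202/E203 to PEP 750 t-strings. A rough illustration of the intent, based on the E20.py test cases later in this diff (t-string syntax requires Python 3.14; `ham` and `lower` are just fixture-style placeholders):

```python
# Literal braces that open an interpolation are exempt from E201/E202,
# for t-strings just as for f-strings.
f"{ {'a': 1} }"
t"{ {'a': 1} }"

# Real brackets inside an interpolation are still checked.
t"{[ { {'a': 1} } ]}"                # E201 after '[' and E202 before ']'
t"{ham[lower + 1 :, 'columnname']}"  # E203: whitespace before ':'
```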
@@ -41,7 +41,7 @@ impl AlwaysFixableViolation for MissingWhitespace {

/// E231
pub(crate) fn missing_whitespace(line: &LogicalLine, context: &LintContext) {
let mut fstrings = 0u32;
let mut interpolated_strings = 0u32;
let mut definition_state = DefinitionState::from_tokens(line.tokens());
let mut brackets = Vec::new();
let mut iter = line.tokens().iter().peekable();
@@ -50,21 +50,23 @@ pub(crate) fn missing_whitespace(line: &LogicalLine, context: &LintContext) {
let kind = token.kind();
definition_state.visit_token_kind(kind);
match kind {
TokenKind::FStringStart => fstrings += 1,
TokenKind::FStringEnd => fstrings = fstrings.saturating_sub(1),
TokenKind::Lsqb if fstrings == 0 => {
TokenKind::FStringStart | TokenKind::TStringStart => interpolated_strings += 1,
TokenKind::FStringEnd | TokenKind::TStringEnd => {
interpolated_strings = interpolated_strings.saturating_sub(1);
}
TokenKind::Lsqb if interpolated_strings == 0 => {
brackets.push(kind);
}
TokenKind::Rsqb if fstrings == 0 => {
TokenKind::Rsqb if interpolated_strings == 0 => {
brackets.pop();
}
TokenKind::Lbrace if fstrings == 0 => {
TokenKind::Lbrace if interpolated_strings == 0 => {
brackets.push(kind);
}
TokenKind::Rbrace if fstrings == 0 => {
TokenKind::Rbrace if interpolated_strings == 0 => {
brackets.pop();
}
TokenKind::Colon if fstrings > 0 => {
TokenKind::Colon if interpolated_strings > 0 => {
// Colon in f-string, no space required. This will yield false
// negatives for cases like the following as it's hard to
// differentiate between the usage of a colon in a f-string.
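The same generalization applies to E231, following the E23.py cases added below (t-strings need Python 3.14; `a` and `b` are fixture-style placeholders):

```python
t"{(a,b)}"   # E231: missing whitespace after ',' inside the interpolation
t"{a:=1}"    # not flagged: the ':' could be a format-spec separator
```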
@@ -183,3 +183,23 @@ E20.py:145:5: E201 [*] Whitespace after '['
146 146 |
147 147 | #: Okay
148 148 | ham[lower + offset :: upper + offset]

E20.py:195:5: E201 [*] Whitespace after '['
|
193 | # t-strings
194 | t"{ {'a': 1} }"
195 | t"{[ { {'a': 1} } ]}"
| ^ E201
196 | t"normal { {t"{ { [1, 2] } }" } } normal"
|
= help: Remove whitespace before '['

ℹ Safe fix
192 192 |
193 193 | # t-strings
194 194 | t"{ {'a': 1} }"
195 |-t"{[ { {'a': 1} } ]}"
195 |+t"{[{ {'a': 1} } ]}"
196 196 | t"normal { {t"{ { [1, 2] } }" } } normal"
197 197 |
198 198 | t"{x = :.2f}"

@@ -165,3 +165,23 @@ E20.py:172:12: E202 [*] Whitespace before ']'
173 173 |
174 174 | #: E203:1:10
175 175 | ham[upper :]

E20.py:195:18: E202 [*] Whitespace before ']'
|
193 | # t-strings
194 | t"{ {'a': 1} }"
195 | t"{[ { {'a': 1} } ]}"
| ^ E202
196 | t"normal { {t"{ { [1, 2] } }" } } normal"
|
= help: Remove whitespace before ']'

ℹ Safe fix
192 192 |
193 193 | # t-strings
194 194 | t"{ {'a': 1} }"
195 |-t"{[ { {'a': 1} } ]}"
195 |+t"{[ { {'a': 1} }]}"
196 196 | t"normal { {t"{ { [1, 2] } }" } } normal"
197 197 |
198 198 | t"{x = :.2f}"

@@ -345,3 +345,19 @@ E20.py:187:17: E203 [*] Whitespace before ':'
188 188 |
189 189 | #: Okay: https://github.com/astral-sh/ruff/issues/12023
190 190 | f"{x = :.2f}"

E20.py:205:17: E203 [*] Whitespace before ':'
|
204 | #: E203:1:13
205 | t"{ham[lower + 1 :, "columnname"]}"
| ^^ E203
|
= help: Remove whitespace before ':'

ℹ Safe fix
202 202 | t"{ham[lower +1 :, "columnname"]}"
203 203 |
204 204 | #: E203:1:13
205 |-t"{ham[lower + 1 :, "columnname"]}"
205 |+t"{ham[lower + 1:, "columnname"]}"
206 206 |

@@ -905,3 +905,38 @@ E23.py:126:99: E231 [*] Missing whitespace after ':'
127 127 | pass
128 128 |
129 129 | # Should be no E231 errors on any of these:

E23.py:147:6: E231 [*] Missing whitespace after ','
|
146 | # E231
147 | t"{(a,b)}"
| ^ E231
148 |
149 | # Okay because it's hard to differentiate between the usages of a colon in a t-string
|
= help: Add missing whitespace

ℹ Safe fix
144 144 | pass
145 145 |
146 146 | # E231
147 |-t"{(a,b)}"
147 |+t"{(a, b)}"
148 148 |
149 149 | # Okay because it's hard to differentiate between the usages of a colon in a t-string
150 150 | t"{a:=1}"

E23.py:161:37: E231 [*] Missing whitespace after ':'
|
160 | #: E231
161 | {len(t's3://{self.s3_bucket_name}/'):1}
| ^ E231
|
= help: Add missing whitespace

ℹ Safe fix
158 158 | snapshot.file_uri[len(t's3://{self.s3_bucket_name}/'):]
159 159 |
160 160 | #: E231
161 |-{len(t's3://{self.s3_bucket_name}/'):1}
161 |+{len(t's3://{self.s3_bucket_name}/'): 1}
@@ -10,7 +10,7 @@ use ruff_text_size::TextRange;
use crate::checkers::ast::Checker;
use crate::docstrings::Docstring;
use crate::registry::Rule;
use crate::{AlwaysFixableViolation, Edit, Fix};
use crate::{Edit, Fix, FixAvailability, Violation};

/// ## What it does
/// Checks for docstrings on functions that are separated by one or more blank
@@ -42,15 +42,17 @@ pub(crate) struct BlankLineBeforeFunction {
num_lines: usize,
}

impl AlwaysFixableViolation for BlankLineBeforeFunction {
impl Violation for BlankLineBeforeFunction {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

#[derive_message_formats]
fn message(&self) -> String {
let BlankLineBeforeFunction { num_lines } = self;
format!("No blank lines allowed before function docstring (found {num_lines})")
}

fn fix_title(&self) -> String {
"Remove blank line(s) before function docstring".to_string()
fn fix_title(&self) -> Option<String> {
Some("Remove blank line(s) before function docstring".to_string())
}
}

@@ -86,15 +88,17 @@ pub(crate) struct BlankLineAfterFunction {
num_lines: usize,
}

impl AlwaysFixableViolation for BlankLineAfterFunction {
impl Violation for BlankLineAfterFunction {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

#[derive_message_formats]
fn message(&self) -> String {
let BlankLineAfterFunction { num_lines } = self;
format!("No blank lines allowed after function docstring (found {num_lines})")
}

fn fix_title(&self) -> String {
"Remove blank line(s) after function docstring".to_string()
fn fix_title(&self) -> Option<String> {
Some("Remove blank line(s) after function docstring".to_string())
}
}

@@ -115,12 +119,14 @@ pub(crate) fn blank_before_after_function(checker: &Checker, docstring: &Docstri
let mut lines = UniversalNewlineIterator::with_offset(before, function.start()).rev();
let mut blank_lines_before = 0usize;
let mut blank_lines_start = lines.next().map(|l| l.end()).unwrap_or_default();
let mut start_is_line_continuation = false;

for line in lines {
if line.trim().is_empty() {
blank_lines_before += 1;
blank_lines_start = line.start();
} else {
start_is_line_continuation = line.ends_with('\\');
break;
}
}
@@ -132,11 +138,14 @@ pub(crate) fn blank_before_after_function(checker: &Checker, docstring: &Docstri
},
docstring.range(),
);
// Delete the blank line before the docstring.
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(
blank_lines_start,
docstring.line_start(),
)));
// Do not offer fix if a \ would cause it to be a syntax error
if !start_is_line_continuation {
// Delete the blank line before the docstring.
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(
blank_lines_start,
docstring.line_start(),
)));
}
}
}

@@ -156,7 +165,9 @@ pub(crate) fn blank_before_after_function(checker: &Checker, docstring: &Docstri
// Count the number of blank lines after the docstring.
let mut blank_lines_after = 0usize;
let mut lines = UniversalNewlineIterator::with_offset(after, docstring.end()).peekable();
let first_line_end = lines.next().map(|l| l.end()).unwrap_or_default();
let first_line = lines.next();
let first_line_line_continuation = first_line.as_ref().is_some_and(|l| l.ends_with('\\'));
let first_line_end = first_line.map(|l| l.end()).unwrap_or_default();
let mut blank_lines_end = first_line_end;

while let Some(line) = lines.peek() {
@@ -185,11 +196,14 @@ pub(crate) fn blank_before_after_function(checker: &Checker, docstring: &Docstri
},
docstring.range(),
);
// Delete the blank line after the docstring.
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(
first_line_end,
blank_lines_end,
)));
// Do not offer fix if a \ would cause it to be a syntax error
if !first_line_line_continuation {
// Delete the blank line after the docstring.
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(
first_line_end,
blank_lines_end,
)));
}
}
}
}
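A sketch of the case the new `start_is_line_continuation` / `first_line_line_continuation` checks guard against, taken from the D.py snapshot below: D201/D202 are still reported, but no fix is attached, because deleting the blank lines would splice the docstring into a backslash-continued line.

```python
def line_continuation_chars():\

    """D201/D202 are reported here, but no fix is offered: removing the blank
    lines would merge this docstring into the backslash-continued lines."""\

    ...
```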
@@ -82,3 +82,14 @@ D.py:568:5: D201 [*] No blank lines allowed before function docstring (found 1)
568 567 | """Trailing and leading space.
569 568 |
570 569 | More content.

D.py:729:5: D201 No blank lines allowed before function docstring (found 1)
|
727 | def line_continuation_chars():\
728 |
729 | """No fix should be offered for D201/D202 because of the line continuation chars."""\
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ D201
730 |
731 | ...
|
= help: Remove blank line(s) before function docstring

@@ -85,4 +85,15 @@ D.py:568:5: D202 [*] No blank lines allowed after function docstring (found 1)
572 |-
573 572 | pass
574 573 |
575 574 |
575 574 |

D.py:729:5: D202 No blank lines allowed after function docstring (found 1)
|
727 | def line_continuation_chars():\
728 |
729 | """No fix should be offered for D201/D202 because of the line continuation chars."""\
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ D202
730 |
731 | ...
|
= help: Remove blank line(s) after function docstring

@@ -428,3 +428,5 @@ D.py:723:1: D208 [*] Docstring is over-indented
723 |- Returns:
723 |+ Returns:
724 724 | """
725 725 |
726 726 |
@@ -24,24 +24,30 @@ const BIDI_UNICODE: [char; 10] = [
];

/// ## What it does
/// Checks for bidirectional unicode characters.
/// Checks for bidirectional formatting characters.
///
/// ## Why is this bad?
/// The interaction between bidirectional unicode characters and the
/// The interaction between bidirectional formatting characters and the
/// surrounding code can be surprising to those that are unfamiliar
/// with right-to-left writing systems.
///
/// In some cases, bidirectional unicode characters can also be used to
/// In some cases, bidirectional formatting characters can also be used to
/// obfuscate code and introduce or mask security vulnerabilities.
///
/// ## Example
/// ```python
/// s = "א" * 100 # "א" is assigned
/// print(s) # prints a 100-character string
/// example = "x" * 100 # "x" is assigned
/// ```
///
/// The example uses two `RIGHT-TO-LEFT MARK`s to make the `100 * ` appear inside the comment.
/// Without the `RIGHT-TO-LEFT MARK`s, the code looks like this:
///
/// ```py
/// example = "x" * 100 # "x" is assigned
/// ```
///
/// ## References
/// - [PEP 672: Bidirectional Text](https://peps.python.org/pep-0672/#bidirectional-text)
/// - [PEP 672: Bidirectional Marks, Embeddings, Overrides and Isolates](https://peps.python.org/pep-0672/#bidirectional-marks-embeddings-overrides-and-isolates)
#[derive(ViolationMetadata)]
pub(crate) struct BidirectionalUnicode;
@@ -185,7 +185,9 @@ pub(crate) fn invalid_string_characters(context: &LintContext, token: &Token, lo
let text = match token.kind() {
// We can't use the `value` field since it's decoded and e.g. for f-strings removed a curly
// brace that escaped another curly brace, which would gives us wrong column information.
TokenKind::String | TokenKind::FStringMiddle => locator.slice(token),
TokenKind::String | TokenKind::FStringMiddle | TokenKind::TStringMiddle => {
locator.slice(token)
}
_ => return,
};
@@ -4,6 +4,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::{self as ast, Expr};
use ruff_python_semantic::SemanticModel;
use ruff_python_semantic::analyze::typing;
use ruff_text_size::{Ranged, TextRange};

use crate::checkers::ast::Checker;
@@ -111,20 +112,34 @@ enum Callee<'a> {
}

impl<'a> Callee<'a> {
fn is_pathlib_path_call(expr: &Expr, semantic: &SemanticModel) -> bool {
if let Expr::Call(ast::ExprCall { func, .. }) = expr {
semantic
.resolve_qualified_name(func)
.is_some_and(|qualified_name| {
matches!(qualified_name.segments(), ["pathlib", "Path"])
})
} else {
false
}
}

fn try_from_call_expression(
call: &'a ast::ExprCall,
semantic: &'a SemanticModel,
) -> Option<Self> {
if let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = call.func.as_ref() {
// Check for `pathlib.Path(...).open(...)` or equivalent
if let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() {
if semantic
.resolve_qualified_name(func)
.is_some_and(|qualified_name| {
matches!(qualified_name.segments(), ["pathlib", "Path"])
})
{
return Some(Callee::Pathlib(attr));
// Direct: Path(...).open()
if Self::is_pathlib_path_call(value, semantic) {
return Some(Callee::Pathlib(attr));
}
// Indirect: x.open() where x = Path(...)
else if let Expr::Name(name) = value.as_ref() {
if let Some(binding_id) = semantic.only_binding(name) {
let binding = semantic.binding(binding_id);
if typing::is_pathlib_path(binding, semantic) {
return Some(Callee::Pathlib(attr));
}
}
}
}
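The refactored `Callee::try_from_call_expression` now recognizes both spellings sketched below: the direct call form, and a name whose only binding is a `Path(...)` call, resolved through `semantic.only_binding` plus `typing::is_pathlib_path`.

```python
from pathlib import Path

# Direct: the receiver is a Path(...) call expression.
Path("settings.toml").open("w")

# Indirect: the receiver is a name bound to a Path(...) call; the binding
# lookup added above lets the rule treat this spelling the same way.
config = Path("settings.toml")
config.open("w")
```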
Binary file not shown.
Binary file not shown.
Binary file not shown.