Compare commits
77 commits
micha/use-... ... david/skip
| Author | SHA1 | Date |
|---|---|---|
|  | abf676ea56 |  |
|  | f722bfa9e6 |  |
|  | b124e182ca |  |
|  | 57373a7e4d |  |
|  | ae9d450b5f |  |
|  | c8c80e054e |  |
|  | 4bc34b82ef |  |
|  | d9cab4d242 |  |
|  | d77b7312b0 |  |
|  | f9091ea8bb |  |
|  | 1d2181623c |  |
|  | dc6be457b5 |  |
|  | 1079975b35 |  |
|  | 39eb0f6c6c |  |
|  | d13228ab85 |  |
|  | 9461d3076f |  |
|  | 63d1d332b3 |  |
|  | e0149cd9f3 |  |
|  | 2a00eca66b |  |
|  | 3d17897c02 |  |
|  | fa1df4cedc |  |
|  | 89258f1938 |  |
|  | 1dcef1a011 |  |
|  | ba629fe262 |  |
|  | bb3a05f92b |  |
|  | 4daf59e5e7 |  |
|  | 88bd82938f |  |
|  | 5a55bab3f3 |  |
|  | cc5885e564 |  |
|  | 4573a0f6a0 |  |
|  | 905b9d7f51 |  |
|  | b605c3e232 |  |
|  | c281891b5c |  |
|  | 53d795da67 |  |
|  | 385d6fa608 |  |
|  | ba070bb6d5 |  |
|  | dc10ab81bd |  |
|  | 7673d46b71 |  |
|  | 9d5ecacdc5 |  |
|  | 9af8597608 |  |
|  | 64e5780037 |  |
|  | da8aa6a631 |  |
|  | ee69d38000 |  |
|  | fd335eb8b7 |  |
|  | c82fa94e0a |  |
|  | 6d4687c9af |  |
|  | 9180cd094d |  |
|  | 9d98a66f65 |  |
|  | cb60ecef6b |  |
|  | 215a1c55d4 |  |
|  | 5e29278aa2 |  |
|  | af62d0368f |  |
|  | 30683e3a93 |  |
|  | cbc8c08016 |  |
|  | 897889d1ce |  |
|  | cb5a9ff8dc |  |
|  | fcdffe4ac9 |  |
|  | 88de5727df |  |
|  | b8dec79182 |  |
|  | dc66019fbc |  |
|  | 926e83323a |  |
|  | 5cace28c3e |  |
|  | 3785e13231 |  |
|  | c2380fa0e2 |  |
|  | 4dec44ae49 |  |
|  | b6579eaf04 |  |
|  | f063c0e874 |  |
|  | 6a65734ee3 |  |
|  | 00066e094c |  |
|  | 37a1958374 |  |
|  | 2535d791ae |  |
|  | 05c4399e7b |  |
|  | b18434b0f6 |  |
|  | 17779c9a17 |  |
|  | 53fc0614da |  |
|  | 59249f483b |  |
|  | 84e76f4d04 |  |
.github/workflows/ci.yaml (vendored, 30 changes)

@@ -143,12 +143,12 @@ jobs:
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
':!**/*.md' \
':crates/ty_python_semantic/resources/mdtest/**/*.md' \
# NOTE: Do not exclude all Markdown files here, but rather use
# specific exclude patterns like 'docs/**'), because tests for
# 'ty' are written in Markdown.
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
':!docs/**' \
':!assets/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
@@ -238,13 +238,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)
@@ -296,13 +296,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-insta
- name: "Run tests"
@@ -325,7 +325,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-nextest
- name: "Run tests"
@@ -381,7 +381,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build"
run: cargo build --release --locked

@@ -406,7 +406,7 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build tests"
shell: bash
env:
@@ -903,7 +903,7 @@ jobs:
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-codspeed

@@ -936,7 +936,7 @@ jobs:
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-codspeed
.github/workflows/daily_fuzz.yaml (vendored, 2 changes)

@@ -38,7 +38,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Build ruff
# A debug build means the script runs slower once it gets started,
.github/workflows/sync_typeshed.yaml (vendored, 28 changes)

@@ -34,6 +34,10 @@ env:
# and which all three workers push to.
UPSTREAM_BRANCH: typeshedbot/sync-typeshed

# The path to the directory that contains the vendored typeshed stubs,
# relative to the root of the Ruff repository.
VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed

jobs:
# Sync typeshed stubs, and sync all docstrings available on Linux.
# Push the changes to a new branch on the upstream repository.
@@ -64,20 +68,20 @@ jobs:
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Sync typeshed stubs
run: |
rm -rf ruff/crates/ty_vendored/vendor/typeshed
mkdir ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
rm -rf "ruff/${VENDORED_TYPESHED}"
mkdir "ruff/${VENDORED_TYPESHED}"
cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"

# The pyproject.toml file is needed by a later job for the black configuration.
# It's deleted before creating the PR.
cp typeshed/pyproject.toml ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"

cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
cd ruff
git checkout -b typeshedbot/sync-typeshed
git checkout -b "${UPSTREAM_BRANCH}"
git add .
git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
- name: Sync Linux docstrings
@@ -167,17 +171,17 @@ jobs:
# consistent with the other typeshed stubs around them.
# Typeshed formats code using black in their CI, so we just invoke
# black on the stubs the same way that typeshed does.
uvx black crates/ty_vendored/vendor/typeshed/stdlib --config crates/ty_vendored/vendor/typeshed/pyproject.toml || true
uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
git commit -am "Format codemodded docstrings" --allow-empty

rm crates/ty_vendored/vendor/typeshed/pyproject.toml
rm "${VENDORED_TYPESHED}/pyproject.toml"
git commit -am "Remove pyproject.toml file"

git push
- name: Create a PR
if: ${{ success() }}
run: |
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

create-issue-on-failure:
.github/workflows/ty-ecosystem-analyzer.yaml (vendored, 2 changes)

@@ -64,7 +64,7 @@ jobs:

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"

ecosystem-analyzer \
--repository ruff \
.github/workflows/ty-ecosystem-report.yaml (vendored, 2 changes)

@@ -49,7 +49,7 @@ jobs:

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"

ecosystem-analyzer \
--verbose \

@@ -81,10 +81,10 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.3
rev: v0.12.4
hooks:
- id: ruff-format
- id: ruff
- id: ruff-check
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
CHANGELOG.md (18 changes)

@@ -1,5 +1,23 @@
# Changelog

## 0.12.5

### Preview features

- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))

### Bug fixes

- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))

### Rule changes

- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))

## 0.12.4

### Preview features
Cargo.lock (generated, 79 changes)

@@ -261,6 +261,18 @@ version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"

[[package]]
name = "bitvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
dependencies = [
"funty",
"radium",
"tap",
"wyz",
]

[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -1121,6 +1133,12 @@ dependencies = [
"libc",
]

[[package]]
name = "funty"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"

[[package]]
name = "generic-array"
version = "0.14.7"
@@ -2548,6 +2566,12 @@ version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"

[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"

[[package]]
name = "rand"
version = "0.8.5"
@@ -2561,9 +2585,9 @@ dependencies = [

[[package]]
name = "rand"
version = "0.9.1"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.3",
@@ -2710,7 +2734,7 @@ dependencies = [

[[package]]
name = "ruff"
version = "0.12.4"
version = "0.12.5"
dependencies = [
"anyhow",
"argfile",
@@ -2827,7 +2851,6 @@ dependencies = [
"anstyle",
"arc-swap",
"camino",
"countme",
"dashmap",
"dunce",
"etcetera",
@@ -2937,6 +2960,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"memchr",
"ruff_cache",
"ruff_db",
"ruff_linter",
@@ -2962,7 +2986,7 @@ dependencies = [

[[package]]
name = "ruff_linter"
version = "0.12.4"
version = "0.12.5"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3041,7 +3065,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"itertools 0.14.0",
"rand 0.9.1",
"rand 0.9.2",
"ruff_diagnostics",
"ruff_source_file",
"ruff_text_size",
@@ -3294,7 +3318,7 @@ dependencies = [

[[package]]
name = "ruff_wasm"
version = "0.12.4"
version = "0.12.5"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3535,9 +3559,9 @@ dependencies = [

[[package]]
name = "serde_json"
version = "1.0.140"
version = "1.0.141"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
dependencies = [
"itoa",
"memchr",
@@ -3727,23 +3751,22 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"

[[package]]
name = "strum"
version = "0.27.1"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
dependencies = [
"strum_macros",
]

[[package]]
name = "strum_macros"
version = "0.27.1"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn",
]

@@ -3769,6 +3792,12 @@ dependencies = [
"syn",
]

[[package]]
name = "tap"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"

[[package]]
name = "tempfile"
version = "3.20.0"
@@ -4183,6 +4212,7 @@ version = "0.0.0"
dependencies = [
"bitflags 2.9.1",
"insta",
"itertools 0.14.0",
"regex",
"ruff_db",
"ruff_python_ast",
@@ -4191,11 +4221,10 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"salsa",
"smallvec",
"tracing",
"ty_project",
"ty_python_semantic",
"ty_vendored",
]

[[package]]
@@ -4229,7 +4258,6 @@ dependencies = [
"thiserror 2.0.12",
"toml 0.9.2",
"tracing",
"ty_ide",
"ty_python_semantic",
"ty_vendored",
]
@@ -4240,6 +4268,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"bitvec",
"camino",
"colored 3.0.0",
"compact_str",
@@ -4276,6 +4305,7 @@ dependencies = [
"strum_macros",
"tempfile",
"test-case",
"thin-vec",
"thiserror 2.0.12",
"tracing",
"ty_python_semantic",
@@ -4291,10 +4321,13 @@ dependencies = [
"anyhow",
"bitflags 2.9.1",
"crossbeam",
"dunce",
"insta",
"jod-thread",
"libc",
"lsp-server",
"lsp-types",
"regex",
"ruff_db",
"ruff_notebook",
"ruff_python_ast",
@@ -4305,6 +4338,7 @@ dependencies = [
"serde",
"serde_json",
"shellexpand",
"tempfile",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
@@ -4564,7 +4598,7 @@ checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
dependencies = [
"getrandom 0.3.3",
"js-sys",
"rand 0.9.1",
"rand 0.9.2",
"uuid-macro-internal",
"wasm-bindgen",
]
@@ -5093,6 +5127,15 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"

[[package]]
name = "wyz"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = [
"tap",
]

[[package]]
name = "yansi"
version = "1.0.1"
@@ -57,6 +57,9 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
"alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
@@ -163,6 +166,7 @@ strum_macros = { version = "0.27.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thin-vec = { version = "0.2.14" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.9.0" }

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.4/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.4
rev: v0.12.5
hooks:
# Run the linter.
- id: ruff-check

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.4"
version = "0.12.5"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -169,6 +169,9 @@ pub struct AnalyzeGraphCommand {
/// Attempt to detect imports from string literals.
#[clap(long)]
detect_string_imports: bool,
/// The minimum number of dots in a string import to consider it a valid import.
#[clap(long)]
min_dots: Option<usize>,
/// Enable preview mode. Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
@@ -808,6 +811,7 @@ impl AnalyzeGraphCommand {
} else {
None
},
string_imports_min_dots: self.min_dots,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version.map(ast::PythonVersion::from),
..ExplicitConfigOverrides::default()
@@ -1305,6 +1309,7 @@ struct ExplicitConfigOverrides {
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
string_imports_min_dots: Option<usize>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1392,6 +1397,9 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
}

config
}
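The hunks above wire the new `--min-dots` flag into `string_imports_min_dots` via `ExplicitConfigOverrides` (exercised later by `ruff analyze graph --detect-string-imports --min-dots 1` in the integration test). The sketch below is a hypothetical, simplified illustration of the dot-count heuristic that the flag's doc comment describes; `looks_like_string_import` is an invented name and this is not Ruff's actual implementation.

```rust
/// Hypothetical sketch of the "minimum dots" heuristic: treat a string
/// literal as a module path only if it has an identifier-like shape and
/// at least `min_dots` dots. Not Ruff's real code.
fn looks_like_string_import(literal: &str, min_dots: usize) -> bool {
    // Count dots in the candidate, e.g. "ruff.c" has one dot.
    let dots = literal.matches('.').count();
    let identifier_like = !literal.is_empty()
        && literal
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '.');
    identifier_like && dots >= min_dots
}

fn main() {
    // Requiring two dots rejects a one-dot candidate such as "ruff.c";
    // lowering the threshold to 1 (as `--min-dots 1` does in the test
    // below) accepts it.
    assert!(!looks_like_string_import("ruff.c", 2));
    assert!(looks_like_string_import("ruff.c", 1));
}
```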
@@ -454,7 +454,7 @@ impl LintCacheData {
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.suggestion().map(ToString::to_string),
suggestion: msg.first_help_text().map(ToString::to_string),
range: msg.expect_range(),
parent: msg.parent(),
fix: msg.fix().cloned(),

@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(

// Resolve the per-file settings.
let settings = resolver.resolve(path);
let string_imports = settings.analyze.detect_string_imports;
let string_imports = settings.analyze.string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();

// Skip excluded files.

@@ -264,6 +264,7 @@ impl Printer {
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == OutputFormat::Full)
.with_unsafe_fixes(self.unsafe_fixes)
.with_preview(preview)
.emit(writer, &diagnostics.inner, &context)?;

if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {

@@ -197,23 +197,43 @@ fn string_detection() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}

----- stderr -----
"###);
----- stderr -----
"#);
});

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}

----- stderr -----
"#);
});

Ok(())
@@ -2422,7 +2422,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -2734,7 +2734,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3098,7 +3098,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3478,7 +3478,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3806,7 +3806,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4134,7 +4134,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4419,7 +4419,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4757,7 +4757,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.detect_string_imports = false
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -25,7 +25,6 @@ ty_static = { workspace = true }
anstyle = { workspace = true }
arc-swap = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }
dunce = { workspace = true }
filetime = { workspace = true }
@@ -59,6 +58,11 @@ tempfile = { workspace = true }
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = ["camino/serde1", "dep:serde", "dep:serde_json", "ruff_diagnostics/serde"]
serde = [
"camino/serde1",
"dep:serde",
"dep:serde_json",
"ruff_diagnostics/serde",
]
# Exposes testing utilities.
testing = ["tracing-subscriber"]
@@ -1,6 +1,6 @@
use std::{fmt::Formatter, path::Path, sync::Arc};

use ruff_diagnostics::Fix;
use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::{LineColumn, SourceCode, SourceFile};

use ruff_annotate_snippets::Level as AnnotateLevel;
@@ -122,7 +122,14 @@ impl Diagnostic {
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(Severity::Info, message));
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
}

/// Adds a "help" sub-diagnostic with the given message.
///
/// See the closely related [`Diagnostic::info`] method for more details.
pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
}

/// Adds a "sub" diagnostic to this diagnostic.
@@ -377,9 +384,15 @@ impl Diagnostic {
self.primary_message()
}

/// Returns the fix suggestion for the violation.
pub fn suggestion(&self) -> Option<&str> {
self.primary_annotation()?.get_message()
/// Returns the message of the first sub-diagnostic with a `Help` severity.
///
/// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
/// general this is not the guaranteed meaning of such a message.
pub fn first_help_text(&self) -> Option<&str> {
self.sub_diagnostics()
.iter()
.find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
.map(|sub| sub.inner.message.as_str())
}

/// Returns the URL for the rule documentation, if it exists.
@@ -565,7 +578,10 @@ impl SubDiagnostic {
/// Callers can pass anything that implements `std::fmt::Display`
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
pub fn new<'a>(
severity: SubDiagnosticSeverity,
message: impl IntoDiagnosticMessage + 'a,
) -> SubDiagnostic {
let inner = Box::new(SubDiagnosticInner {
severity,
message: message.into_diagnostic_message(),
@@ -643,7 +659,7 @@ impl SubDiagnostic {

#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct SubDiagnosticInner {
severity: Severity,
severity: SubDiagnosticSeverity,
message: DiagnosticMessage,
annotations: Vec<Annotation>,
}
@@ -1170,6 +1186,32 @@ impl Severity {
}
}

/// Like [`Severity`] but exclusively for sub-diagnostics.
///
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
Help,
Info,
Warning,
Error,
Fatal,
}

impl SubDiagnosticSeverity {
fn to_annotate(self) -> AnnotateLevel {
match self {
SubDiagnosticSeverity::Help => AnnotateLevel::Help,
SubDiagnosticSeverity::Info => AnnotateLevel::Info,
SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
SubDiagnosticSeverity::Error => AnnotateLevel::Error,
SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
}
}
}

/// Configuration for rendering diagnostics.
#[derive(Clone, Debug)]
pub struct DisplayDiagnosticConfig {
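The hunks above move sub-diagnostics from `Severity` to the new `SubDiagnosticSeverity` and replace `Diagnostic::suggestion()` with `first_help_text()`, which scans a diagnostic's sub-diagnostics for the first one tagged `Help`. The following is a minimal standalone sketch of that lookup pattern; the types here are invented stand-ins for illustration, not the real `ruff_db` structs.

```rust
// Stand-in types illustrating the "first help sub-diagnostic" lookup that
// `Diagnostic::first_help_text` performs in the diff above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SubSeverity {
    Help,
    Info,
}

struct Sub {
    severity: SubSeverity,
    message: String,
}

struct Diag {
    subs: Vec<Sub>,
}

impl Diag {
    /// The fix "suggestion" is simply the message of the first `Help`
    /// sub-diagnostic, if any.
    fn first_help_text(&self) -> Option<&str> {
        self.subs
            .iter()
            .find(|sub| matches!(sub.severity, SubSeverity::Help))
            .map(|sub| sub.message.as_str())
    }
}

fn main() {
    let diag = Diag {
        subs: vec![
            Sub { severity: SubSeverity::Info, message: "this is a helpful note".into() },
            Sub { severity: SubSeverity::Help, message: "Remove unused import: `os`".into() },
        ],
    };
    assert_eq!(diag.first_help_text(), Some("Remove unused import: `os`"));
}
```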
@@ -1196,6 +1238,15 @@ pub struct DisplayDiagnosticConfig {
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
preview: bool,
/// Whether to hide the real `Severity` of diagnostics.
///
/// This is intended for temporary use by Ruff, which only has a single `error` severity at the
/// moment. We should be able to remove this option when Ruff gets more severities.
hide_severity: bool,
/// Whether to show the availability of a fix in a diagnostic.
show_fix_status: bool,
/// The lowest applicability that should be shown when reporting diagnostics.
fix_applicability: Applicability,
}

impl DisplayDiagnosticConfig {
@@ -1224,6 +1275,35 @@ impl DisplayDiagnosticConfig {
..self
}
}

/// Whether to hide a diagnostic's severity or not.
pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
hide_severity: yes,
..self
}
}

/// Whether to show a fix's availability or not.
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_status: yes,
..self
}
}

/// Set the lowest fix applicability that should be shown.
///
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
/// availability for unsafe or display-only fixes.
///
/// Note that this option is currently ignored when `hide_severity` is false.
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
fix_applicability: applicability,
..self
}
}
}

impl Default for DisplayDiagnosticConfig {
@@ -1233,6 +1313,9 @@ impl Default for DisplayDiagnosticConfig {
color: false,
context: 2,
preview: false,
hide_severity: false,
show_fix_status: false,
fix_applicability: Applicability::Safe,
}
}
}
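The three new `DisplayDiagnosticConfig` options follow the existing consuming-builder style (`..self`), so a caller can chain them onto the default config. A minimal sketch of such a call site is below, assuming the `ruff_db::diagnostic` module as shown in this diff; the import paths and the function name are assumptions, only the method names come from the hunks above.

```rust
// Hedged sketch: method names are taken from the diff above; the import
// paths and this helper function are assumptions, not code from the PR.
use ruff_db::diagnostic::DisplayDiagnosticConfig;
use ruff_diagnostics::Applicability;

fn ruff_style_concise_config() -> DisplayDiagnosticConfig {
    DisplayDiagnosticConfig::default()
        // Suppress the `error[...]` prefix, matching Ruff's current
        // single-severity output.
        .hide_severity(true)
        // Append a `[*]` marker when a fix is available...
        .show_fix_status(true)
        // ...including display-only and unsafe fixes.
        .fix_applicability(Applicability::DisplayOnly)
}
```

Per the doc comment in the diff, `fix_applicability` only takes effect together with `hide_severity`, which is why the sketch sets both.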
@@ -9,7 +9,7 @@ use ruff_notebook::{Notebook, NotebookIndex};
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{TextRange, TextSize};

use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
use crate::diagnostic::stylesheet::DiagnosticStylesheet;
use crate::{
Db,
files::File,
@@ -18,14 +18,17 @@ use crate::{
};

use super::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig, Severity,
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig,
SubDiagnostic, UnifiedFile,
};

use azure::AzureRenderer;
use concise::ConciseRenderer;
use pylint::PylintRenderer;

mod azure;
mod concise;
mod full;
#[cfg(feature = "serde")]
mod json;
#[cfg(feature = "serde")]
@@ -104,48 +107,7 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self.config.format {
DiagnosticFormat::Concise => {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};

for diag in self.diagnostics {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}]",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
)?;
if let Some(span) = diag.primary_span() {
write!(
f,
" {path}",
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());

write!(
f,
":{line}:{col}",
line = fmt_styled(start.line, stylesheet.emphasis),
col = fmt_styled(start.column, stylesheet.emphasis),
)?;
}
write!(f, ":")?;
}
writeln!(f, " {message}", message = diag.concise_message())?;
}
ConciseRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
}
DiagnosticFormat::Full => {
let stylesheet = if self.config.color {
@@ -256,7 +218,7 @@ impl<'a> Resolved<'a> {
/// both.)
#[derive(Debug)]
struct ResolvedDiagnostic<'a> {
severity: Severity,
level: AnnotateLevel,
id: Option<String>,
message: String,
annotations: Vec<ResolvedAnnotation<'a>>,
@@ -281,7 +243,7 @@ impl<'a> ResolvedDiagnostic<'a> {
let id = Some(diag.inner.id.to_string());
let message = diag.inner.message.as_str().to_string();
ResolvedDiagnostic {
severity: diag.inner.severity,
level: diag.inner.severity.to_annotate(),
id,
message,
annotations,
@@ -304,7 +266,7 @@ impl<'a> ResolvedDiagnostic<'a> {
})
.collect();
ResolvedDiagnostic {
severity: diag.inner.severity,
level: diag.inner.severity.to_annotate(),
id: None,
message: diag.inner.message.as_str().to_string(),
annotations,
@@ -371,7 +333,7 @@ impl<'a> ResolvedDiagnostic<'a> {
snippets_by_input
.sort_by(|snips1, snips2| snips1.has_primary.cmp(&snips2.has_primary).reverse());
RenderableDiagnostic {
severity: self.severity,
level: self.level,
id: self.id.as_deref(),
message: &self.message,
snippets_by_input,
@@ -459,7 +421,7 @@ struct Renderable<'r> {
#[derive(Debug)]
struct RenderableDiagnostic<'r> {
/// The severity of the diagnostic.
severity: Severity,
level: AnnotateLevel,
/// The ID of the diagnostic. The ID can usually be used on the CLI or in a
/// config file to change the severity of a lint.
///
@@ -478,7 +440,6 @@ struct RenderableDiagnostic<'r> {
impl RenderableDiagnostic<'_> {
/// Convert this to an "annotate" snippet.
fn to_annotate(&self) -> AnnotateMessage<'_> {
let level = self.severity.to_annotate();
let snippets = self.snippets_by_input.iter().flat_map(|snippets| {
let path = snippets.path;
snippets
@@ -486,7 +447,7 @@ impl RenderableDiagnostic<'_> {
.iter()
.map(|snippet| snippet.to_annotate(path))
});
let mut message = level.title(self.message);
let mut message = self.level.title(self.message);
if let Some(id) = self.id {
message = message.id(id);
}
@@ -862,9 +823,12 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
#[cfg(test)]
mod tests {

use ruff_diagnostics::{Edit, Fix};
use ruff_diagnostics::{Applicability, Edit, Fix};

use crate::diagnostic::{Annotation, DiagnosticId, SecondaryCode, Severity, Span};
use crate::diagnostic::{
Annotation, DiagnosticId, IntoDiagnosticMessage, SecondaryCode, Severity, Span,
SubDiagnosticSeverity,
};
use crate::files::system_path_to_file;
use crate::system::{DbWithWritableSystem, SystemPath};
use crate::tests::TestDb;
@@ -1548,7 +1512,7 @@ watermelon

let mut diag = env.err().primary("animals", "3", "3", "").build();
diag.sub(
env.sub_builder(Severity::Info, "this is a helpful note")
env.sub_builder(SubDiagnosticSeverity::Info, "this is a helpful note")
.build(),
);
insta::assert_snapshot!(
@@ -1577,15 +1541,15 @@ watermelon

let mut diag = env.err().primary("animals", "3", "3", "").build();
diag.sub(
env.sub_builder(Severity::Info, "this is a helpful note")
env.sub_builder(SubDiagnosticSeverity::Info, "this is a helpful note")
.build(),
);
diag.sub(
env.sub_builder(Severity::Info, "another helpful note")
env.sub_builder(SubDiagnosticSeverity::Info, "another helpful note")
.build(),
);
diag.sub(
env.sub_builder(Severity::Info, "and another helpful note")
env.sub_builder(SubDiagnosticSeverity::Info, "and another helpful note")
.build(),
);
insta::assert_snapshot!(
@@ -2307,6 +2271,27 @@ watermelon
self.config = config;
}

/// Hide diagnostic severity when rendering.
pub(super) fn hide_severity(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.hide_severity(yes);
self.config = config;
}

/// Show fix availability when rendering.
pub(super) fn show_fix_status(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.show_fix_status(yes);
self.config = config;
}

/// The lowest fix applicability to show when rendering.
pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
let mut config = std::mem::take(&mut self.config);
config = config.fix_applicability(applicability);
self.config = config;
}

/// Add a file with the given path and contents to this environment.
pub(super) fn add(&mut self, path: &str, contents: &str) {
let path = SystemPath::new(path);
@@ -2370,7 +2355,7 @@ watermelon
/// sub-diagnostic with "error" severity and canned values for
/// its identifier and message.
fn sub_warn(&mut self) -> SubDiagnosticBuilder<'_> {
self.sub_builder(Severity::Warning, "sub-diagnostic message")
self.sub_builder(SubDiagnosticSeverity::Warning, "sub-diagnostic message")
}

/// Returns a builder for tersely constructing diagnostics.
@@ -2391,7 +2376,11 @@ watermelon
}

/// Returns a builder for tersely constructing sub-diagnostics.
fn sub_builder(&mut self, severity: Severity, message: &str) -> SubDiagnosticBuilder<'_> {
fn sub_builder(
&mut self,
severity: SubDiagnosticSeverity,
message: &str,
) -> SubDiagnosticBuilder<'_> {
let subdiag = SubDiagnostic::new(severity, message);
SubDiagnosticBuilder { env: self, subdiag }
}
@@ -2494,6 +2483,12 @@ watermelon
self.diag.set_noqa_offset(noqa_offset);
self
}

/// Adds a "help" sub-diagnostic with the given message.
fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> {
self.diag.help(message);
self
}
}

/// A helper builder for tersely populating a `SubDiagnostic`.
@@ -2600,7 +2595,8 @@ def fibonacci(n):

let diagnostics = vec![
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("fib.py", "1:7", "1:9", "Remove unused import: `os`")
.primary("fib.py", "1:7", "1:9", "")
.help("Remove unused import: `os`")
.secondary_code("F401")
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(0),
@@ -2613,12 +2609,8 @@ def fibonacci(n):
Severity::Error,
"Local variable `x` is assigned to but never used",
)
.primary(
"fib.py",
"6:4",
"6:5",
"Remove assignment to unused variable `x`",
)
.primary("fib.py", "6:4", "6:5", "")
.help("Remove assignment to unused variable `x`")
.secondary_code("F841")
.fix(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94),
@@ -2665,6 +2657,25 @@ if call(foo
}

/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
///
/// The concatenated cells look like this:
///
/// ```python
/// # cell 1
/// import os
/// # cell 2
/// import math
///
/// print('hello world')
/// # cell 3
/// def foo():
///     print()
///     x = 1
/// ```
///
/// The first diagnostic is on the unused `os` import with location cell 1, row 2, column 8
/// (`cell 1:2:8`). The second diagnostic is the unused `math` import at `cell 2:2:8`, and the
/// third diagnostic is an unfixable unused variable at `cell 3:4:5`.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
@@ -2720,7 +2731,8 @@ if call(foo

let diagnostics = vec![
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "Remove unused import: `os`")
.primary("notebook.ipynb", "2:7", "2:9", "")
.help("Remove unused import: `os`")
.secondary_code("F401")
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(9),
@@ -2733,12 +2745,8 @@ if call(foo
Severity::Error,
"`math` imported but unused",
)
.primary(
"notebook.ipynb",
"4:7",
"4:11",
"Remove unused import: `math`",
)
.primary("notebook.ipynb", "4:7", "4:11", "")
.help("Remove unused import: `math`")
.secondary_code("F401")
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(28),
@@ -2751,12 +2759,8 @@ if call(foo
Severity::Error,
"Local variable `x` is assigned to but never used",
)
.primary(
"notebook.ipynb",
"10:4",
"10:5",
"Remove assignment to unused variable `x`",
)
.primary("notebook.ipynb", "10:4", "10:5", "")
.help("Remove assignment to unused variable `x`")
.secondary_code("F841")
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(94),
crates/ruff_db/src/diagnostic/render/concise.rs (new file, 195 lines)

@@ -0,0 +1,195 @@
use crate::diagnostic::{
Diagnostic, DisplayDiagnosticConfig, Severity,
stylesheet::{DiagnosticStylesheet, fmt_styled},
};

use super::FileResolver;

pub(super) struct ConciseRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}

impl<'a> ConciseRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}

pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};

let sep = fmt_styled(":", stylesheet.separator);
for diag in diagnostics {
if let Some(span) = diag.primary_span() {
write!(
f,
"{path}",
path = fmt_styled(
span.file().relative_path(self.resolver).to_string_lossy(),
stylesheet.emphasis
)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());

if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
write!(
f,
"{sep}cell {cell}{sep}{line}{sep}{col}",
cell = notebook_index.cell(start.line).unwrap_or_default(),
line = notebook_index.cell_row(start.line).unwrap_or_default(),
col = start.column,
)?;
} else {
write!(
f,
"{sep}{line}{sep}{col}",
line = start.line,
col = start.column,
)?;
}
}
write!(f, "{sep} ")?;
}
if self.config.hide_severity {
if let Some(code) = diag.secondary_code() {
write!(
f,
"{code} ",
code = fmt_styled(code, stylesheet.secondary_code)
)?;
}
if self.config.show_fix_status {
if let Some(fix) = diag.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.config.fix_applicability) {
write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
}
}
}
} else {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}] ",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
)?;
}

writeln!(f, "{message}", message = diag.concise_message())?;
}

Ok(())
}
}

#[cfg(test)]
mod tests {
use ruff_diagnostics::Applicability;

use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: error[unused-import] `os` imported but unused
fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
undef.py:1:4: error[undefined-name] Undefined name `a`
");
}

#[test]
fn show_fixes() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}

#[test]
fn show_fixes_preview() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
env.preview(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}

#[test]
fn show_fixes_syntax_errors() {
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
");
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
");
}

#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
");
}

#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Concise);

let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@"error[test-diagnostic] main diagnostic message",
);
}
}
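The snapshot tests above pin down the two concise line shapes: the default `path:line:col: severity[rule] message` form, and the Ruff-style `path:line:col: CODE [*] message` form when `hide_severity` and `show_fix_status` are enabled. The sketch below only reproduces those two string layouts for a single diagnostic; the struct and function are illustrative stand-ins, not the renderer itself.

```rust
// Illustrative stand-in for the two concise layouts shown in the snapshot
// tests above; not the actual ConciseRenderer.
struct ConciseLine<'a> {
    path: &'a str,
    line: usize,
    col: usize,
    severity: &'a str, // e.g. "error"
    rule: &'a str,     // e.g. "unused-import"
    code: &'a str,     // e.g. "F401"
    has_fix: bool,
    message: &'a str,
}

fn format_concise(d: &ConciseLine, hide_severity: bool, show_fix_status: bool) -> String {
    if hide_severity {
        // Ruff-style: `fib.py:1:8: F401 [*] `os` imported but unused`
        let fix = if show_fix_status && d.has_fix { "[*] " } else { "" };
        format!("{}:{}:{}: {} {}{}", d.path, d.line, d.col, d.code, fix, d.message)
    } else {
        // Default style: `fib.py:1:8: error[unused-import] `os` imported but unused`
        format!("{}:{}:{}: {}[{}] {}", d.path, d.line, d.col, d.severity, d.rule, d.message)
    }
}

fn main() {
    let d = ConciseLine {
        path: "fib.py",
        line: 1,
        col: 8,
        severity: "error",
        rule: "unused-import",
        code: "F401",
        has_fix: true,
        message: "`os` imported but unused",
    };
    println!("{}", format_concise(&d, false, false));
    println!("{}", format_concise(&d, true, true));
}
```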
66
crates/ruff_db/src/diagnostic/render/full.rs
Normal file
66
crates/ruff_db/src/diagnostic/render/full.rs
Normal file
@@ -0,0 +1,66 @@
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{create_diagnostics, create_syntax_error_diagnostics},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
error[unused-import]: `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`

error[unused-variable]: Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`

error[undefined-name]: Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|
"#);
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|

error[invalid-syntax]: SyntaxError: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|
");
}
}
@@ -87,7 +87,7 @@ pub(super) fn diagnostic_to_json<'a>(
|
||||
|
||||
let fix = diagnostic.fix().map(|fix| JsonFix {
|
||||
applicability: fix.applicability(),
|
||||
message: diagnostic.suggestion(),
|
||||
message: diagnostic.first_help_text(),
|
||||
edits: ExpandedEdits {
|
||||
edits: fix.edits(),
|
||||
notebook_index,
|
||||
|
||||
@@ -41,6 +41,8 @@ pub struct DiagnosticStylesheet {
|
||||
pub(crate) line_no: Style,
|
||||
pub(crate) emphasis: Style,
|
||||
pub(crate) none: Style,
|
||||
pub(crate) separator: Style,
|
||||
pub(crate) secondary_code: Style,
|
||||
}
|
||||
|
||||
impl Default for DiagnosticStylesheet {
|
||||
@@ -62,6 +64,8 @@ impl DiagnosticStylesheet {
|
||||
line_no: bright_blue.effects(Effects::BOLD),
|
||||
emphasis: Style::new().effects(Effects::BOLD),
|
||||
none: Style::new(),
|
||||
separator: AnsiColor::Cyan.on_default(),
|
||||
secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,6 +79,8 @@ impl DiagnosticStylesheet {
|
||||
line_no: Style::new(),
|
||||
emphasis: Style::new(),
|
||||
none: Style::new(),
|
||||
separator: Style::new(),
|
||||
secondary_code: Style::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use countme::Count;
|
||||
use dashmap::mapref::entry::Entry;
|
||||
pub use file_root::{FileRoot, FileRootKind};
|
||||
pub use path::FilePath;
|
||||
@@ -312,11 +311,6 @@ pub struct File {
|
||||
/// the file has been deleted is to change the status to `Deleted`.
|
||||
#[default]
|
||||
status: FileStatus,
|
||||
|
||||
/// Counter that counts the number of created file instances and active file instances.
|
||||
/// Only enabled in debug builds.
|
||||
#[default]
|
||||
count: Count<File>,
|
||||
}
|
||||
|
||||
// The Salsa heap is tracked separately.
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use countme::Count;
|
||||
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_source_file::LineIndex;
|
||||
@@ -38,11 +36,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
|
||||
};
|
||||
|
||||
SourceText {
|
||||
inner: Arc::new(SourceTextInner {
|
||||
kind,
|
||||
read_error,
|
||||
count: Count::new(),
|
||||
}),
|
||||
inner: Arc::new(SourceTextInner { kind, read_error }),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,8 +119,6 @@ impl std::fmt::Debug for SourceText {
|
||||
|
||||
#[derive(Eq, PartialEq, get_size2::GetSize)]
|
||||
struct SourceTextInner {
|
||||
#[get_size(ignore)]
|
||||
count: Count<SourceText>,
|
||||
kind: SourceTextKind,
|
||||
read_error: Option<SourceTextError>,
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ ty_python_semantic = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, optional = true }
|
||||
memchr = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::StringImports;
|
||||
use ruff_python_ast::visitor::source_order::{
|
||||
SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
|
||||
};
|
||||
@@ -10,13 +11,13 @@ pub(crate) struct Collector<'a> {
|
||||
/// The path to the current module.
|
||||
module_path: Option<&'a [String]>,
|
||||
/// Whether to detect imports from string literals.
|
||||
string_imports: bool,
|
||||
string_imports: StringImports,
|
||||
/// The collected imports from the Python AST.
|
||||
imports: Vec<CollectedImport>,
|
||||
}
|
||||
|
||||
impl<'a> Collector<'a> {
|
||||
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: bool) -> Self {
|
||||
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
|
||||
Self {
|
||||
module_path,
|
||||
string_imports,
|
||||
@@ -118,7 +119,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
|
||||
| Stmt::Continue(_)
|
||||
| Stmt::IpyEscapeCommand(_) => {
|
||||
// Only traverse simple statements when string imports is enabled.
|
||||
if self.string_imports {
|
||||
if self.string_imports.enabled {
|
||||
walk_stmt(self, stmt);
|
||||
}
|
||||
}
|
||||
@@ -126,20 +127,26 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'ast Expr) {
|
||||
if self.string_imports {
|
||||
if self.string_imports.enabled {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral {
|
||||
value,
|
||||
range: _,
|
||||
node_index: _,
|
||||
}) = expr
|
||||
{
|
||||
// Determine whether the string literal "looks like" an import statement: contains
|
||||
// a dot, and consists solely of valid Python identifiers.
|
||||
let value = value.to_str();
|
||||
if let Some(module_name) = ModuleName::new(value) {
|
||||
self.imports.push(CollectedImport::Import(module_name));
|
||||
// Determine whether the string literal "looks like" an import statement: contains
|
||||
// the requisite number of dots, and consists solely of valid Python identifiers.
|
||||
if self.string_imports.min_dots == 0
|
||||
|| memchr::memchr_iter(b'.', value.as_bytes()).count()
|
||||
>= self.string_imports.min_dots
|
||||
{
|
||||
if let Some(module_name) = ModuleName::new(value) {
|
||||
self.imports.push(CollectedImport::Import(module_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
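For context on the `min_dots` check added above: with the default threshold of 2 (see `StringImports::default()` further down), only dotted strings with at least two dots are treated as string imports. A minimal Python sketch, with hypothetical module and variable names, of what the collector would and would not pick up:

```python
import importlib

# Hypothetical examples; only the string literals themselves matter to the collector.
BACKEND = "myapp.plugins.backend"  # two dots, all parts are identifiers: collected (min_dots = 2)
HELPER = "os.path"                 # only one dot: skipped under the default threshold
GREETING = "hello world"           # not a valid dotted module name: never collected


def load_backend():
    return importlib.import_module(BACKEND)
```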
@@ -9,7 +9,7 @@ use ruff_python_parser::{Mode, ParseOptions, parse};
|
||||
use crate::collector::Collector;
|
||||
pub use crate::db::ModuleDb;
|
||||
use crate::resolver::Resolver;
|
||||
pub use crate::settings::{AnalyzeSettings, Direction};
|
||||
pub use crate::settings::{AnalyzeSettings, Direction, StringImports};
|
||||
|
||||
mod collector;
|
||||
mod db;
|
||||
@@ -26,7 +26,7 @@ impl ModuleImports {
|
||||
db: &ModuleDb,
|
||||
path: &SystemPath,
|
||||
package: Option<&SystemPath>,
|
||||
string_imports: bool,
|
||||
string_imports: StringImports,
|
||||
) -> Result<Self> {
|
||||
// Read and parse the source code.
|
||||
let source = std::fs::read_to_string(path)?;
|
||||
|
||||
@@ -20,7 +20,7 @@ impl<'a> Resolver<'a> {
|
||||
match import {
|
||||
CollectedImport::Import(import) => {
|
||||
let module = resolve_module(self.db, &import)?;
|
||||
Some(module.file()?.path(self.db))
|
||||
Some(module.file(self.db)?.path(self.db))
|
||||
}
|
||||
CollectedImport::ImportFrom(import) => {
|
||||
// Attempt to resolve the member (e.g., given `from foo import bar`, look for `foo.bar`).
|
||||
@@ -32,7 +32,7 @@ impl<'a> Resolver<'a> {
|
||||
resolve_module(self.db, &parent?)
|
||||
})?;
|
||||
|
||||
Some(module.file()?.path(self.db))
|
||||
Some(module.file(self.db)?.path(self.db))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ pub struct AnalyzeSettings {
|
||||
pub exclude: FilePatternSet,
|
||||
pub preview: PreviewMode,
|
||||
pub target_version: PythonVersion,
|
||||
pub detect_string_imports: bool,
|
||||
pub string_imports: StringImports,
|
||||
pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
|
||||
pub extension: ExtensionMapping,
|
||||
}
|
||||
@@ -26,7 +26,7 @@ impl fmt::Display for AnalyzeSettings {
|
||||
self.exclude,
|
||||
self.preview,
|
||||
self.target_version,
|
||||
self.detect_string_imports,
|
||||
self.string_imports,
|
||||
self.extension | debug,
|
||||
self.include_dependencies | debug,
|
||||
]
|
||||
@@ -35,6 +35,31 @@ impl fmt::Display for AnalyzeSettings {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, CacheKey)]
|
||||
pub struct StringImports {
|
||||
pub enabled: bool,
|
||||
pub min_dots: usize,
|
||||
}
|
||||
|
||||
impl Default for StringImports {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: false,
|
||||
min_dots: 2,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for StringImports {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if self.enabled {
|
||||
write!(f, "enabled (min_dots: {})", self.min_dots)
|
||||
} else {
|
||||
write!(f, "disabled")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, CacheKey)]
|
||||
#[cfg_attr(
|
||||
feature = "serde",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.12.4"
|
||||
version = "0.12.5"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -142,3 +142,7 @@ field47: typing.Optional[int] | typing.Optional[dict]
|
||||
# avoid reporting twice
|
||||
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
field49: typing.Optional[complex | complex] | complex
|
||||
|
||||
# Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
# Should throw duplicate union member but not fix
|
||||
isinstance(None, typing.Union[None, None])
|
||||
@@ -47,3 +47,19 @@ def _():
|
||||
from builtin import open
|
||||
|
||||
with open(p) as _: ... # No error
|
||||
|
||||
file = "file_1.py"
|
||||
|
||||
rename(file, "file_2.py")
|
||||
|
||||
rename(
|
||||
# commment 1
|
||||
file, # comment 2
|
||||
"file_2.py"
|
||||
,
|
||||
# comment 3
|
||||
)
|
||||
|
||||
rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
|
||||
rename(file, "file_2.py", src_dir_fd=1)
|
||||
@@ -84,3 +84,25 @@ class MyRequestHandler(BaseHTTPRequestHandler):
|
||||
def dont_GET(self):
|
||||
pass
|
||||
|
||||
|
||||
from http.server import CGIHTTPRequestHandler
|
||||
|
||||
|
||||
class MyCGIRequestHandler(CGIHTTPRequestHandler):
|
||||
def do_OPTIONS(self):
|
||||
pass
|
||||
|
||||
def dont_OPTIONS(self):
|
||||
pass
|
||||
|
||||
|
||||
from http.server import SimpleHTTPRequestHandler
|
||||
|
||||
|
||||
class MySimpleRequestHandler(SimpleHTTPRequestHandler):
|
||||
def do_OPTIONS(self):
|
||||
pass
|
||||
|
||||
def dont_OPTIONS(self):
|
||||
pass
|
||||
|
||||
|
||||
@@ -278,3 +278,15 @@ def f():
|
||||
for i in src:
|
||||
if lambda: 0:
|
||||
dst.append(i)
|
||||
|
||||
def f():
|
||||
i = "xyz"
|
||||
result = []
|
||||
for i in range(3):
|
||||
result.append(x for x in [i])
|
||||
|
||||
def f():
|
||||
i = "xyz"
|
||||
result = []
|
||||
for i in range(3):
|
||||
result.append((x for x in [i]))
|
||||
5
crates/ruff_linter/resources/test/fixtures/pylint/empty_comment_line_continuation.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
#
|
||||
x = 0 \
|
||||
#
|
||||
+1
|
||||
print(x)
|
||||
@@ -79,3 +79,8 @@ def in_type_def():
|
||||
from typing import cast
|
||||
a = 'int'
|
||||
cast('f"{a}"','11')
|
||||
|
||||
# Regression test for parser bug
|
||||
# https://github.com/astral-sh/ruff/issues/18860
|
||||
def fuzz_bug():
|
||||
c('{\t"i}')
|
||||
|
||||
@@ -65,3 +65,62 @@ class Foo:
|
||||
bar = "should've used attrs"
|
||||
|
||||
def __post_init__(self, bar: str = "ahhh", baz: str = "hmm") -> None: ...
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/18950
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(self, bar: int = (x := 1)) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
bar: int = (x := 1) # comment
|
||||
,
|
||||
baz: int = (y := 2), # comment
|
||||
foo = (a := 1) # comment
|
||||
,
|
||||
faz = (b := 2), # comment
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
bar: int = 1, # comment
|
||||
baz: int = 2, # comment
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
arg1: int = (1) # comment
|
||||
,
|
||||
arg2: int = ((1)) # comment
|
||||
,
|
||||
arg2: int = (i for i in range(10)) # comment
|
||||
,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
# makes little sense, but is valid syntax
|
||||
def fun_with_python_syntax():
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
bar: (int) = (yield from range(5)) # comment
|
||||
,
|
||||
) -> None:
|
||||
...
|
||||
|
||||
return Foo
|
||||
|
||||
@@ -53,3 +53,16 @@ regex.subn(br"""eak your machine with rm -""", rf"""/""")
|
||||
regex.splititer(both, non_literal)
|
||||
regex.subf(f, lambda _: r'means', '"format"')
|
||||
regex.subfn(fn, f'''a$1n't''', lambda: "'function'")
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/16713
|
||||
re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
re.compile("\b") # without fix
|
||||
re.compile("\"") # without fix
|
||||
re.compile("\'") # without fix
|
||||
re.compile('\"') # without fix
|
||||
re.compile('\'') # without fix
|
||||
re.compile("\\") # without fix
|
||||
re.compile("\101") # without fix
|
||||
re.compile("a\
|
||||
b") # without fix
|
||||
|
||||
@@ -91,3 +91,20 @@ regex.subf(
|
||||
br''br""br''
|
||||
)
|
||||
regex.subfn(br'I\s\nee*d\s[O0o]me\x20\Qoffe\E, ' br'b')
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/16713
|
||||
re.compile(
|
||||
"["
|
||||
"\U0001F600-\U0001F64F" # emoticons
|
||||
"\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
"\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
"\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
"\U00002702-\U000027B0"
|
||||
"\U000024C2-\U0001F251"
|
||||
"\u200d" # zero width joiner
|
||||
"\u200c" # zero width non-joiner
|
||||
"\\u200c" # must not be escaped in a raw string
|
||||
"]+",
|
||||
flags=re.UNICODE,
|
||||
)
|
||||
|
||||
3
crates/ruff_linter/resources/test/fixtures/ruff/RUF039_py_version_sensitive.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import re
|
||||
|
||||
re.compile("\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix
|
||||
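The version gate in the new fixture above comes from how the fix rewrites the pattern. A sketch of the before/after, assuming the fix simply adds the `r` prefix as RUF039 does for other patterns: in a raw string the `\N{...}` escape is passed through to the `re` module, which only supports it on Python 3.8 and newer.

```python
import re

# As written: Python itself resolves \N{Partial Differential} before `re` ever sees it.
re.compile("\N{Partial Differential}")

# After the (unsafe) fix: the escape survives into the pattern, and `re` only
# understands \N{...} escapes on Python 3.8+, hence the version-gated fix.
re.compile(r"\N{Partial Differential}")
```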
24
crates/ruff_linter/resources/test/fixtures/ruff/RUF055_3.py
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
import re
|
||||
|
||||
b_src = b"abc"
|
||||
|
||||
# Should be replaced with `b_src.replace(rb"x", b"y")`
|
||||
re.sub(rb"x", b"y", b_src)
|
||||
|
||||
# Should be replaced with `b_src.startswith(rb"abc")`
|
||||
if re.match(rb"abc", b_src):
|
||||
pass
|
||||
|
||||
# Should be replaced with `rb"x" in b_src`
|
||||
if re.search(rb"x", b_src):
|
||||
pass
|
||||
|
||||
# Should be replaced with `b_src.split(rb"abc")`
|
||||
re.split(rb"abc", b_src)
|
||||
|
||||
# Patterns containing metacharacters should NOT be replaced
|
||||
re.sub(rb"ab[c]", b"", b_src)
|
||||
re.match(rb"ab[c]", b_src)
|
||||
re.search(rb"ab[c]", b_src)
|
||||
re.fullmatch(rb"ab[c]", b_src)
|
||||
re.split(rb"ab[c]", b_src)
|
||||
@@ -1039,14 +1039,10 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
flake8_simplify::rules::zip_dict_keys_and_values(checker, call);
|
||||
}
|
||||
if checker.any_rule_enabled(&[
|
||||
Rule::OsChmod,
|
||||
Rule::OsMkdir,
|
||||
Rule::OsMakedirs,
|
||||
Rule::OsRename,
|
||||
Rule::OsReplace,
|
||||
Rule::OsStat,
|
||||
Rule::OsPathJoin,
|
||||
Rule::OsPathSamefile,
|
||||
Rule::OsPathSplitext,
|
||||
Rule::BuiltinOpen,
|
||||
Rule::PyPath,
|
||||
@@ -1112,6 +1108,18 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
if checker.is_rule_enabled(Rule::OsGetcwd) {
|
||||
flake8_use_pathlib::rules::os_getcwd(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsChmod) {
|
||||
flake8_use_pathlib::rules::os_chmod(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsRename) {
|
||||
flake8_use_pathlib::rules::os_rename(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsReplace) {
|
||||
flake8_use_pathlib::rules::os_replace(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsPathSamefile) {
|
||||
flake8_use_pathlib::rules::os_path_samefile(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::PathConstructorCurrentDirectory) {
|
||||
flake8_use_pathlib::rules::path_constructor_current_directory(
|
||||
checker, call, segments,
|
||||
|
||||
@@ -58,7 +58,7 @@ pub(crate) fn check_tokens(
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::EmptyComment) {
|
||||
pylint::rules::empty_comments(context, comment_ranges, locator);
|
||||
pylint::rules::empty_comments(context, comment_ranges, locator, indexer);
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::AmbiguousUnicodeCharacterComment) {
|
||||
|
||||
@@ -920,11 +920,11 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
|
||||
// flake8-use-pathlib
|
||||
(Flake8UsePathlib, "100") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathAbspath),
|
||||
(Flake8UsePathlib, "101") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsChmod),
|
||||
(Flake8UsePathlib, "101") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsChmod),
|
||||
(Flake8UsePathlib, "102") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsMkdir),
|
||||
(Flake8UsePathlib, "103") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsMakedirs),
|
||||
(Flake8UsePathlib, "104") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsRename),
|
||||
(Flake8UsePathlib, "105") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsReplace),
|
||||
(Flake8UsePathlib, "104") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRename),
|
||||
(Flake8UsePathlib, "105") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsReplace),
|
||||
(Flake8UsePathlib, "106") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRmdir),
|
||||
(Flake8UsePathlib, "107") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRemove),
|
||||
(Flake8UsePathlib, "108") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsUnlink),
|
||||
@@ -940,7 +940,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8UsePathlib, "118") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathJoin),
|
||||
(Flake8UsePathlib, "119") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathBasename),
|
||||
(Flake8UsePathlib, "120") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathDirname),
|
||||
(Flake8UsePathlib, "121") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathSamefile),
|
||||
(Flake8UsePathlib, "121") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathSamefile),
|
||||
(Flake8UsePathlib, "122") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathSplitext),
|
||||
(Flake8UsePathlib, "123") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::BuiltinOpen),
|
||||
(Flake8UsePathlib, "124") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::PyPath),
|
||||
|
||||
@@ -75,12 +75,13 @@ where
|
||||
);
|
||||
|
||||
let span = Span::from(file).with_range(range);
|
||||
let mut annotation = Annotation::primary(span);
|
||||
if let Some(suggestion) = suggestion {
|
||||
annotation = annotation.message(suggestion);
|
||||
}
|
||||
let annotation = Annotation::primary(span);
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
if let Some(suggestion) = suggestion {
|
||||
diagnostic.help(suggestion);
|
||||
}
|
||||
|
||||
if let Some(fix) = fix {
|
||||
diagnostic.set_fix(fix);
|
||||
}
|
||||
|
||||
@@ -6,13 +6,12 @@ use bitflags::bitflags;
|
||||
use colored::Colorize;
|
||||
use ruff_annotate_snippets::{Level, Renderer, Snippet};
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, SecondaryCode};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
use ruff_source_file::OneIndexed;
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::fs::relativize_path;
|
||||
use crate::line_width::{IndentWidth, LineWidthBuilder};
|
||||
use crate::message::diff::Diff;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
@@ -21,8 +20,6 @@ use crate::settings::types::UnsafeFixes;
|
||||
bitflags! {
|
||||
#[derive(Default)]
|
||||
struct EmitterFlags: u8 {
|
||||
/// Whether to show the fix status of a diagnostic.
|
||||
const SHOW_FIX_STATUS = 1 << 0;
|
||||
/// Whether to show the diff of a fix, for diagnostics that have a fix.
|
||||
const SHOW_FIX_DIFF = 1 << 1;
|
||||
/// Whether to show the source code of a diagnostic.
|
||||
@@ -30,17 +27,27 @@ bitflags! {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct TextEmitter {
|
||||
flags: EmitterFlags,
|
||||
unsafe_fixes: UnsafeFixes,
|
||||
config: DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl Default for TextEmitter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
flags: EmitterFlags::default(),
|
||||
config: DisplayDiagnosticConfig::default()
|
||||
.format(DiagnosticFormat::Concise)
|
||||
.hide_severity(true)
|
||||
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TextEmitter {
|
||||
#[must_use]
|
||||
pub fn with_show_fix_status(mut self, show_fix_status: bool) -> Self {
|
||||
self.flags
|
||||
.set(EmitterFlags::SHOW_FIX_STATUS, show_fix_status);
|
||||
self.config = self.config.show_fix_status(show_fix_status);
|
||||
self
|
||||
}
|
||||
|
||||
@@ -58,7 +65,15 @@ impl TextEmitter {
|
||||
|
||||
#[must_use]
|
||||
pub fn with_unsafe_fixes(mut self, unsafe_fixes: UnsafeFixes) -> Self {
|
||||
self.unsafe_fixes = unsafe_fixes;
|
||||
self.config = self
|
||||
.config
|
||||
.fix_applicability(unsafe_fixes.required_applicability());
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_preview(mut self, preview: bool) -> Self {
|
||||
self.config = self.config.preview(preview);
|
||||
self
|
||||
}
|
||||
}
|
||||
@@ -71,51 +86,10 @@ impl Emitter for TextEmitter {
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for message in diagnostics {
|
||||
write!(writer, "{}", message.display(context, &self.config))?;
|
||||
|
||||
let filename = message.expect_ruff_filename();
|
||||
write!(
|
||||
writer,
|
||||
"{path}{sep}",
|
||||
path = relativize_path(&filename).bold(),
|
||||
sep = ":".cyan(),
|
||||
)?;
|
||||
|
||||
let start_location = message.expect_ruff_start_location();
|
||||
let notebook_index = context.notebook_index(&filename);
|
||||
|
||||
// Check if we're working on a jupyter notebook and translate positions with cell accordingly
|
||||
let diagnostic_location = if let Some(notebook_index) = notebook_index {
|
||||
write!(
|
||||
writer,
|
||||
"cell {cell}{sep}",
|
||||
cell = notebook_index
|
||||
.cell(start_location.line)
|
||||
.unwrap_or(OneIndexed::MIN),
|
||||
sep = ":".cyan(),
|
||||
)?;
|
||||
|
||||
LineColumn {
|
||||
line: notebook_index
|
||||
.cell_row(start_location.line)
|
||||
.unwrap_or(OneIndexed::MIN),
|
||||
column: start_location.column,
|
||||
}
|
||||
} else {
|
||||
start_location
|
||||
};
|
||||
|
||||
writeln!(
|
||||
writer,
|
||||
"{row}{sep}{col}{sep} {code_and_body}",
|
||||
row = diagnostic_location.line,
|
||||
col = diagnostic_location.column,
|
||||
sep = ":".cyan(),
|
||||
code_and_body = RuleCodeAndBody {
|
||||
message,
|
||||
show_fix_status: self.flags.intersects(EmitterFlags::SHOW_FIX_STATUS),
|
||||
unsafe_fixes: self.unsafe_fixes,
|
||||
}
|
||||
)?;
|
||||
|
||||
if self.flags.intersects(EmitterFlags::SHOW_SOURCE) {
|
||||
// The `0..0` range is used to highlight file-level diagnostics.
|
||||
if message.expect_range() != TextRange::default() {
|
||||
@@ -186,7 +160,7 @@ pub(super) struct MessageCodeFrame<'a> {
|
||||
|
||||
impl Display for MessageCodeFrame<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let suggestion = self.message.suggestion();
|
||||
let suggestion = self.message.first_help_text();
|
||||
let footers = if let Some(suggestion) = suggestion {
|
||||
vec![Level::Help.title(suggestion)]
|
||||
} else {
|
||||
|
||||
@@ -134,6 +134,26 @@ pub(crate) const fn is_fix_os_path_dirname_enabled(settings: &LinterSettings) ->
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19404
|
||||
pub(crate) const fn is_fix_os_chmod_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19404
|
||||
pub(crate) const fn is_fix_os_rename_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19404
|
||||
pub(crate) const fn is_fix_os_replace_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19404
|
||||
pub(crate) const fn is_fix_os_path_samefile_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/19245
|
||||
pub(crate) const fn is_fix_os_getcwd_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
|
||||
@@ -64,6 +64,7 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let mut union_type = UnionKind::TypingUnion;
|
||||
let mut optional_present = false;
|
||||
// Adds a member to `literal_exprs` if it is a `Literal` annotation
|
||||
let mut check_for_duplicate_members = |expr: &'a Expr, parent: &'a Expr| {
|
||||
if matches!(parent, Expr::BinOp(_)) {
|
||||
@@ -74,6 +75,7 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
&& is_optional_type(checker, expr)
|
||||
{
|
||||
// If the union member is an `Optional`, add a virtual `None` literal.
|
||||
optional_present = true;
|
||||
&VIRTUAL_NONE_LITERAL
|
||||
} else {
|
||||
expr
|
||||
@@ -87,7 +89,7 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
DuplicateUnionMember {
|
||||
duplicate_name: checker.generator().expr(virtual_expr),
|
||||
},
|
||||
// Use the real expression's range for diagnostics,
|
||||
// Use the real expression's range for diagnostics.
|
||||
expr.range(),
|
||||
));
|
||||
}
|
||||
@@ -104,6 +106,13 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Do not reduce `Union[None, ... None]` to avoid introducing a `TypeError` unintentionally
|
||||
// e.g. `isinstance(None, Union[None, None])`, if reduced to `isinstance(None, None)`, causes
|
||||
// `TypeError: isinstance() arg 2 must be a type, a tuple of types, or a union` to throw.
|
||||
if unique_nodes.iter().all(|expr| expr.is_none_literal_expr()) && !optional_present {
|
||||
return;
|
||||
}
|
||||
|
||||
// Mark [`Fix`] as unsafe when comments are in range.
|
||||
let applicability = if checker.comment_ranges().intersects(expr.range()) {
|
||||
Applicability::Unsafe
|
||||
|
||||
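To make the new guard above concrete, a short Python illustration of why an all-`None` union is reported but deliberately left unfixed:

```python
import typing

# Reported by PYI016, but not fixed: `Union[None, None]` collapses to `NoneType`
# at subscription time, so this call is valid at runtime.
isinstance(None, typing.Union[None, None])

# A naive reduction would produce the call below, which raises
# "TypeError: isinstance() arg 2 must be a type, a tuple of types, or a union".
# isinstance(None, None)
```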
@@ -974,6 +974,8 @@ PYI016.py:143:61: PYI016 [*] Duplicate union member `complex`
|
||||
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
143 |+field48: typing.Union[typing.Optional[complex], complex]
|
||||
144 144 | field49: typing.Optional[complex | complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
||||
PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -981,6 +983,8 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 | field49: typing.Optional[complex | complex] | complex
|
||||
| ^^^^^^^ PYI016
|
||||
145 |
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
|
||||
= help: Remove duplicate union member `complex`
|
||||
|
||||
@@ -990,3 +994,15 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 |-field49: typing.Optional[complex | complex] | complex
|
||||
144 |+field49: typing.Optional[complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 147 | # Should throw duplicate union member but not fix
|
||||
|
||||
PYI016.py:148:37: PYI016 Duplicate union member `None`
|
||||
|
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 | # Should throw duplicate union member but not fix
|
||||
148 | isinstance(None, typing.Union[None, None])
|
||||
| ^^^^ PYI016
|
||||
|
|
||||
= help: Remove duplicate union member `None`
|
||||
|
||||
@@ -1162,6 +1162,8 @@ PYI016.py:143:61: PYI016 [*] Duplicate union member `complex`
|
||||
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
143 |+field48: typing.Union[None, complex]
|
||||
144 144 | field49: typing.Optional[complex | complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
||||
PYI016.py:143:72: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -1179,6 +1181,8 @@ PYI016.py:143:72: PYI016 [*] Duplicate union member `complex`
|
||||
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
143 |+field48: typing.Union[None, complex]
|
||||
144 144 | field49: typing.Optional[complex | complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
||||
PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -1186,6 +1190,8 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 | field49: typing.Optional[complex | complex] | complex
|
||||
| ^^^^^^^ PYI016
|
||||
145 |
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
|
||||
= help: Remove duplicate union member `complex`
|
||||
|
||||
@@ -1195,6 +1201,9 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 |-field49: typing.Optional[complex | complex] | complex
|
||||
144 |+field49: None | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 147 | # Should throw duplicate union member but not fix
|
||||
|
||||
PYI016.py:144:47: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -1202,6 +1211,8 @@ PYI016.py:144:47: PYI016 [*] Duplicate union member `complex`
|
||||
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 | field49: typing.Optional[complex | complex] | complex
|
||||
| ^^^^^^^ PYI016
|
||||
145 |
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
|
||||
= help: Remove duplicate union member `complex`
|
||||
|
||||
@@ -1211,3 +1222,15 @@ PYI016.py:144:47: PYI016 [*] Duplicate union member `complex`
|
||||
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 |-field49: typing.Optional[complex | complex] | complex
|
||||
144 |+field49: None | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 147 | # Should throw duplicate union member but not fix
|
||||
|
||||
PYI016.py:148:37: PYI016 Duplicate union member `None`
|
||||
|
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 | # Should throw duplicate union member but not fix
|
||||
148 | isinstance(None, typing.Union[None, None])
|
||||
| ^^^^ PYI016
|
||||
|
|
||||
= help: Remove duplicate union member `None`
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::importer::ImportRequest;
|
||||
use crate::{Applicability, Edit, Fix, Violation};
|
||||
use ruff_python_ast::{self as ast};
|
||||
use ruff_python_ast::{Expr, ExprCall};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprCall};
|
||||
use ruff_python_semantic::{SemanticModel, analyze::typing};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
pub(crate) fn is_keyword_only_argument_non_default(arguments: &ast::Arguments, name: &str) -> bool {
|
||||
@@ -72,3 +72,85 @@ pub(crate) fn check_os_pathlib_single_arg_calls(
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_name_expr(expr: &Expr) -> Option<&ast::ExprName> {
|
||||
match expr {
|
||||
Expr::Name(name) => Some(name),
|
||||
Expr::Call(ExprCall { func, .. }) => get_name_expr(func),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the given expression looks like a file descriptor, i.e., if it is an integer.
|
||||
pub(crate) fn is_file_descriptor(expr: &Expr, semantic: &SemanticModel) -> bool {
|
||||
if matches!(
|
||||
expr,
|
||||
Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: ast::Number::Int(_),
|
||||
..
|
||||
})
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let Some(name) = get_name_expr(expr) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let Some(binding) = semantic.only_binding(name).map(|id| semantic.binding(id)) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
typing::is_int(binding, semantic)
|
||||
}
|
||||
|
||||
pub(crate) fn check_os_pathlib_two_arg_calls(
|
||||
checker: &Checker,
|
||||
call: &ExprCall,
|
||||
attr: &str,
|
||||
path_arg: &str,
|
||||
second_arg: &str,
|
||||
fix_enabled: bool,
|
||||
violation: impl Violation,
|
||||
) {
|
||||
let range = call.range();
|
||||
let mut diagnostic = checker.report_diagnostic(violation, call.func.range());
|
||||
|
||||
let (Some(path_expr), Some(second_expr)) = (
|
||||
call.arguments.find_argument_value(path_arg, 0),
|
||||
call.arguments.find_argument_value(second_arg, 1),
|
||||
) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let path_code = checker.locator().slice(path_expr.range());
|
||||
let second_code = checker.locator().slice(second_expr.range());
|
||||
|
||||
if fix_enabled {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let (import_edit, binding) = checker.importer().get_or_import_symbol(
|
||||
&ImportRequest::import("pathlib", "Path"),
|
||||
call.start(),
|
||||
checker.semantic(),
|
||||
)?;
|
||||
|
||||
let replacement = if is_pathlib_path_call(checker, path_expr) {
|
||||
format!("{path_code}.{attr}({second_code})")
|
||||
} else {
|
||||
format!("{binding}({path_code}).{attr}({second_code})")
|
||||
};
|
||||
|
||||
let applicability = if checker.comment_ranges().intersects(range) {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
};
|
||||
|
||||
Ok(Fix::applicable_edits(
|
||||
Edit::range_replacement(replacement, range),
|
||||
[import_edit],
|
||||
applicability,
|
||||
))
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
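A hedged Python sketch of the rewrites that `check_os_pathlib_two_arg_calls` builds for the callers added below (file names are illustrative): the first argument is wrapped in `Path(...)` unless it is already a `Path(...)` call, and the fix is downgraded to unsafe when comments intersect the call.

```python
import os
from pathlib import Path

# Plain string argument: wrapped in Path(...) by the preview fix.
os.rename("old.py", "new.py")
Path("old.py").rename("new.py")

# Already a Path(...) call: reused as-is instead of being wrapped again.
os.chmod(Path("script.py"), 0o644)
Path("script.py").chmod(0o644)

# Comments inside the call would be dropped by the rewrite,
# so the fix is marked unsafe here.
os.replace(
    "old.py",  # original name
    "new.py",
)
```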
@@ -1,5 +1,6 @@
|
||||
pub(crate) use glob_rule::*;
|
||||
pub(crate) use invalid_pathlib_with_suffix::*;
|
||||
pub(crate) use os_chmod::*;
|
||||
pub(crate) use os_getcwd::*;
|
||||
pub(crate) use os_path_abspath::*;
|
||||
pub(crate) use os_path_basename::*;
|
||||
@@ -14,8 +15,11 @@ pub(crate) use os_path_isabs::*;
|
||||
pub(crate) use os_path_isdir::*;
|
||||
pub(crate) use os_path_isfile::*;
|
||||
pub(crate) use os_path_islink::*;
|
||||
pub(crate) use os_path_samefile::*;
|
||||
pub(crate) use os_readlink::*;
|
||||
pub(crate) use os_remove::*;
|
||||
pub(crate) use os_rename::*;
|
||||
pub(crate) use os_replace::*;
|
||||
pub(crate) use os_rmdir::*;
|
||||
pub(crate) use os_sep_split::*;
|
||||
pub(crate) use os_unlink::*;
|
||||
@@ -24,6 +28,7 @@ pub(crate) use replaceable_by_pathlib::*;
|
||||
|
||||
mod glob_rule;
|
||||
mod invalid_pathlib_with_suffix;
|
||||
mod os_chmod;
|
||||
mod os_getcwd;
|
||||
mod os_path_abspath;
|
||||
mod os_path_basename;
|
||||
@@ -38,8 +43,11 @@ mod os_path_isabs;
|
||||
mod os_path_isdir;
|
||||
mod os_path_isfile;
|
||||
mod os_path_islink;
|
||||
mod os_path_samefile;
|
||||
mod os_readlink;
|
||||
mod os_remove;
|
||||
mod os_rename;
|
||||
mod os_replace;
|
||||
mod os_rmdir;
|
||||
mod os_sep_split;
|
||||
mod os_unlink;
|
||||
|
||||
@@ -0,0 +1,94 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_chmod_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
check_os_pathlib_two_arg_calls, is_file_descriptor, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.chmod`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.chmod()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.chmod()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.chmod("file.py", 0o444)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("file.py").chmod(0o444)
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.chmod`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.chmod)
|
||||
/// - [Python documentation: `os.chmod`](https://docs.python.org/3/library/os.html#os.chmod)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsChmod;
|
||||
|
||||
impl Violation for OsChmod {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.chmod()` should be replaced by `Path.chmod()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).chmod(...)`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH101
|
||||
pub(crate) fn os_chmod(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "chmod"] {
|
||||
return;
|
||||
}
|
||||
|
||||
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.chmod)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.chmod(path, mode, *, dir_fd=None, follow_symlinks=True)
|
||||
// ```
|
||||
if call
|
||||
.arguments
|
||||
.find_argument_value("path", 0)
|
||||
.is_some_and(|expr| is_file_descriptor(expr, checker.semantic()))
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"chmod",
|
||||
"path",
|
||||
"mode",
|
||||
is_fix_os_chmod_enabled(checker.settings()),
|
||||
OsChmod,
|
||||
);
|
||||
}
|
||||
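For the new PTH101 module above, a small Python illustration of the calls it reports versus the ones its early returns deliberately leave alone (file descriptors and non-default `dir_fd`):

```python
import os
from pathlib import Path

os.chmod("script.py", 0o644)            # reported; preview fix: Path("script.py").chmod(0o644)

os.chmod(0, 0o444)                      # not reported: an integer path is a file descriptor,
                                        # which has no pathlib equivalent

os.chmod("script.py", 0o644, dir_fd=3)  # not reported: `dir_fd` is not supported by pathlib
```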
@@ -0,0 +1,77 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_path_samefile_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::check_os_pathlib_two_arg_calls;
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.samefile`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os.path`. When possible, using `Path` object
|
||||
/// methods such as `Path.samefile()` can improve readability over the `os.path`
|
||||
/// module's counterparts (e.g., `os.path.samefile()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.path.samefile("f1.py", "f2.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("f1.py").samefile("f2.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.samefile`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.samefile)
|
||||
/// - [Python documentation: `os.path.samefile`](https://docs.python.org/3/library/os.path.html#os.path.samefile)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsPathSamefile;
|
||||
|
||||
impl Violation for OsPathSamefile {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.path.samefile()` should be replaced by `Path.samefile()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).samefile()`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH121
|
||||
pub(crate) fn os_path_samefile(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "path", "samefile"] {
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"samefile",
|
||||
"f1",
|
||||
"f2",
|
||||
is_fix_os_path_samefile_enabled(checker.settings()),
|
||||
OsPathSamefile,
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_rename_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
check_os_pathlib_two_arg_calls, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.rename`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.rename()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.rename()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.rename("old.py", "new.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("old.py").rename("new.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.rename`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.rename)
|
||||
/// - [Python documentation: `os.rename`](https://docs.python.org/3/library/os.html#os.rename)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsRename;
|
||||
|
||||
impl Violation for OsRename {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.rename()` should be replaced by `Path.rename()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).rename(...)`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH104
|
||||
pub(crate) fn os_rename(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "rename"] {
|
||||
return;
|
||||
}
|
||||
// `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
|
||||
// set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.rename)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
|
||||
// ```
|
||||
if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"rename",
|
||||
"src",
|
||||
"dst",
|
||||
is_fix_os_rename_enabled(checker.settings()),
|
||||
OsRename,
|
||||
);
|
||||
}
|
||||
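Likewise for the new PTH104 module: a brief sketch of what is reported and what the `src_dir_fd`/`dst_dir_fd` guard skips.

```python
import os
from pathlib import Path

os.rename("draft.txt", "final.txt")     # reported; preview fix: Path("draft.txt").rename("final.txt")

dir_fd = os.open(".", os.O_RDONLY)
os.rename("draft.txt", "final.txt", src_dir_fd=dir_fd)  # not reported: dir_fd arguments
                                                        # have no pathlib counterpart
os.close(dir_fd)
```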
@@ -0,0 +1,94 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_replace_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
check_os_pathlib_two_arg_calls, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.replace`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.replace()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.replace()`).
|
||||
///
|
||||
/// Note that `os` functions may be preferable if performance is a concern,
|
||||
/// e.g., in hot loops.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.replace("old.py", "new.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("old.py").replace("new.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.replace`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.replace)
|
||||
/// - [Python documentation: `os.replace`](https://docs.python.org/3/library/os.html#os.replace)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsReplace;
|
||||
|
||||
impl Violation for OsReplace {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.replace()` should be replaced by `Path.replace()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).replace(...)`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH105
|
||||
pub(crate) fn os_replace(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "replace"] {
|
||||
return;
|
||||
}
|
||||
// `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
|
||||
// set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.replace)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
|
||||
// ```
|
||||
if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"replace",
|
||||
"src",
|
||||
"dst",
|
||||
is_fix_os_replace_enabled(checker.settings()),
|
||||
OsReplace,
|
||||
);
|
||||
}
|
||||
@@ -1,14 +1,16 @@
|
||||
use ruff_python_ast::{self as ast, Expr, ExprBooleanLiteral, ExprCall};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_semantic::analyze::typing;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_use_pathlib::helpers::is_keyword_only_argument_non_default;
|
||||
use crate::rules::flake8_use_pathlib::rules::Glob;
|
||||
use crate::rules::flake8_use_pathlib::violations::{
|
||||
BuiltinOpen, Joiner, OsChmod, OsListdir, OsMakedirs, OsMkdir, OsPathJoin, OsPathSamefile,
|
||||
OsPathSplitext, OsRename, OsReplace, OsStat, OsSymlink, PyPath,
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
is_file_descriptor, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::rules::flake8_use_pathlib::{
|
||||
rules::Glob,
|
||||
violations::{
|
||||
BuiltinOpen, Joiner, OsListdir, OsMakedirs, OsMkdir, OsPathJoin, OsPathSplitext, OsStat,
|
||||
OsSymlink, PyPath,
|
||||
},
|
||||
};
|
||||
|
||||
pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
@@ -18,24 +20,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
|
||||
let range = call.func.range();
|
||||
match qualified_name.segments() {
|
||||
// PTH101
|
||||
["os", "chmod"] => {
|
||||
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.chmod)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.chmod(path, mode, *, dir_fd=None, follow_symlinks=True)
|
||||
// ```
|
||||
if call
|
||||
.arguments
|
||||
                .find_argument_value("path", 0)
                .is_some_and(|expr| is_file_descriptor(expr, checker.semantic()))
                || is_keyword_only_argument_non_default(&call.arguments, "dir_fd")
            {
                return;
            }
            checker.report_diagnostic_if_enabled(OsChmod, range)
        }
        // PTH102
        ["os", "makedirs"] => checker.report_diagnostic_if_enabled(OsMakedirs, range),
        // PTH103
@@ -51,38 +35,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
            }
            checker.report_diagnostic_if_enabled(OsMkdir, range)
        }
        // PTH104
        ["os", "rename"] => {
            // `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
            // set to non-default values.
            // Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.rename)
            // ```text
            //            0    1       2                3
            // os.rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
            // ```
            if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
                || is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
            {
                return;
            }
            checker.report_diagnostic_if_enabled(OsRename, range)
        }
        // PTH105
        ["os", "replace"] => {
            // `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
            // set to non-default values.
            // Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.replace)
            // ```text
            //             0    1       2                3
            // os.replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
            // ```
            if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
                || is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
            {
                return;
            }
            checker.report_diagnostic_if_enabled(OsReplace, range)
        }
        // PTH116
        ["os", "stat"] => {
            // `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
@@ -124,8 +76,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
            },
            range,
        ),
        // PTH121
        ["os", "path", "samefile"] => checker.report_diagnostic_if_enabled(OsPathSamefile, range),
        // PTH122
        ["os", "path", "splitext"] => checker.report_diagnostic_if_enabled(OsPathSplitext, range),
        // PTH211
@@ -234,37 +184,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
    };
}

/// Returns `true` if the given expression looks like a file descriptor, i.e., if it is an integer.
fn is_file_descriptor(expr: &Expr, semantic: &SemanticModel) -> bool {
    if matches!(
        expr,
        Expr::NumberLiteral(ast::ExprNumberLiteral {
            value: ast::Number::Int(_),
            ..
        })
    ) {
        return true;
    }

    let Some(name) = get_name_expr(expr) else {
        return false;
    };

    let Some(binding) = semantic.only_binding(name).map(|id| semantic.binding(id)) else {
        return false;
    };

    typing::is_int(binding, semantic)
}

fn get_name_expr(expr: &Expr) -> Option<&ast::ExprName> {
    match expr {
        Expr::Name(name) => Some(name),
        Expr::Call(ExprCall { func, .. }) => get_name_expr(func),
        _ => None,
    }
}

/// Returns `true` if argument `name` is set to a non-default `None` value.
fn is_argument_non_default(arguments: &ast::Arguments, name: &str, position: usize) -> bool {
    arguments
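The checker bails out whenever a call uses something `pathlib` cannot express: an integer file descriptor as the path, or a non-default `*_dir_fd` keyword. As a rough Python illustration of that gating (a hedged sketch based on the rule logic above, not taken from the test fixtures that follow):

```python
import os

os.chmod("file.py", 0o644)                   # PTH101: Path("file.py").chmod(0o644)
os.chmod(0, 0o644)                           # skipped: the path is an integer file descriptor
os.chmod("file.py", 0o644, dir_fd=3)         # skipped: `dir_fd` has no pathlib equivalent

os.rename("old.py", "new.py")                # PTH104: Path("old.py").rename("new.py")
os.rename("old.py", "new.py", src_dir_fd=3)  # skipped: keyword-only fd argument is non-default
```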
@@ -20,6 +20,7 @@ full_name.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = os.mkdir(p)
|
||||
10 | os.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
full_name.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ full_name.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | os.replace(p)
|
||||
13 | os.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | os.rmdir(p)
|
||||
14 | os.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
full_name.py:13:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -253,6 +256,7 @@ full_name.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
31 | os.path.splitext(p)
|
||||
32 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
full_name.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -20,6 +20,7 @@ import_as.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = foo.mkdir(p)
|
||||
10 | foo.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_as.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ import_as.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | foo.replace(p)
|
||||
13 | foo.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | foo.rmdir(p)
|
||||
14 | foo.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_as.py:13:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -252,6 +255,7 @@ import_as.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
| ^^^^^^^^^^^^^^ PTH121
|
||||
31 | foo_p.splitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_as.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -20,6 +20,7 @@ import_from.py:10:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
11 | aaa = mkdir(p)
|
||||
12 | makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from.py:11:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ import_from.py:13:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
14 | replace(p)
|
||||
15 | rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()
|
||||
15 | rmdir(p)
|
||||
16 | remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from.py:15:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -253,6 +256,7 @@ import_from.py:32:1: PTH121 `os.path.samefile()` should be replaced by `Path.sam
|
||||
33 | splitext(p)
|
||||
34 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from.py:33:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
@@ -289,3 +293,36 @@ import_from.py:43:10: PTH123 `open()` should be replaced by `Path.open()`
|
||||
43 | with open(p) as _: ... # Error
|
||||
| ^^^^ PTH123
|
||||
|
|
||||
|
||||
import_from.py:53:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
51 | file = "file_1.py"
|
||||
52 |
|
||||
53 | rename(file, "file_2.py")
|
||||
| ^^^^^^ PTH104
|
||||
54 |
|
||||
55 | rename(
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:55:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
53 | rename(file, "file_2.py")
|
||||
54 |
|
||||
55 | rename(
|
||||
| ^^^^^^ PTH104
|
||||
56 | # commment 1
|
||||
57 | file, # comment 2
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:63:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
61 | )
|
||||
62 |
|
||||
63 | rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
| ^^^^^^ PTH104
|
||||
64 |
|
||||
65 | rename(file, "file_2.py", src_dir_fd=1)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
@@ -20,6 +20,7 @@ import_from_as.py:15:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
16 | aaa = xmkdir(p)
|
||||
17 | xmakedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from_as.py:16:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ import_from_as.py:18:1: PTH104 `os.rename()` should be replaced by `Path.rename(
|
||||
19 | xreplace(p)
|
||||
20 | xrmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replac
|
||||
20 | xrmdir(p)
|
||||
21 | xremove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from_as.py:20:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -252,6 +255,7 @@ import_from_as.py:37:1: PTH121 `os.path.samefile()` should be replaced by `Path.
|
||||
| ^^^^^^^^^ PTH121
|
||||
38 | xsplitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from_as.py:38:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -34,6 +34,7 @@ full_name.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = os.mkdir(p)
|
||||
10 | os.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
full_name.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -64,6 +65,7 @@ full_name.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | os.replace(p)
|
||||
13 | os.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -74,6 +76,7 @@ full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | os.rmdir(p)
|
||||
14 | os.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
full_name.py:13:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -471,6 +474,7 @@ full_name.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
31 | os.path.splitext(p)
|
||||
32 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
full_name.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -34,6 +34,7 @@ import_as.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = foo.mkdir(p)
|
||||
10 | foo.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_as.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -64,6 +65,7 @@ import_as.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | foo.replace(p)
|
||||
13 | foo.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -74,6 +76,7 @@ import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | foo.rmdir(p)
|
||||
14 | foo.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_as.py:13:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -469,6 +472,7 @@ import_as.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
| ^^^^^^^^^^^^^^ PTH121
|
||||
31 | foo_p.splitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_as.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -35,6 +35,7 @@ import_from.py:10:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
11 | aaa = mkdir(p)
|
||||
12 | makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from.py:11:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -65,6 +66,7 @@ import_from.py:13:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
14 | replace(p)
|
||||
15 | rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -75,6 +77,7 @@ import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()
|
||||
15 | rmdir(p)
|
||||
16 | remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from.py:15:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -484,6 +487,7 @@ import_from.py:32:1: PTH121 `os.path.samefile()` should be replaced by `Path.sam
|
||||
33 | splitext(p)
|
||||
34 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from.py:33:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
@@ -520,3 +524,95 @@ import_from.py:43:10: PTH123 `open()` should be replaced by `Path.open()`
|
||||
43 | with open(p) as _: ... # Error
|
||||
| ^^^^ PTH123
|
||||
|
|
||||
|
||||
import_from.py:53:1: PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
51 | file = "file_1.py"
|
||||
52 |
|
||||
53 | rename(file, "file_2.py")
|
||||
| ^^^^^^ PTH104
|
||||
54 |
|
||||
55 | rename(
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
ℹ Safe fix
|
||||
2 2 | from os import remove, unlink, getcwd, readlink, stat
|
||||
3 3 | from os.path import abspath, exists, expanduser, isdir, isfile, islink
|
||||
4 4 | from os.path import isabs, join, basename, dirname, samefile, splitext
|
||||
5 |+import pathlib
|
||||
5 6 |
|
||||
6 7 | p = "/foo"
|
||||
7 8 | q = "bar"
|
||||
--------------------------------------------------------------------------------
|
||||
50 51 |
|
||||
51 52 | file = "file_1.py"
|
||||
52 53 |
|
||||
53 |-rename(file, "file_2.py")
|
||||
54 |+pathlib.Path(file).rename("file_2.py")
|
||||
54 55 |
|
||||
55 56 | rename(
|
||||
56 57 | # commment 1
|
||||
|
||||
import_from.py:55:1: PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
53 | rename(file, "file_2.py")
|
||||
54 |
|
||||
55 | rename(
|
||||
| ^^^^^^ PTH104
|
||||
56 | # commment 1
|
||||
57 | file, # comment 2
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | from os import remove, unlink, getcwd, readlink, stat
|
||||
3 3 | from os.path import abspath, exists, expanduser, isdir, isfile, islink
|
||||
4 4 | from os.path import isabs, join, basename, dirname, samefile, splitext
|
||||
5 |+import pathlib
|
||||
5 6 |
|
||||
6 7 | p = "/foo"
|
||||
7 8 | q = "bar"
|
||||
--------------------------------------------------------------------------------
|
||||
52 53 |
|
||||
53 54 | rename(file, "file_2.py")
|
||||
54 55 |
|
||||
55 |-rename(
|
||||
56 |- # commment 1
|
||||
57 |- file, # comment 2
|
||||
58 |- "file_2.py"
|
||||
59 |- ,
|
||||
60 |- # comment 3
|
||||
61 |-)
|
||||
56 |+pathlib.Path(file).rename("file_2.py")
|
||||
62 57 |
|
||||
63 58 | rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
64 59 |
|
||||
|
||||
import_from.py:63:1: PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
61 | )
|
||||
62 |
|
||||
63 | rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
| ^^^^^^ PTH104
|
||||
64 |
|
||||
65 | rename(file, "file_2.py", src_dir_fd=1)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
ℹ Safe fix
|
||||
2 2 | from os import remove, unlink, getcwd, readlink, stat
|
||||
3 3 | from os.path import abspath, exists, expanduser, isdir, isfile, islink
|
||||
4 4 | from os.path import isabs, join, basename, dirname, samefile, splitext
|
||||
5 |+import pathlib
|
||||
5 6 |
|
||||
6 7 | p = "/foo"
|
||||
7 8 | q = "bar"
|
||||
--------------------------------------------------------------------------------
|
||||
60 61 | # comment 3
|
||||
61 62 | )
|
||||
62 63 |
|
||||
63 |-rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
64 |+pathlib.Path(file).rename("file_2.py")
|
||||
64 65 |
|
||||
65 66 | rename(file, "file_2.py", src_dir_fd=1)
|
||||
|
||||
@@ -35,6 +35,7 @@ import_from_as.py:15:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
16 | aaa = xmkdir(p)
|
||||
17 | xmakedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from_as.py:16:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -65,6 +66,7 @@ import_from_as.py:18:1: PTH104 `os.rename()` should be replaced by `Path.rename(
|
||||
19 | xreplace(p)
|
||||
20 | xrmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -75,6 +77,7 @@ import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replac
|
||||
20 | xrmdir(p)
|
||||
21 | xremove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from_as.py:20:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -482,6 +485,7 @@ import_from_as.py:37:1: PTH121 `os.path.samefile()` should be replaced by `Path.
|
||||
| ^^^^^^^^^ PTH121
|
||||
38 | xsplitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from_as.py:38:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -2,51 +2,6 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};

use crate::Violation;

/// ## What it does
/// Checks for uses of `os.chmod`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.chmod()` can improve readability over the `os`
/// module's counterparts (e.g., `os.chmod()`).
///
/// ## Examples
/// ```python
/// import os
///
/// os.chmod("file.py", 0o444)
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// Path("file.py").chmod(0o444)
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.chmod`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.chmod)
/// - [Python documentation: `os.chmod`](https://docs.python.org/3/library/os.html#os.chmod)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsChmod;

impl Violation for OsChmod {
    #[derive_message_formats]
    fn message(&self) -> String {
        "`os.chmod()` should be replaced by `Path.chmod()`".to_string()
    }
}

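The `Known issues` section above asserts a performance gap without quantifying it. A quick way to measure it on your own machine (a throwaway sketch, not part of this diff; the file name and iteration counts are arbitrary):

```python
import timeit

# Create a scratch file, then time an os call against its Path counterpart.
setup = (
    "import os\n"
    "from pathlib import Path\n"
    "open('scratch.txt', 'a').close()\n"
    "s = 'scratch.txt'\n"
    "p = Path(s)\n"
)

print(timeit.timeit("os.stat(s)", setup=setup, number=50_000))
print(timeit.timeit("p.stat()", setup=setup, number=50_000))
```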
/// ## What it does
|
||||
/// Checks for uses of `os.makedirs`.
|
||||
///
|
||||
@@ -137,99 +92,6 @@ impl Violation for OsMkdir {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.rename`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.rename()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.rename()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.rename("old.py", "new.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("old.py").rename("new.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.rename`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.rename)
|
||||
/// - [Python documentation: `os.rename`](https://docs.python.org/3/library/os.html#os.rename)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsRename;
|
||||
|
||||
impl Violation for OsRename {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.rename()` should be replaced by `Path.rename()`".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.replace`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.replace()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.replace()`).
|
||||
///
|
||||
/// Note that `os` functions may be preferable if performance is a concern,
|
||||
/// e.g., in hot loops.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.replace("old.py", "new.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("old.py").replace("new.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.replace`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.replace)
|
||||
/// - [Python documentation: `os.replace`](https://docs.python.org/3/library/os.html#os.replace)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsReplace;
|
||||
|
||||
impl Violation for OsReplace {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.replace()` should be replaced by `Path.replace()`".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.stat`.
|
||||
///
|
||||
@@ -347,51 +209,6 @@ pub(crate) enum Joiner {
|
||||
Joinpath,
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.samefile`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os.path`. When possible, using `Path` object
|
||||
/// methods such as `Path.samefile()` can improve readability over the `os.path`
|
||||
/// module's counterparts (e.g., `os.path.samefile()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.path.samefile("f1.py", "f2.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("f1.py").samefile("f2.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.samefile`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.samefile)
|
||||
/// - [Python documentation: `os.path.samefile`](https://docs.python.org/3/library/os.path.html#os.path.samefile)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsPathSamefile;
|
||||
|
||||
impl Violation for OsPathSamefile {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.path.samefile()` should be replaced by `Path.samefile()`".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.splitext`.
|
||||
///
|
||||
|
||||
@@ -100,7 +100,7 @@ pub(crate) fn invalid_function_name(
        return;
    }

    // Ignore the do_* methods of the http.server.BaseHTTPRequestHandler class
    // Ignore the do_* methods of the http.server.BaseHTTPRequestHandler class and its subclasses
    if name.starts_with("do_")
        && parent_class.is_some_and(|class| {
            any_base_class(class, semantic, &mut |superclass| {
@@ -108,7 +108,13 @@ pub(crate) fn invalid_function_name(
                qualified.is_some_and(|name| {
                    matches!(
                        name.segments(),
                        ["http", "server", "BaseHTTPRequestHandler"]
                        [
                            "http",
                            "server",
                            "BaseHTTPRequestHandler"
                                | "CGIHTTPRequestHandler"
                                | "SimpleHTTPRequestHandler"
                        ]
                    )
                })
            })
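The widened `matches!` arm exempts `do_*` handlers on any of the stdlib request-handler classes, not just `BaseHTTPRequestHandler` itself; the N802 snapshots that follow exercise exactly this. A hedged Python sketch of the intended behaviour (illustrative names, not the actual fixture):

```python
from http.server import BaseHTTPRequestHandler, SimpleHTTPRequestHandler


class Api(SimpleHTTPRequestHandler):
    def do_GET(self):          # exempt: do_* method on a request-handler subclass
        pass

    def dont_OPTIONS(self):    # N802: not a do_* handler, name should be lowercase
        pass


class NotAHandler:
    def do_GET(self):          # N802: the class is unrelated to http.server handlers
        pass
```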
@@ -55,3 +55,21 @@ N802.py:84:9: N802 Function name `dont_GET` should be lowercase
|
||||
| ^^^^^^^^ N802
|
||||
85 | pass
|
||||
|
|
||||
|
||||
N802.py:95:9: N802 Function name `dont_OPTIONS` should be lowercase
|
||||
|
|
||||
93 | pass
|
||||
94 |
|
||||
95 | def dont_OPTIONS(self):
|
||||
| ^^^^^^^^^^^^ N802
|
||||
96 | pass
|
||||
|
|
||||
|
||||
N802.py:106:9: N802 Function name `dont_OPTIONS` should be lowercase
|
||||
|
|
||||
104 | pass
|
||||
105 |
|
||||
106 | def dont_OPTIONS(self):
|
||||
| ^^^^^^^^^^^^ N802
|
||||
107 | pass
|
||||
|
|
||||
|
||||
@@ -406,7 +406,14 @@ fn convert_to_list_extend(
    };
    let target_str = locator.slice(for_stmt.target.range());
    let elt_str = locator.slice(to_append);
    let generator_str = format!("{elt_str} {for_type} {target_str} in {for_iter_str}{if_str}");
    let generator_str = if to_append
        .as_generator_expr()
        .is_some_and(|generator| !generator.parenthesized)
    {
        format!("({elt_str}) {for_type} {target_str} in {for_iter_str}{if_str}")
    } else {
        format!("{elt_str} {for_type} {target_str} in {for_iter_str}{if_str}")
    };

    let variable_name = locator.slice(binding);
    let for_loop_inline_comments = comment_strings_in_range(
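The new branch re-parenthesizes a bare generator element before splicing it into the suggested comprehension; without the parentheses the rewritten code would not parse. The PERF401 snapshots that follow show the resulting fixes; in plain Python terms:

```python
result = []
for i in range(3):
    result.append(x for x in [i])  # PERF401: each appended element is itself a generator

# Suggested rewrite: the generator element keeps its own parentheses inside the
# comprehension, so the expression stays valid.
result = [(x for x in [i]) for i in range(3)]
```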
@@ -241,5 +241,27 @@ PERF401.py:280:13: PERF401 Use `list.extend` to create a transformed list
|
||||
279 | if lambda: 0:
|
||||
280 | dst.append(i)
|
||||
| ^^^^^^^^^^^^^ PERF401
|
||||
281 |
|
||||
282 | def f():
|
||||
|
|
||||
= help: Replace for loop with list.extend
|
||||
|
||||
PERF401.py:286:9: PERF401 Use a list comprehension to create a transformed list
|
||||
|
|
||||
284 | result = []
|
||||
285 | for i in range(3):
|
||||
286 | result.append(x for x in [i])
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
|
||||
287 |
|
||||
288 | def f():
|
||||
|
|
||||
= help: Replace for loop with list comprehension
|
||||
|
||||
PERF401.py:292:9: PERF401 Use a list comprehension to create a transformed list
|
||||
|
|
||||
290 | result = []
|
||||
291 | for i in range(3):
|
||||
292 | result.append((x for x in [i]))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
|
||||
|
|
||||
= help: Replace for loop with list comprehension
|
||||
|
||||
@@ -566,6 +566,8 @@ PERF401.py:280:13: PERF401 [*] Use `list.extend` to create a transformed list
|
||||
279 | if lambda: 0:
|
||||
280 | dst.append(i)
|
||||
| ^^^^^^^^^^^^^ PERF401
|
||||
281 |
|
||||
282 | def f():
|
||||
|
|
||||
= help: Replace for loop with list.extend
|
||||
|
||||
@@ -577,3 +579,47 @@ PERF401.py:280:13: PERF401 [*] Use `list.extend` to create a transformed list
|
||||
279 |- if lambda: 0:
|
||||
280 |- dst.append(i)
|
||||
278 |+ dst.extend(i for i in src if (lambda: 0))
|
||||
281 279 |
|
||||
282 280 | def f():
|
||||
283 281 | i = "xyz"
|
||||
|
||||
PERF401.py:286:9: PERF401 [*] Use a list comprehension to create a transformed list
|
||||
|
|
||||
284 | result = []
|
||||
285 | for i in range(3):
|
||||
286 | result.append(x for x in [i])
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
|
||||
287 |
|
||||
288 | def f():
|
||||
|
|
||||
= help: Replace for loop with list comprehension
|
||||
|
||||
ℹ Unsafe fix
|
||||
281 281 |
|
||||
282 282 | def f():
|
||||
283 283 | i = "xyz"
|
||||
284 |- result = []
|
||||
285 |- for i in range(3):
|
||||
286 |- result.append(x for x in [i])
|
||||
284 |+ result = [(x for x in [i]) for i in range(3)]
|
||||
287 285 |
|
||||
288 286 | def f():
|
||||
289 287 | i = "xyz"
|
||||
|
||||
PERF401.py:292:9: PERF401 [*] Use a list comprehension to create a transformed list
|
||||
|
|
||||
290 | result = []
|
||||
291 | for i in range(3):
|
||||
292 | result.append((x for x in [i]))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
|
||||
|
|
||||
= help: Replace for loop with list comprehension
|
||||
|
||||
ℹ Unsafe fix
|
||||
287 287 |
|
||||
288 288 | def f():
|
||||
289 289 | i = "xyz"
|
||||
290 |- result = []
|
||||
291 |- for i in range(3):
|
||||
292 |- result.append((x for x in [i]))
|
||||
290 |+ result = [(x for x in [i]) for i in range(3)]
|
||||
|
||||
@@ -48,6 +48,7 @@ mod tests {
|
||||
#[test_case(Rule::ComparisonWithItself, Path::new("comparison_with_itself.py"))]
|
||||
#[test_case(Rule::EqWithoutHash, Path::new("eq_without_hash.py"))]
|
||||
#[test_case(Rule::EmptyComment, Path::new("empty_comment.py"))]
|
||||
#[test_case(Rule::EmptyComment, Path::new("empty_comment_line_continuation.py"))]
|
||||
#[test_case(Rule::ManualFromImport, Path::new("import_aliasing.py"))]
|
||||
#[test_case(Rule::IfStmtMinMax, Path::new("if_stmt_min_max.py"))]
|
||||
#[test_case(Rule::SingleStringSlots, Path::new("single_string_slots.py"))]
|
||||
|
||||
@@ -1,4 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_index::Indexer;
use ruff_python_trivia::{CommentRanges, is_python_whitespace};
use ruff_source_file::LineRanges;
use ruff_text_size::{TextRange, TextSize};
@@ -49,6 +50,7 @@ pub(crate) fn empty_comments(
    context: &LintContext,
    comment_ranges: &CommentRanges,
    locator: &Locator,
    indexer: &Indexer,
) {
    let block_comments = comment_ranges.block_comments(locator.contents());

@@ -59,12 +61,12 @@ pub(crate) fn empty_comments(
        }

        // If the line contains an empty comment, add a diagnostic.
        empty_comment(context, range, locator);
        empty_comment(context, range, locator, indexer);
    }
}

/// Return a [`Diagnostic`] if the comment at the given [`TextRange`] is empty.
fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator) {
fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator, indexer: &Indexer) {
    // Check: is the comment empty?
    if !locator
        .slice(range)
@@ -95,12 +97,20 @@ fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator) {
        }
    });

    // If there is no character preceding the comment, this comment must be on its own physical line.
    // If there is a line preceding the empty comment's line, check if it ends in a line continuation character. (`\`)
    let is_on_same_logical_line = indexer
        .preceded_by_continuations(first_hash_col, locator.contents())
        .is_some();

    if let Some(mut diagnostic) = context
        .report_diagnostic_if_enabled(EmptyComment, TextRange::new(first_hash_col, line.end()))
    {
        diagnostic.set_fix(Fix::safe_edit(
            if let Some(deletion_start_col) = deletion_start_col {
                Edit::deletion(line.start() + deletion_start_col, line.end())
            } else if is_on_same_logical_line {
                Edit::deletion(first_hash_col, line.end())
            } else {
                Edit::range_deletion(locator.full_line_range(first_hash_col))
            },
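Consulting the `Indexer` lets the fix distinguish an empty comment on a backslash-continued line from one on its own line: in the continued case only the `#` is removed and the now-blank physical line is kept, because deleting the whole line would splice the next line into the continued statement. A small runnable illustration of why that matters (this mirrors the new snapshot below but is not the fixture file itself, and the printed values are what CPython produces for this layout):

```python
x = 0 \
#
+1
print(x)  # 0 -- the empty comment ends the joined logical line, so `+1` is a separate statement

# If the fix removed the entire physical line, the backslash would join `x = 0`
# directly with `+1`, silently changing the result:
y = 0 \
+1
print(y)  # 1
```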
@@ -0,0 +1,36 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pylint/mod.rs
|
||||
---
|
||||
empty_comment_line_continuation.py:1:1: PLR2044 [*] Line with empty comment
|
||||
|
|
||||
1 | #
|
||||
| ^ PLR2044
|
||||
2 | x = 0 \
|
||||
3 | #
|
||||
|
|
||||
= help: Delete the empty comment
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-#
|
||||
2 1 | x = 0 \
|
||||
3 2 | #
|
||||
4 3 | +1
|
||||
|
||||
empty_comment_line_continuation.py:3:1: PLR2044 [*] Line with empty comment
|
||||
|
|
||||
1 | #
|
||||
2 | x = 0 \
|
||||
3 | #
|
||||
| ^ PLR2044
|
||||
4 | +1
|
||||
5 | print(x)
|
||||
|
|
||||
= help: Delete the empty comment
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | #
|
||||
2 2 | x = 0 \
|
||||
3 |-#
|
||||
3 |+
|
||||
4 4 | +1
|
||||
5 5 | print(x)
|
||||
@@ -534,6 +534,7 @@ mod tests {
|
||||
#[test_case(Rule::UnnecessaryRegularExpression, Path::new("RUF055_0.py"))]
|
||||
#[test_case(Rule::UnnecessaryRegularExpression, Path::new("RUF055_1.py"))]
|
||||
#[test_case(Rule::UnnecessaryRegularExpression, Path::new("RUF055_2.py"))]
|
||||
#[test_case(Rule::UnnecessaryRegularExpression, Path::new("RUF055_3.py"))]
|
||||
#[test_case(Rule::PytestRaisesAmbiguousPattern, Path::new("RUF043.py"))]
|
||||
#[test_case(Rule::IndentedFormFeed, Path::new("RUF054.py"))]
|
||||
#[test_case(Rule::ImplicitClassVarInDataclass, Path::new("RUF045.py"))]
|
||||
@@ -554,6 +555,44 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UnrawRePattern, Path::new("RUF039_py_version_sensitive.py"))]
|
||||
fn preview_rules_py37(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__py37__{}_{}",
|
||||
rule_code.noqa_code(),
|
||||
path.to_string_lossy()
|
||||
);
|
||||
let diagnostics = test_path(
|
||||
Path::new("ruff").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
preview: PreviewMode::Enabled,
|
||||
unresolved_target_version: PythonVersion::PY37.into(),
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
)?;
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UnrawRePattern, Path::new("RUF039_py_version_sensitive.py"))]
|
||||
fn preview_rules_py38(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__py38__{}_{}",
|
||||
rule_code.noqa_code(),
|
||||
path.to_string_lossy()
|
||||
);
|
||||
let diagnostics = test_path(
|
||||
Path::new("ruff").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
preview: PreviewMode::Enabled,
|
||||
unresolved_target_version: PythonVersion::PY38.into(),
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
)?;
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052.py"), r"^_+", 1)]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052.py"), r"", 2)]
|
||||
fn custom_regexp_preset(
|
||||
|
||||
@@ -2,6 +2,7 @@ use anyhow::Context;

use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast as ast;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_semantic::{Scope, ScopeKind};
use ruff_python_trivia::{indentation_at_offset, textwrap};
use ruff_source_file::LineRanges;
@@ -117,13 +118,7 @@ pub(crate) fn post_init_default(checker: &Checker, function_def: &ast::StmtFunct

        if !stopped_fixes {
            diagnostic.try_set_fix(|| {
                use_initvar(
                    current_scope,
                    function_def,
                    &parameter.parameter,
                    default,
                    checker,
                )
                use_initvar(current_scope, function_def, parameter, default, checker)
            });
            // Need to stop fixes as soon as there is a parameter we cannot fix.
            // Otherwise, we risk a syntax error (a parameter without a default
@@ -138,10 +133,11 @@ pub(crate) fn post_init_default(checker: &Checker, function_def: &ast::StmtFunct
fn use_initvar(
    current_scope: &Scope,
    post_init_def: &ast::StmtFunctionDef,
    parameter: &ast::Parameter,
    parameter_with_default: &ast::ParameterWithDefault,
    default: &ast::Expr,
    checker: &Checker,
) -> anyhow::Result<Fix> {
    let parameter = &parameter_with_default.parameter;
    if current_scope.has(&parameter.name) {
        return Err(anyhow::anyhow!(
            "Cannot add a `{}: InitVar` field to the class body, as a field by that name already exists",
@@ -157,17 +153,25 @@ fn use_initvar(
        checker.semantic(),
    )?;

    let locator = checker.locator();

    let default_loc = parenthesized_range(
        default.into(),
        parameter_with_default.into(),
        checker.comment_ranges(),
        checker.source(),
    )
    .unwrap_or(default.range());

    // Delete the default value. For example,
    // - def __post_init__(self, foo: int = 0) -> None: ...
    // + def __post_init__(self, foo: int) -> None: ...
    let default_edit = Edit::deletion(parameter.end(), default.end());
    let default_edit = Edit::deletion(parameter.end(), default_loc.end());

    // Add `dataclasses.InitVar` field to class body.
    let locator = checker.locator();

    let content = {
        let default = locator.slice(default_loc);
        let parameter_name = locator.slice(&parameter.name);
        let default = locator.slice(default);
        let line_ending = checker.stylist().line_ending().as_str();

        if let Some(annotation) = &parameter
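Using `parenthesized_range` means the deleted default and the text copied into the `InitVar` field both cover any wrapping parentheses (walruses, parenthesized literals, generators). The RUF033 snapshots later in this diff show the generated fixes; the target shape, sketched in Python:

```python
from dataclasses import InitVar, dataclass


@dataclass
class Foo:
    # Fixed form: the default moves onto an InitVar class field, keeping its
    # original parenthesized text, and the parameter loses its default.
    bar: InitVar[int] = (x := 1)

    def __post_init__(self, bar: int) -> None:
        print(bar)


Foo()        # uses the InitVar default -> prints 1
Foo(bar=2)   # prints 2
```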
@@ -1,8 +1,8 @@
use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::{
    Arguments, CmpOp, Expr, ExprAttribute, ExprCall, ExprCompare, ExprContext, ExprStringLiteral,
    ExprUnaryOp, Identifier, UnaryOp,
    Arguments, CmpOp, Expr, ExprAttribute, ExprBytesLiteral, ExprCall, ExprCompare, ExprContext,
    ExprStringLiteral, ExprUnaryOp, Identifier, UnaryOp,
};
use ruff_python_semantic::analyze::typing::find_binding_value;
use ruff_python_semantic::{Modules, SemanticModel};
@@ -72,6 +72,9 @@ impl Violation for UnnecessaryRegularExpression {
    }
}

const METACHARACTERS: [char; 12] = ['.', '^', '$', '*', '+', '?', '{', '[', '\\', '|', '(', ')'];
const ESCAPABLE_SINGLE_CHARACTERS: &str = "abfnrtv";

/// RUF055
pub(crate) fn unnecessary_regular_expression(checker: &Checker, call: &ExprCall) {
    // adapted from unraw_re_pattern
@@ -96,16 +99,19 @@ pub(crate) fn unnecessary_regular_expression(checker: &Checker, call: &ExprCall)
    };

    // For now, restrict this rule to string literals and variables that can be resolved to literals
    let Some(string_lit) = resolve_string_literal(re_func.pattern, semantic) else {
    let Some(literal) = resolve_literal(re_func.pattern, semantic) else {
        return;
    };

    // For now, reject any regex metacharacters. Compare to the complete list
    // from https://docs.python.org/3/howto/regex.html#matching-characters
    let has_metacharacters = string_lit
        .value
        .to_str()
        .contains(['.', '^', '$', '*', '+', '?', '{', '[', '\\', '|', '(', ')']);
    let has_metacharacters = match &literal {
        Literal::Str(str_lit) => str_lit.value.to_str().contains(METACHARACTERS),
        Literal::Bytes(bytes_lit) => bytes_lit
            .value
            .iter()
            .any(|part| part.iter().any(|&b| METACHARACTERS.contains(&(b as char)))),
    };

    if has_metacharacters {
        return;
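With `resolve_literal`, byte patterns are screened the same way as strings: the rule only proceeds when the pattern is a plain literal containing none of the twelve regex metacharacters, since only then is a `str`/`bytes` method a faithful replacement. Illustrative calls (hypothetical snippets, not the RUF055 fixture):

```python
import re

text = "spam spam"
data = b"spam spam"

re.sub("spam", "eggs", text)    # RUF055: equivalent to text.replace("spam", "eggs")
re.sub(b"spam", b"eggs", data)  # RUF055: bytes patterns are now covered too
re.sub("sp.m", "eggs", text)    # skipped: `.` is a metacharacter, str.replace would differ
```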
@@ -186,28 +192,48 @@ impl<'a> ReFunc<'a> {
            // version
            ("sub", 3) => {
                let repl = call.arguments.find_argument_value("repl", 1)?;
                let lit = resolve_string_literal(repl, semantic)?;
                let lit = resolve_literal(repl, semantic)?;
                let mut fixable = true;
                for (c, next) in lit.value.chars().tuple_windows() {
                    // `\0` (or any other ASCII digit) and `\g` have special meaning in `repl` strings.
                    // Meanwhile, nearly all other escapes of ASCII letters in a `repl` string cause
                    // `re.PatternError` to be raised at runtime.
                    //
                    // If we see that the escaped character is an alphanumeric ASCII character,
                    // we should only emit a diagnostic suggesting to replace the `re.sub()` call with
                    // `str.replace` if we can detect that the escaped character is one that is both
                    // valid in a `repl` string *and* does not have any special meaning in a `repl` string.
                    //
                    // It's out of scope for this rule to change invalid `re.sub()` calls into something
                    // that would not raise an exception at runtime. They should be left as-is.
                    if c == '\\' && next.is_ascii_alphanumeric() {
                        if "abfnrtv".contains(next) {
                            fixable = false;
                        } else {
                            return None;

                match lit {
                    Literal::Str(lit_str) => {
                        // Perform escape analysis for replacement literals.
                        for (c, next) in lit_str.value.to_str().chars().tuple_windows() {
                            // `\0` (or any other ASCII digit) and `\g` have special meaning in `repl` strings.
                            // Meanwhile, nearly all other escapes of ASCII letters in a `repl` string cause
                            // `re.PatternError` to be raised at runtime.
                            //
                            // If we see that the escaped character is an alphanumeric ASCII character,
                            // we should only emit a diagnostic suggesting to replace the `re.sub()` call with
                            // `str.replace` if we can detect that the escaped character is one that is both
                            // valid in a `repl` string *and* does not have any special meaning in a `repl` string.
                            //
                            // It's out of scope for this rule to change invalid `re.sub()` calls into something
                            // that would not raise an exception at runtime. They should be left as-is.
                            if c == '\\' && next.is_ascii_alphanumeric() {
                                if ESCAPABLE_SINGLE_CHARACTERS.contains(next) {
                                    fixable = false;
                                } else {
                                    return None;
                                }
                            }
                        }
                    }
                    Literal::Bytes(lit_bytes) => {
                        for part in &lit_bytes.value {
                            for (byte, next) in part.iter().copied().tuple_windows() {
                                if byte == b'\\' && (next as char).is_ascii_alphanumeric() {
                                    if ESCAPABLE_SINGLE_CHARACTERS.contains(next as char) {
                                        fixable = false;
                                    } else {
                                        return None;
                                    }
                                }
                            }
                        }
                    }
                }

                Some(ReFunc {
                    kind: ReFuncKind::Sub {
                        repl: fixable.then_some(repl),
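The escape analysis yields three outcomes for an `re.sub()` replacement string: a diagnostic with a fix, a diagnostic without a fix, or silence when the call would raise at runtime anyway. A hedged sketch of each case (illustrative calls, not the fixture):

```python
import re

s = "a-b-a"

re.sub("a", "X", s)    # diagnostic with fix: s.replace("a", "X")
re.sub("a", "\\n", s)  # diagnostic, no fix: `\n` means newline to re.sub but not to str.replace
re.sub("a", "\\g", s)  # no diagnostic: re.sub raises at runtime (`\g` needs a group name)
```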
@@ -329,6 +355,43 @@ impl<'a> ReFunc<'a> {
    }
}

/// A literal that can be either a string or a bytes literal.
enum Literal<'a> {
    Str(&'a ExprStringLiteral),
    Bytes(&'a ExprBytesLiteral),
}

/// Try to resolve `name` to either a string or bytes literal in `semantic`.
fn resolve_literal<'a>(name: &'a Expr, semantic: &'a SemanticModel) -> Option<Literal<'a>> {
    if let Some(str_lit) = resolve_string_literal(name, semantic) {
        return Some(Literal::Str(str_lit));
    }
    if let Some(bytes_lit) = resolve_bytes_literal(name, semantic) {
        return Some(Literal::Bytes(bytes_lit));
    }
    None
}

/// Try to resolve `name` to an [`ExprBytesLiteral`] in `semantic`.
fn resolve_bytes_literal<'a>(
    name: &'a Expr,
    semantic: &'a SemanticModel,
) -> Option<&'a ExprBytesLiteral> {
    if name.is_bytes_literal_expr() {
        return name.as_bytes_literal_expr();
    }

    if let Some(name_expr) = name.as_name_expr() {
        let binding = semantic.binding(semantic.only_binding(name_expr)?);
        let value = find_binding_value(binding, semantic)?;
        if value.is_bytes_literal_expr() {
            return value.as_bytes_literal_expr();
        }
    }

    None
}

/// Try to resolve `name` to an [`ExprStringLiteral`] in `semantic`.
fn resolve_string_literal<'a>(
    name: &'a Expr,
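`resolve_bytes_literal` mirrors the existing string resolver: it also chases a name back to its sole binding, so a pattern stored in a simple variable still qualifies. For instance (hypothetical snippet):

```python
import re

pattern = b"spam"              # single binding to a bytes literal
data = b"spam and eggs"

re.sub(pattern, b"ham", data)  # RUF055: resolves to b"spam", no metacharacters

dynamic = data[:4]             # not a literal binding
re.sub(dynamic, b"ham", data)  # skipped: cannot be resolved to a bytes literal
```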
@@ -1,13 +1,13 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::str::FromStr;
|
||||
|
||||
use ruff_diagnostics::Applicability;
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::{
|
||||
BytesLiteral, Expr, ExprBytesLiteral, ExprCall, ExprStringLiteral, StringLiteral,
|
||||
BytesLiteral, Expr, ExprBytesLiteral, ExprCall, ExprStringLiteral, PythonVersion, StringLiteral,
|
||||
};
|
||||
use ruff_python_semantic::{Modules, SemanticModel};
|
||||
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::{Edit, Fix, FixAvailability, Violation};
|
||||
@@ -24,6 +24,29 @@ use crate::{Edit, Fix, FixAvailability, Violation};
|
||||
/// Regular expressions should be written
|
||||
/// using raw strings to avoid double escaping.
|
||||
///
|
||||
/// ## Fix safety
|
||||
/// The fix is unsafe if the string/bytes literal contains an escape sequence because the fix alters
|
||||
/// the runtime value of the literal while retaining the regex semantics.
|
||||
///
|
||||
/// For example
|
||||
/// ```python
|
||||
/// # Literal is `1\n2`.
|
||||
/// re.compile("1\n2")
|
||||
///
|
||||
/// # Literal is `1\\n2`, but the regex library will interpret `\\n` and will still match a newline
|
||||
/// # character as before.
|
||||
/// re.compile(r"1\n2")
|
||||
/// ```
|
||||
///
|
||||
/// ## Fix availability
|
||||
/// A fix is not available if either
|
||||
/// * the argument is a string with a (no-op) `u` prefix (e.g., `u"foo"`) as the prefix is
|
||||
/// incompatible with the raw prefix `r`
|
||||
/// * the argument is a string or bytes literal with an escape sequence that has a different
|
||||
/// meaning in the context of a regular expression such as `\b`, which is word boundary or
|
||||
/// backspace in a regex, depending on the context, but always a backspace in string and bytes
|
||||
/// literals.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
@@ -163,20 +186,44 @@ fn check_string(checker: &Checker, literal: &StringLiteral, module: RegexModule,
|
||||
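Putting the new `Fix safety` and `Fix availability` rules together, the offered fix ranges from safe, to offered-but-unsafe, to withheld entirely. A hedged Python illustration (not the RUF039 fixture):

```python
import re

re.compile("colou?r")   # RUF039, safe fix: no backslashes, so r"colou?r" has the same value
re.compile("1\n2")      # RUF039, unsafe fix: r"1\n2" changes the literal's value, but the
                        # regex still matches a newline
re.compile("\bword\b")  # RUF039, no fix: `\b` is a backspace here but a word boundary in r"..."
re.compile(u"foo")      # RUF039, no fix: the `u` prefix cannot be combined with `r`
```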
let range = literal.range;
|
||||
let mut diagnostic = checker.report_diagnostic(UnrawRePattern { module, func, kind }, range);
|
||||
|
||||
if
|
||||
// The (no-op) `u` prefix is a syntax error when combined with `r`
|
||||
!literal.flags.prefix().is_unicode()
|
||||
// We are looking for backslash characters
|
||||
// in the raw source code here, because `\n`
|
||||
// gets converted to a single character already
|
||||
// at the lexing stage.
|
||||
&&!checker.locator().slice(literal.range()).contains('\\')
|
||||
{
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::insertion(
|
||||
"r".to_string(),
|
||||
literal.range().start(),
|
||||
)));
|
||||
let Some(applicability) = raw_string_applicability(checker, literal) else {
|
||||
return;
|
||||
};
|
||||
|
||||
diagnostic.set_fix(Fix::applicable_edit(
|
||||
Edit::insertion("r".to_string(), literal.range().start()),
|
||||
applicability,
|
||||
));
|
||||
}
|
||||
|
||||
/// Check how safe it is to prepend the `r` prefix to the string.
|
||||
///
|
||||
/// ## Returns
|
||||
/// * `None` if the prefix cannot be added,
|
||||
/// * `Some(a)` if it can be added with applicability `a`.
|
||||
fn raw_string_applicability(checker: &Checker, literal: &StringLiteral) -> Option<Applicability> {
|
||||
if literal.flags.prefix().is_unicode() {
|
||||
// The (no-op) `u` prefix is a syntax error when combined with `r`
|
||||
return None;
|
||||
}
|
||||
|
||||
if checker.target_version() >= PythonVersion::PY38 {
|
||||
raw_applicability(checker, literal.range(), |escaped| {
|
||||
matches!(
|
||||
escaped,
|
||||
Some('a' | 'f' | 'n' | 'r' | 't' | 'u' | 'U' | 'v' | 'x' | 'N')
|
||||
)
|
||||
})
|
||||
} else {
|
||||
raw_applicability(checker, literal.range(), |escaped| {
|
||||
matches!(
|
||||
escaped,
|
||||
Some('a' | 'f' | 'n' | 'r' | 't' | 'u' | 'U' | 'v' | 'x')
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// re.compile("\a\f\n\N{Partial Differential}\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
}
|
||||
|
||||
fn check_bytes(checker: &Checker, literal: &BytesLiteral, module: RegexModule, func: &str) {
|
||||
@@ -187,5 +234,53 @@ fn check_bytes(checker: &Checker, literal: &BytesLiteral, module: RegexModule, f
|
||||
let kind = PatternKind::Bytes;
|
||||
let func = func.to_string();
|
||||
let range = literal.range;
|
||||
checker.report_diagnostic(UnrawRePattern { module, func, kind }, range);
|
||||
let mut diagnostic = checker.report_diagnostic(UnrawRePattern { module, func, kind }, range);
|
||||
|
||||
let Some(applicability) = raw_byte_applicability(checker, literal) else {
|
||||
return;
|
||||
};
|
||||
|
||||
diagnostic.set_fix(Fix::applicable_edit(
|
||||
Edit::insertion("r".to_string(), literal.range().start()),
|
||||
applicability,
|
||||
));
|
||||
}
|
||||
|
||||
/// Check how same it is to prepend the `r` prefix to the byte sting.
|
||||
///
|
||||
/// ## Returns
|
||||
/// * `None` if the prefix cannot be added,
|
||||
/// * `Some(a)` if it can be added with applicability `a`.
|
||||
fn raw_byte_applicability(checker: &Checker, literal: &BytesLiteral) -> Option<Applicability> {
|
||||
raw_applicability(checker, literal.range(), |escaped| {
|
||||
matches!(escaped, Some('a' | 'f' | 'n' | 'r' | 't' | 'v' | 'x'))
|
||||
})
|
||||
}
|
||||
|
||||
fn raw_applicability(
|
||||
checker: &Checker,
|
||||
literal_range: TextRange,
|
||||
match_allowed_escape_sequence: impl Fn(Option<char>) -> bool,
|
||||
) -> Option<Applicability> {
|
||||
let mut found_slash = false;
|
||||
let mut chars = checker.locator().slice(literal_range).chars().peekable();
|
||||
while let Some(char) = chars.next() {
|
||||
if char == '\\' {
|
||||
found_slash = true;
|
||||
// Turning `"\uXXXX"` into `r"\uXXXX"` is behaviorally equivalent when passed
|
||||
// to `re`, however, it's not exactly the same runtime value.
|
||||
// Similarly, for the other escape sequences.
|
||||
if !match_allowed_escape_sequence(chars.peek().copied()) {
|
||||
// If the next character is not one of the whitelisted ones, we likely cannot safely turn
|
||||
// this into a raw string.
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some(if found_slash {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
})
|
||||
}
|
||||
|
||||
@@ -156,3 +156,281 @@ RUF033.py:67:59: RUF033 `__post_init__` method with argument defaults
|
||||
| ^^^^^ RUF033
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
RUF033.py:73:41: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
71 | @dataclass
|
||||
72 | class Foo:
|
||||
73 | def __post_init__(self, bar: int = (x := 1)) -> None:
|
||||
| ^^^^^^ RUF033
|
||||
74 | pass
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
70 70 | # https://github.com/astral-sh/ruff/issues/18950
|
||||
71 71 | @dataclass
|
||||
72 72 | class Foo:
|
||||
73 |- def __post_init__(self, bar: int = (x := 1)) -> None:
|
||||
73 |+ bar: InitVar[int] = (x := 1)
|
||||
74 |+ def __post_init__(self, bar: int) -> None:
|
||||
74 75 | pass
|
||||
75 76 |
|
||||
76 77 |
|
||||
|
||||
RUF033.py:81:21: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
79 | def __post_init__(
|
||||
80 | self,
|
||||
81 | bar: int = (x := 1) # comment
|
||||
| ^^^^^^ RUF033
|
||||
82 | ,
|
||||
83 | baz: int = (y := 2), # comment
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
76 76 |
|
||||
77 77 | @dataclass
|
||||
78 78 | class Foo:
|
||||
79 |+ bar: InitVar[int] = (x := 1)
|
||||
79 80 | def __post_init__(
|
||||
80 81 | self,
|
||||
81 |- bar: int = (x := 1) # comment
|
||||
82 |+ bar: int # comment
|
||||
82 83 | ,
|
||||
83 84 | baz: int = (y := 2), # comment
|
||||
84 85 | foo = (a := 1) # comment
|
||||
|
||||
RUF033.py:83:21: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
81 | bar: int = (x := 1) # comment
|
||||
82 | ,
|
||||
83 | baz: int = (y := 2), # comment
|
||||
| ^^^^^^ RUF033
|
||||
84 | foo = (a := 1) # comment
|
||||
85 | ,
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
76 76 |
|
||||
77 77 | @dataclass
|
||||
78 78 | class Foo:
|
||||
79 |+ baz: InitVar[int] = (y := 2)
|
||||
79 80 | def __post_init__(
|
||||
80 81 | self,
|
||||
81 82 | bar: int = (x := 1) # comment
|
||||
82 83 | ,
|
||||
83 |- baz: int = (y := 2), # comment
|
||||
84 |+ baz: int, # comment
|
||||
84 85 | foo = (a := 1) # comment
|
||||
85 86 | ,
|
||||
86 87 | faz = (b := 2), # comment
|
||||
|
||||
RUF033.py:84:16: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
82 | ,
|
||||
83 | baz: int = (y := 2), # comment
|
||||
84 | foo = (a := 1) # comment
|
||||
| ^^^^^^ RUF033
|
||||
85 | ,
|
||||
86 | faz = (b := 2), # comment
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
76 76 |
|
||||
77 77 | @dataclass
|
||||
78 78 | class Foo:
|
||||
79 |+ foo: InitVar = (a := 1)
|
||||
79 80 | def __post_init__(
|
||||
80 81 | self,
|
||||
81 82 | bar: int = (x := 1) # comment
|
||||
82 83 | ,
|
||||
83 84 | baz: int = (y := 2), # comment
|
||||
84 |- foo = (a := 1) # comment
|
||||
85 |+ foo # comment
|
||||
85 86 | ,
|
||||
86 87 | faz = (b := 2), # comment
|
||||
87 88 | ) -> None:
|
||||
|
||||
RUF033.py:86:16: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
84 | foo = (a := 1) # comment
|
||||
85 | ,
|
||||
86 | faz = (b := 2), # comment
|
||||
| ^^^^^^ RUF033
|
||||
87 | ) -> None:
|
||||
88 | pass
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
76 76 |
|
||||
77 77 | @dataclass
|
||||
78 78 | class Foo:
|
||||
79 |+ faz: InitVar = (b := 2)
|
||||
79 80 | def __post_init__(
|
||||
80 81 | self,
|
||||
81 82 | bar: int = (x := 1) # comment
|
||||
--------------------------------------------------------------------------------
|
||||
83 84 | baz: int = (y := 2), # comment
|
||||
84 85 | foo = (a := 1) # comment
|
||||
85 86 | ,
|
||||
86 |- faz = (b := 2), # comment
|
||||
87 |+ faz, # comment
|
||||
87 88 | ) -> None:
|
||||
88 89 | pass
|
||||
89 90 |
|
||||
|
||||
RUF033.py:95:20: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
93 | def __post_init__(
|
||||
94 | self,
|
||||
95 | bar: int = 1, # comment
|
||||
| ^ RUF033
|
||||
96 | baz: int = 2, # comment
|
||||
97 | ) -> None:
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
90 90 |
|
||||
91 91 | @dataclass
|
||||
92 92 | class Foo:
|
||||
93 |+ bar: InitVar[int] = 1
|
||||
93 94 | def __post_init__(
|
||||
94 95 | self,
|
||||
95 |- bar: int = 1, # comment
|
||||
96 |+ bar: int, # comment
|
||||
96 97 | baz: int = 2, # comment
|
||||
97 98 | ) -> None:
|
||||
98 99 | pass
|
||||
|
||||
RUF033.py:96:20: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
94 | self,
|
||||
95 | bar: int = 1, # comment
|
||||
96 | baz: int = 2, # comment
|
||||
| ^ RUF033
|
||||
97 | ) -> None:
|
||||
98 | pass
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
90 90 |
|
||||
91 91 | @dataclass
|
||||
92 92 | class Foo:
|
||||
93 |+ baz: InitVar[int] = 2
|
||||
93 94 | def __post_init__(
|
||||
94 95 | self,
|
||||
95 96 | bar: int = 1, # comment
|
||||
96 |- baz: int = 2, # comment
|
||||
97 |+ baz: int, # comment
|
||||
97 98 | ) -> None:
|
||||
98 99 | pass
|
||||
99 100 |
|
||||
|
||||
RUF033.py:105:22: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
103 | def __post_init__(
|
||||
104 | self,
|
||||
105 | arg1: int = (1) # comment
|
||||
| ^ RUF033
|
||||
106 | ,
|
||||
107 | arg2: int = ((1)) # comment
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
100 100 |
|
||||
101 101 | @dataclass
|
||||
102 102 | class Foo:
|
||||
103 |+ arg1: InitVar[int] = (1)
|
||||
103 104 | def __post_init__(
|
||||
104 105 | self,
|
||||
105 |- arg1: int = (1) # comment
|
||||
106 |+ arg1: int # comment
|
||||
106 107 | ,
|
||||
107 108 | arg2: int = ((1)) # comment
|
||||
108 109 | ,
|
||||
|
||||
RUF033.py:107:23: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
105 | arg1: int = (1) # comment
|
||||
106 | ,
|
||||
107 | arg2: int = ((1)) # comment
|
||||
| ^ RUF033
|
||||
108 | ,
|
||||
109 | arg2: int = (i for i in range(10)) # comment
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
100 100 |
|
||||
101 101 | @dataclass
|
||||
102 102 | class Foo:
|
||||
103 |+ arg2: InitVar[int] = ((1))
|
||||
103 104 | def __post_init__(
|
||||
104 105 | self,
|
||||
105 106 | arg1: int = (1) # comment
|
||||
106 107 | ,
|
||||
107 |- arg2: int = ((1)) # comment
|
||||
108 |+ arg2: int # comment
|
||||
108 109 | ,
|
||||
109 110 | arg2: int = (i for i in range(10)) # comment
|
||||
110 111 | ,
|
||||
|
||||
RUF033.py:109:21: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
107 | arg2: int = ((1)) # comment
|
||||
108 | ,
|
||||
109 | arg2: int = (i for i in range(10)) # comment
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ RUF033
|
||||
110 | ,
|
||||
111 | ) -> None:
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
100 100 |
|
||||
101 101 | @dataclass
|
||||
102 102 | class Foo:
|
||||
103 |+ arg2: InitVar[int] = (i for i in range(10))
|
||||
103 104 | def __post_init__(
|
||||
104 105 | self,
|
||||
105 106 | arg1: int = (1) # comment
|
||||
106 107 | ,
|
||||
107 108 | arg2: int = ((1)) # comment
|
||||
108 109 | ,
|
||||
109 |- arg2: int = (i for i in range(10)) # comment
|
||||
110 |+ arg2: int # comment
|
||||
110 111 | ,
|
||||
111 112 | ) -> None:
|
||||
112 113 | pass
|
||||
|
||||
RUF033.py:121:27: RUF033 [*] `__post_init__` method with argument defaults
|
||||
|
|
||||
119 | def __post_init__(
|
||||
120 | self,
|
||||
121 | bar: (int) = (yield from range(5)) # comment
|
||||
| ^^^^^^^^^^^^^^^^^^^ RUF033
|
||||
122 | ,
|
||||
123 | ) -> None:
|
||||
|
|
||||
= help: Use `dataclasses.InitVar` instead
|
||||
|
||||
ℹ Unsafe fix
|
||||
116 116 | def fun_with_python_syntax():
|
||||
117 117 | @dataclass
|
||||
118 118 | class Foo:
|
||||
119 |+ bar: InitVar[int] = (yield from range(5))
|
||||
119 120 | def __post_init__(
|
||||
120 121 | self,
|
||||
121 |- bar: (int) = (yield from range(5)) # comment
|
||||
122 |+ bar: (int) # comment
|
||||
122 123 | ,
|
||||
123 124 | ) -> None:
|
||||
124 125 | ...
|
||||
|
||||
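Editor's note (illustrative, not part of the diff): the RUF033 snapshots above rewrite `__post_init__` argument defaults into `dataclasses.InitVar` pseudo-fields. A minimal sketch of the target pattern, assuming a plain dataclass unrelated to the test fixtures:

```python
# Sketch only: InitVar values are passed to __post_init__ instead of
# becoming attributes, so the default lives on the field, not the parameter.
from dataclasses import InitVar, dataclass, field


@dataclass
class Foo:
    bar: InitVar[int] = 1
    doubled: int = field(init=False, default=0)

    def __post_init__(self, bar: int) -> None:
        self.doubled = bar * 2


print(Foo().doubled)       # 2
print(Foo(bar=5).doubled)  # 10
```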
@@ -21,7 +21,7 @@ RUF039.py:5:12: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
7 7 | re.finditer("dou\ble")
|
||||
8 8 | re.fullmatch('''t\riple single''')
|
||||
|
||||
RUF039.py:6:12: RUF039 First argument to `re.findall()` is not raw string
|
||||
RUF039.py:6:12: RUF039 [*] First argument to `re.findall()` is not raw string
|
||||
|
|
||||
4 | # Errors
|
||||
5 | re.compile('single free-spacing', flags=re.X)
|
||||
@@ -32,6 +32,16 @@ RUF039.py:6:12: RUF039 First argument to `re.findall()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
3 3 |
|
||||
4 4 | # Errors
|
||||
5 5 | re.compile('single free-spacing', flags=re.X)
|
||||
6 |-re.findall('si\ngle')
|
||||
6 |+re.findall(r'si\ngle')
|
||||
7 7 | re.finditer("dou\ble")
|
||||
8 8 | re.fullmatch('''t\riple single''')
|
||||
9 9 | re.match("""\triple double""")
|
||||
|
||||
RUF039.py:7:13: RUF039 First argument to `re.finditer()` is not raw string
|
||||
|
|
||||
5 | re.compile('single free-spacing', flags=re.X)
|
||||
@@ -43,7 +53,7 @@ RUF039.py:7:13: RUF039 First argument to `re.finditer()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:8:14: RUF039 First argument to `re.fullmatch()` is not raw string
|
||||
RUF039.py:8:14: RUF039 [*] First argument to `re.fullmatch()` is not raw string
|
||||
|
|
||||
6 | re.findall('si\ngle')
|
||||
7 | re.finditer("dou\ble")
|
||||
@@ -54,7 +64,17 @@ RUF039.py:8:14: RUF039 First argument to `re.fullmatch()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:9:10: RUF039 First argument to `re.match()` is not raw string
|
||||
ℹ Unsafe fix
|
||||
5 5 | re.compile('single free-spacing', flags=re.X)
|
||||
6 6 | re.findall('si\ngle')
|
||||
7 7 | re.finditer("dou\ble")
|
||||
8 |-re.fullmatch('''t\riple single''')
|
||||
8 |+re.fullmatch(r'''t\riple single''')
|
||||
9 9 | re.match("""\triple double""")
|
||||
10 10 | re.search('two', 'args')
|
||||
11 11 | re.split("raw", r'second')
|
||||
|
||||
RUF039.py:9:10: RUF039 [*] First argument to `re.match()` is not raw string
|
||||
|
|
||||
7 | re.finditer("dou\ble")
|
||||
8 | re.fullmatch('''t\riple single''')
|
||||
@@ -65,6 +85,16 @@ RUF039.py:9:10: RUF039 First argument to `re.match()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
6 6 | re.findall('si\ngle')
|
||||
7 7 | re.finditer("dou\ble")
|
||||
8 8 | re.fullmatch('''t\riple single''')
|
||||
9 |-re.match("""\triple double""")
|
||||
9 |+re.match(r"""\triple double""")
|
||||
10 10 | re.search('two', 'args')
|
||||
11 11 | re.split("raw", r'second')
|
||||
12 12 | re.sub(u'''nicode''', u"f(?i)rst")
|
||||
|
||||
RUF039.py:10:11: RUF039 [*] First argument to `re.search()` is not raw string
|
||||
|
|
||||
8 | re.fullmatch('''t\riple single''')
|
||||
@@ -117,7 +147,7 @@ RUF039.py:12:8: RUF039 First argument to `re.sub()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:13:9: RUF039 First argument to `re.subn()` is not raw bytes literal
|
||||
RUF039.py:13:9: RUF039 [*] First argument to `re.subn()` is not raw bytes literal
|
||||
|
|
||||
11 | re.split("raw", r'second')
|
||||
12 | re.sub(u'''nicode''', u"f(?i)rst")
|
||||
@@ -128,6 +158,16 @@ RUF039.py:13:9: RUF039 First argument to `re.subn()` is not raw bytes literal
|
||||
|
|
||||
= help: Replace with raw bytes literal
|
||||
|
||||
ℹ Safe fix
|
||||
10 10 | re.search('two', 'args')
|
||||
11 11 | re.split("raw", r'second')
|
||||
12 12 | re.sub(u'''nicode''', u"f(?i)rst")
|
||||
13 |-re.subn(b"""ytes are""", f"\u006e")
|
||||
13 |+re.subn(rb"""ytes are""", f"\u006e")
|
||||
14 14 |
|
||||
15 15 | regex.compile('single free-spacing', flags=regex.X)
|
||||
16 16 | regex.findall('si\ngle')
|
||||
|
||||
RUF039.py:15:15: RUF039 [*] First argument to `regex.compile()` is not raw string
|
||||
|
|
||||
13 | re.subn(b"""ytes are""", f"\u006e")
|
||||
@@ -149,7 +189,7 @@ RUF039.py:15:15: RUF039 [*] First argument to `regex.compile()` is not raw strin
|
||||
17 17 | regex.finditer("dou\ble")
|
||||
18 18 | regex.fullmatch('''t\riple single''')
|
||||
|
||||
RUF039.py:16:15: RUF039 First argument to `regex.findall()` is not raw string
|
||||
RUF039.py:16:15: RUF039 [*] First argument to `regex.findall()` is not raw string
|
||||
|
|
||||
15 | regex.compile('single free-spacing', flags=regex.X)
|
||||
16 | regex.findall('si\ngle')
|
||||
@@ -159,6 +199,16 @@ RUF039.py:16:15: RUF039 First argument to `regex.findall()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
13 13 | re.subn(b"""ytes are""", f"\u006e")
|
||||
14 14 |
|
||||
15 15 | regex.compile('single free-spacing', flags=regex.X)
|
||||
16 |-regex.findall('si\ngle')
|
||||
16 |+regex.findall(r'si\ngle')
|
||||
17 17 | regex.finditer("dou\ble")
|
||||
18 18 | regex.fullmatch('''t\riple single''')
|
||||
19 19 | regex.match("""\triple double""")
|
||||
|
||||
RUF039.py:17:16: RUF039 First argument to `regex.finditer()` is not raw string
|
||||
|
|
||||
15 | regex.compile('single free-spacing', flags=regex.X)
|
||||
@@ -170,7 +220,7 @@ RUF039.py:17:16: RUF039 First argument to `regex.finditer()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:18:17: RUF039 First argument to `regex.fullmatch()` is not raw string
|
||||
RUF039.py:18:17: RUF039 [*] First argument to `regex.fullmatch()` is not raw string
|
||||
|
|
||||
16 | regex.findall('si\ngle')
|
||||
17 | regex.finditer("dou\ble")
|
||||
@@ -181,7 +231,17 @@ RUF039.py:18:17: RUF039 First argument to `regex.fullmatch()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:19:13: RUF039 First argument to `regex.match()` is not raw string
|
||||
ℹ Unsafe fix
|
||||
15 15 | regex.compile('single free-spacing', flags=regex.X)
|
||||
16 16 | regex.findall('si\ngle')
|
||||
17 17 | regex.finditer("dou\ble")
|
||||
18 |-regex.fullmatch('''t\riple single''')
|
||||
18 |+regex.fullmatch(r'''t\riple single''')
|
||||
19 19 | regex.match("""\triple double""")
|
||||
20 20 | regex.search('two', 'args')
|
||||
21 21 | regex.split("raw", r'second')
|
||||
|
||||
RUF039.py:19:13: RUF039 [*] First argument to `regex.match()` is not raw string
|
||||
|
|
||||
17 | regex.finditer("dou\ble")
|
||||
18 | regex.fullmatch('''t\riple single''')
|
||||
@@ -192,6 +252,16 @@ RUF039.py:19:13: RUF039 First argument to `regex.match()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
16 16 | regex.findall('si\ngle')
|
||||
17 17 | regex.finditer("dou\ble")
|
||||
18 18 | regex.fullmatch('''t\riple single''')
|
||||
19 |-regex.match("""\triple double""")
|
||||
19 |+regex.match(r"""\triple double""")
|
||||
20 20 | regex.search('two', 'args')
|
||||
21 21 | regex.split("raw", r'second')
|
||||
22 22 | regex.sub(u'''nicode''', u"f(?i)rst")
|
||||
|
||||
RUF039.py:20:14: RUF039 [*] First argument to `regex.search()` is not raw string
|
||||
|
|
||||
18 | regex.fullmatch('''t\riple single''')
|
||||
@@ -244,7 +314,7 @@ RUF039.py:22:11: RUF039 First argument to `regex.sub()` is not raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:23:12: RUF039 First argument to `regex.subn()` is not raw bytes literal
|
||||
RUF039.py:23:12: RUF039 [*] First argument to `regex.subn()` is not raw bytes literal
|
||||
|
|
||||
21 | regex.split("raw", r'second')
|
||||
22 | regex.sub(u'''nicode''', u"f(?i)rst")
|
||||
@@ -255,6 +325,16 @@ RUF039.py:23:12: RUF039 First argument to `regex.subn()` is not raw bytes litera
|
||||
|
|
||||
= help: Replace with raw bytes literal
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 | regex.search('two', 'args')
|
||||
21 21 | regex.split("raw", r'second')
|
||||
22 22 | regex.sub(u'''nicode''', u"f(?i)rst")
|
||||
23 |-regex.subn(b"""ytes are""", f"\u006e")
|
||||
23 |+regex.subn(rb"""ytes are""", f"\u006e")
|
||||
24 24 |
|
||||
25 25 | regex.template("""(?m)
|
||||
26 26 | (?:ulti)?
|
||||
|
||||
RUF039.py:25:16: RUF039 [*] First argument to `regex.template()` is not raw string
|
||||
|
|
||||
23 | regex.subn(b"""ytes are""", f"\u006e")
|
||||
@@ -278,3 +358,111 @@ RUF039.py:25:16: RUF039 [*] First argument to `regex.template()` is not raw stri
|
||||
26 26 | (?:ulti)?
|
||||
27 27 | (?=(?<!(?<=(?!l)))
|
||||
28 28 | l(?i:ne)
|
||||
|
||||
RUF039.py:59:12: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
58 | # https://github.com/astral-sh/ruff/issues/16713
|
||||
59 | re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
60 | re.compile("\b") # without fix
|
||||
61 | re.compile("\"") # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
56 56 |
|
||||
57 57 |
|
||||
58 58 | # https://github.com/astral-sh/ruff/issues/16713
|
||||
59 |-re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
59 |+re.compile(r"\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
60 60 | re.compile("\b") # without fix
|
||||
61 61 | re.compile("\"") # without fix
|
||||
62 62 | re.compile("\'") # without fix
|
||||
|
||||
RUF039.py:60:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
58 | # https://github.com/astral-sh/ruff/issues/16713
|
||||
59 | re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
60 | re.compile("\b") # without fix
|
||||
| ^^^^ RUF039
|
||||
61 | re.compile("\"") # without fix
|
||||
62 | re.compile("\'") # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:61:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
59 | re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
60 | re.compile("\b") # without fix
|
||||
61 | re.compile("\"") # without fix
|
||||
| ^^^^ RUF039
|
||||
62 | re.compile("\'") # without fix
|
||||
63 | re.compile('\"') # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:62:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
60 | re.compile("\b") # without fix
|
||||
61 | re.compile("\"") # without fix
|
||||
62 | re.compile("\'") # without fix
|
||||
| ^^^^ RUF039
|
||||
63 | re.compile('\"') # without fix
|
||||
64 | re.compile('\'') # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:63:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
61 | re.compile("\"") # without fix
|
||||
62 | re.compile("\'") # without fix
|
||||
63 | re.compile('\"') # without fix
|
||||
| ^^^^ RUF039
|
||||
64 | re.compile('\'') # without fix
|
||||
65 | re.compile("\\") # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:64:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
62 | re.compile("\'") # without fix
|
||||
63 | re.compile('\"') # without fix
|
||||
64 | re.compile('\'') # without fix
|
||||
| ^^^^ RUF039
|
||||
65 | re.compile("\\") # without fix
|
||||
66 | re.compile("\101") # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:65:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
63 | re.compile('\"') # without fix
|
||||
64 | re.compile('\'') # without fix
|
||||
65 | re.compile("\\") # without fix
|
||||
| ^^^^ RUF039
|
||||
66 | re.compile("\101") # without fix
|
||||
67 | re.compile("a\
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:66:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
64 | re.compile('\'') # without fix
|
||||
65 | re.compile("\\") # without fix
|
||||
66 | re.compile("\101") # without fix
|
||||
| ^^^^^^ RUF039
|
||||
67 | re.compile("a\
|
||||
68 | b") # without fix
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039.py:67:12: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
65 | re.compile("\\") # without fix
|
||||
66 | re.compile("\101") # without fix
|
||||
67 | re.compile("a\
|
||||
| ____________^
|
||||
68 | | b") # without fix
|
||||
| |__^ RUF039
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
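Editor's note (illustrative, not part of the diff): what RUF039 guards against. In a plain string literal Python resolves the escape before `re` ever sees it, while a raw string hands the backslash to the regex engine:

```python
# Sketch only, assuming nothing beyond the standard library.
import re

# '\b' in a plain literal is a backspace character; in a raw pattern it is
# the word-boundary metacharacter.
print(re.search("\bword\b", "a word"))   # None: pattern contains backspace chars
print(re.search(r"\bword\b", "a word"))  # matches: \b is a word boundary

# Escapes such as '\t' or '\n' mean the same thing either way, which is why
# those calls get an (unsafe) fix in the snapshots above while '\b', quote
# escapes, and '\\' are reported "without fix".
```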
@@ -65,7 +65,7 @@ RUF039_concat.py:12:5: RUF039 [*] First argument to `re.findall()` is not raw st
|
||||
14 14 | """
|
||||
15 15 | )
|
||||
|
||||
RUF039_concat.py:26:5: RUF039 First argument to `re.match()` is not raw bytes literal
|
||||
RUF039_concat.py:26:5: RUF039 [*] First argument to `re.match()` is not raw bytes literal
|
||||
|
|
||||
24 | )
|
||||
25 | re.match(
|
||||
@@ -76,6 +76,16 @@ RUF039_concat.py:26:5: RUF039 First argument to `re.match()` is not raw bytes li
|
||||
|
|
||||
= help: Replace with raw bytes literal
|
||||
|
||||
ℹ Safe fix
|
||||
23 23 | f'much?'
|
||||
24 24 | )
|
||||
25 25 | re.match(
|
||||
26 |- b'reak'
|
||||
26 |+ rb'reak'
|
||||
27 27 | br'eak'
|
||||
28 28 | )
|
||||
29 29 | re.search(
|
||||
|
||||
RUF039_concat.py:30:8: RUF039 First argument to `re.search()` is not raw string
|
||||
|
|
||||
28 | )
|
||||
@@ -295,7 +305,7 @@ RUF039_concat.py:52:5: RUF039 [*] First argument to `regex.findall()` is not raw
|
||||
54 54 | """
|
||||
55 55 | )
|
||||
|
||||
RUF039_concat.py:66:5: RUF039 First argument to `regex.match()` is not raw bytes literal
|
||||
RUF039_concat.py:66:5: RUF039 [*] First argument to `regex.match()` is not raw bytes literal
|
||||
|
|
||||
64 | )
|
||||
65 | regex.match(
|
||||
@@ -306,6 +316,16 @@ RUF039_concat.py:66:5: RUF039 First argument to `regex.match()` is not raw bytes
|
||||
|
|
||||
= help: Replace with raw bytes literal
|
||||
|
||||
ℹ Safe fix
|
||||
63 63 | f'much?'
|
||||
64 64 | )
|
||||
65 65 | regex.match(
|
||||
66 |- b'reak'
|
||||
66 |+ rb'reak'
|
||||
67 67 | br'eak'
|
||||
68 68 | )
|
||||
69 69 | regex.search(
|
||||
|
||||
RUF039_concat.py:70:8: RUF039 First argument to `regex.search()` is not raw string
|
||||
|
|
||||
68 | )
|
||||
@@ -460,3 +480,223 @@ RUF039_concat.py:78:24: RUF039 [*] First argument to `regex.subn()` is not raw s
|
||||
79 79 |
|
||||
80 80 |
|
||||
81 81 | regex.template(
|
||||
|
||||
RUF039_concat.py:98:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
96 | # https://github.com/astral-sh/ruff/issues/16713
|
||||
97 | re.compile(
|
||||
98 | "["
|
||||
| ^^^ RUF039
|
||||
99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Safe fix
|
||||
95 95 |
|
||||
96 96 | # https://github.com/astral-sh/ruff/issues/16713
|
||||
97 97 | re.compile(
|
||||
98 |- "["
|
||||
98 |+ r"["
|
||||
99 99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
|
||||
RUF039_concat.py:99:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
97 | re.compile(
|
||||
98 | "["
|
||||
99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
96 96 | # https://github.com/astral-sh/ruff/issues/16713
|
||||
97 97 | re.compile(
|
||||
98 98 | "["
|
||||
99 |- "\U0001F600-\U0001F64F" # emoticons
|
||||
99 |+ r"\U0001F600-\U0001F64F" # emoticons
|
||||
100 100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
|
||||
RUF039_concat.py:100:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
98 | "["
|
||||
99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
97 97 | re.compile(
|
||||
98 98 | "["
|
||||
99 99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 |- "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
100 |+ r"\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 103 | "\U00002702-\U000027B0"
|
||||
|
||||
RUF039_concat.py:101:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 | "\U00002702-\U000027B0"
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
98 98 | "["
|
||||
99 99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 |- "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
101 |+ r"\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 103 | "\U00002702-\U000027B0"
|
||||
104 104 | "\U000024C2-\U0001F251"
|
||||
|
||||
RUF039_concat.py:102:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
103 | "\U00002702-\U000027B0"
|
||||
104 | "\U000024C2-\U0001F251"
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
99 99 | "\U0001F600-\U0001F64F" # emoticons
|
||||
100 100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 |- "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
102 |+ r"\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 103 | "\U00002702-\U000027B0"
|
||||
104 104 | "\U000024C2-\U0001F251"
|
||||
105 105 | "\u200d" # zero width joiner
|
||||
|
||||
RUF039_concat.py:103:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 | "\U00002702-\U000027B0"
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
104 | "\U000024C2-\U0001F251"
|
||||
105 | "\u200d" # zero width joiner
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
100 100 | "\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
101 101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 |- "\U00002702-\U000027B0"
|
||||
103 |+ r"\U00002702-\U000027B0"
|
||||
104 104 | "\U000024C2-\U0001F251"
|
||||
105 105 | "\u200d" # zero width joiner
|
||||
106 106 | "\u200c" # zero width non-joiner
|
||||
|
||||
RUF039_concat.py:104:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 | "\U00002702-\U000027B0"
|
||||
104 | "\U000024C2-\U0001F251"
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF039
|
||||
105 | "\u200d" # zero width joiner
|
||||
106 | "\u200c" # zero width non-joiner
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
101 101 | "\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
102 102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 103 | "\U00002702-\U000027B0"
|
||||
104 |- "\U000024C2-\U0001F251"
|
||||
104 |+ r"\U000024C2-\U0001F251"
|
||||
105 105 | "\u200d" # zero width joiner
|
||||
106 106 | "\u200c" # zero width non-joiner
|
||||
107 107 | "\\u200c" # must not be escaped in a raw string
|
||||
|
||||
RUF039_concat.py:105:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
103 | "\U00002702-\U000027B0"
|
||||
104 | "\U000024C2-\U0001F251"
|
||||
105 | "\u200d" # zero width joiner
|
||||
| ^^^^^^^^ RUF039
|
||||
106 | "\u200c" # zero width non-joiner
|
||||
107 | "\\u200c" # must not be escaped in a raw string
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
102 102 | "\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
103 103 | "\U00002702-\U000027B0"
|
||||
104 104 | "\U000024C2-\U0001F251"
|
||||
105 |- "\u200d" # zero width joiner
|
||||
105 |+ r"\u200d" # zero width joiner
|
||||
106 106 | "\u200c" # zero width non-joiner
|
||||
107 107 | "\\u200c" # must not be escaped in a raw string
|
||||
108 108 | "]+",
|
||||
|
||||
RUF039_concat.py:106:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
104 | "\U000024C2-\U0001F251"
|
||||
105 | "\u200d" # zero width joiner
|
||||
106 | "\u200c" # zero width non-joiner
|
||||
| ^^^^^^^^ RUF039
|
||||
107 | "\\u200c" # must not be escaped in a raw string
|
||||
108 | "]+",
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Unsafe fix
|
||||
103 103 | "\U00002702-\U000027B0"
|
||||
104 104 | "\U000024C2-\U0001F251"
|
||||
105 105 | "\u200d" # zero width joiner
|
||||
106 |- "\u200c" # zero width non-joiner
|
||||
106 |+ r"\u200c" # zero width non-joiner
|
||||
107 107 | "\\u200c" # must not be escaped in a raw string
|
||||
108 108 | "]+",
|
||||
109 109 | flags=re.UNICODE,
|
||||
|
||||
RUF039_concat.py:107:5: RUF039 First argument to `re.compile()` is not raw string
|
||||
|
|
||||
105 | "\u200d" # zero width joiner
|
||||
106 | "\u200c" # zero width non-joiner
|
||||
107 | "\\u200c" # must not be escaped in a raw string
|
||||
| ^^^^^^^^^ RUF039
|
||||
108 | "]+",
|
||||
109 | flags=re.UNICODE,
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
RUF039_concat.py:108:5: RUF039 [*] First argument to `re.compile()` is not raw string
|
||||
|
|
||||
106 | "\u200c" # zero width non-joiner
|
||||
107 | "\\u200c" # must not be escaped in a raw string
|
||||
108 | "]+",
|
||||
| ^^^^ RUF039
|
||||
109 | flags=re.UNICODE,
|
||||
110 | )
|
||||
|
|
||||
= help: Replace with raw string
|
||||
|
||||
ℹ Safe fix
|
||||
105 105 | "\u200d" # zero width joiner
|
||||
106 106 | "\u200c" # zero width non-joiner
|
||||
107 107 | "\\u200c" # must not be escaped in a raw string
|
||||
108 |- "]+",
|
||||
108 |+ r"]+",
|
||||
109 109 | flags=re.UNICODE,
|
||||
110 110 | )
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/ruff/mod.rs
|
||||
---
|
||||
RUF055_3.py:6:1: RUF055 [*] Plain string pattern passed to `re` function
|
||||
|
|
||||
5 | # Should be replaced with `b_src.replace(rb"x", b"y")`
|
||||
6 | re.sub(rb"x", b"y", b_src)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF055
|
||||
7 |
|
||||
8 | # Should be replaced with `b_src.startswith(rb"abc")`
|
||||
|
|
||||
= help: Replace with `b_src.replace(rb"x", b"y")`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | b_src = b"abc"
|
||||
4 4 |
|
||||
5 5 | # Should be replaced with `b_src.replace(rb"x", b"y")`
|
||||
6 |-re.sub(rb"x", b"y", b_src)
|
||||
6 |+b_src.replace(rb"x", b"y")
|
||||
7 7 |
|
||||
8 8 | # Should be replaced with `b_src.startswith(rb"abc")`
|
||||
9 9 | if re.match(rb"abc", b_src):
|
||||
|
||||
RUF055_3.py:9:4: RUF055 [*] Plain string pattern passed to `re` function
|
||||
|
|
||||
8 | # Should be replaced with `b_src.startswith(rb"abc")`
|
||||
9 | if re.match(rb"abc", b_src):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ RUF055
|
||||
10 | pass
|
||||
|
|
||||
= help: Replace with `b_src.startswith(rb"abc")`
|
||||
|
||||
ℹ Safe fix
|
||||
6 6 | re.sub(rb"x", b"y", b_src)
|
||||
7 7 |
|
||||
8 8 | # Should be replaced with `b_src.startswith(rb"abc")`
|
||||
9 |-if re.match(rb"abc", b_src):
|
||||
9 |+if b_src.startswith(rb"abc"):
|
||||
10 10 | pass
|
||||
11 11 |
|
||||
12 12 | # Should be replaced with `rb"x" in b_src`
|
||||
|
||||
RUF055_3.py:13:4: RUF055 [*] Plain string pattern passed to `re` function
|
||||
|
|
||||
12 | # Should be replaced with `rb"x" in b_src`
|
||||
13 | if re.search(rb"x", b_src):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ RUF055
|
||||
14 | pass
|
||||
|
|
||||
= help: Replace with `rb"x" in b_src`
|
||||
|
||||
ℹ Safe fix
|
||||
10 10 | pass
|
||||
11 11 |
|
||||
12 12 | # Should be replaced with `rb"x" in b_src`
|
||||
13 |-if re.search(rb"x", b_src):
|
||||
13 |+if rb"x" in b_src:
|
||||
14 14 | pass
|
||||
15 15 |
|
||||
16 16 | # Should be replaced with `b_src.split(rb"abc")`
|
||||
|
||||
RUF055_3.py:17:1: RUF055 [*] Plain string pattern passed to `re` function
|
||||
|
|
||||
16 | # Should be replaced with `b_src.split(rb"abc")`
|
||||
17 | re.split(rb"abc", b_src)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ RUF055
|
||||
18 |
|
||||
19 | # Patterns containing metacharacters should NOT be replaced
|
||||
|
|
||||
= help: Replace with `b_src.split(rb"abc")`
|
||||
|
||||
ℹ Safe fix
|
||||
14 14 | pass
|
||||
15 15 |
|
||||
16 16 | # Should be replaced with `b_src.split(rb"abc")`
|
||||
17 |-re.split(rb"abc", b_src)
|
||||
17 |+b_src.split(rb"abc")
|
||||
18 18 |
|
||||
19 19 | # Patterns containing metacharacters should NOT be replaced
|
||||
20 20 | re.sub(rb"ab[c]", b"", b_src)
|
||||
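Editor's note (illustrative, not part of the diff): the RUF055 snapshots above replace `re` calls whose pattern has no regex metacharacters with plain `bytes`/`str` methods. A small sketch of the equivalences being asserted, with hypothetical sample data:

```python
# Sketch only: the rewrites are behavior-preserving when the pattern is literal.
import re

b_src = b"abcx"

assert re.sub(rb"x", b"y", b_src) == b_src.replace(rb"x", b"y") == b"abcy"
assert bool(re.match(rb"abc", b_src)) and b_src.startswith(rb"abc")
assert bool(re.search(rb"x", b_src)) and rb"x" in b_src
assert re.split(rb"abc", b_src) == b_src.split(rb"abc") == [b"", b"x"]
```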
@@ -0,0 +1,11 @@
---
source: crates/ruff_linter/src/rules/ruff/mod.rs
---
RUF039_py_version_sensitive.py:3:12: RUF039 First argument to `re.compile()` is not raw string
  |
1 | import re
2 |
3 | re.compile("\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix
  | ^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF039
  |
  = help: Replace with raw string
@@ -0,0 +1,17 @@
---
source: crates/ruff_linter/src/rules/ruff/mod.rs
---
RUF039_py_version_sensitive.py:3:12: RUF039 [*] First argument to `re.compile()` is not raw string
  |
1 | import re
2 |
3 | re.compile("\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix
  | ^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF039
  |
  = help: Replace with raw string

ℹ Unsafe fix
1 1 | import re
2 2 |
3 |-re.compile("\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix
3 |+re.compile(r"\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix
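Editor's note (illustrative, not part of the diff): why this particular fix is gated on the target Python version, per the fixture's own comment. In a plain literal the `\N{...}` escape is resolved by Python itself; in a raw string it reaches `re`, which only supports `\N{...}` in patterns on Python 3.8 and later:

```python
# Sketch only.
import re

assert re.match("\N{PARTIAL DIFFERENTIAL}", "\u2202")   # str literal resolves the escape
assert re.match(r"\N{PARTIAL DIFFERENTIAL}", "\u2202")  # re handles it; needs Python >= 3.8
```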
@@ -272,7 +272,7 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
}

assert!(
!(fixable && diagnostic.suggestion().is_none()),
!(fixable && diagnostic.first_help_text().is_none()),
"Diagnostic emitted by {rule:?} is fixable but \
`Violation::fix_title` returns `None`"
);

@@ -235,12 +235,7 @@ impl TraversalSignal {
}

pub fn walk_annotation<'a, V: SourceOrderVisitor<'a> + ?Sized>(visitor: &mut V, expr: &'a Expr) {
let node = AnyNodeRef::from(expr);
if visitor.enter_node(node).is_traverse() {
visitor.visit_expr(expr);
}

visitor.leave_node(node);
visitor.visit_expr(expr);
}

pub fn walk_decorator<'a, V>(visitor: &mut V, decorator: &'a Decorator)

@@ -1527,7 +1527,7 @@ impl<'src> Parser<'src> {
self.bump(kind.start_token());
let elements = self.parse_interpolated_string_elements(
flags,
InterpolatedStringElementsKind::Regular,
InterpolatedStringElementsKind::Regular(kind),
kind,
);

@@ -8,6 +8,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use crate::error::UnsupportedSyntaxError;
|
||||
use crate::parser::expression::ExpressionContext;
|
||||
use crate::parser::progress::{ParserProgress, TokenId};
|
||||
use crate::string::InterpolatedStringKind;
|
||||
use crate::token::TokenValue;
|
||||
use crate::token_set::TokenSet;
|
||||
use crate::token_source::{TokenSource, TokenSourceCheckpoint};
|
||||
@@ -799,7 +800,7 @@ impl WithItemKind {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Copy, Clone)]
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||
enum InterpolatedStringElementsKind {
|
||||
/// The regular f-string elements.
|
||||
///
|
||||
@@ -807,7 +808,7 @@ enum InterpolatedStringElementsKind {
|
||||
/// ```py
|
||||
/// f"hello {x:.2f} world"
|
||||
/// ```
|
||||
Regular,
|
||||
Regular(InterpolatedStringKind),
|
||||
|
||||
/// The f-string elements are part of the format specifier.
|
||||
///
|
||||
@@ -819,15 +820,13 @@ enum InterpolatedStringElementsKind {
|
||||
}
|
||||
|
||||
impl InterpolatedStringElementsKind {
|
||||
const fn list_terminators(self) -> TokenSet {
|
||||
const fn list_terminator(self) -> TokenKind {
|
||||
match self {
|
||||
InterpolatedStringElementsKind::Regular => {
|
||||
TokenSet::new([TokenKind::FStringEnd, TokenKind::TStringEnd])
|
||||
}
|
||||
InterpolatedStringElementsKind::Regular(string_kind) => string_kind.end_token(),
|
||||
// test_ok fstring_format_spec_terminator
|
||||
// f"hello {x:} world"
|
||||
// f"hello {x:.3f} world"
|
||||
InterpolatedStringElementsKind::FormatSpec => TokenSet::new([TokenKind::Rbrace]),
|
||||
InterpolatedStringElementsKind::FormatSpec => TokenKind::Rbrace,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1121,7 +1120,7 @@ impl RecoveryContextKind {
|
||||
.then_some(ListTerminatorKind::Regular),
|
||||
},
|
||||
RecoveryContextKind::InterpolatedStringElements(kind) => {
|
||||
if p.at_ts(kind.list_terminators()) {
|
||||
if p.at(kind.list_terminator()) {
|
||||
Some(ListTerminatorKind::Regular)
|
||||
} else {
|
||||
// test_err unterminated_fstring_newline_recovery
|
||||
@@ -1177,13 +1176,23 @@ impl RecoveryContextKind {
|
||||
) || p.at_name_or_soft_keyword()
|
||||
}
|
||||
RecoveryContextKind::WithItems(_) => p.at_expr(),
|
||||
RecoveryContextKind::InterpolatedStringElements(_) => matches!(
|
||||
p.current_token_kind(),
|
||||
// Literal element
|
||||
TokenKind::FStringMiddle | TokenKind::TStringMiddle
|
||||
// Expression element
|
||||
| TokenKind::Lbrace
|
||||
),
|
||||
RecoveryContextKind::InterpolatedStringElements(elements_kind) => {
|
||||
match elements_kind {
|
||||
InterpolatedStringElementsKind::Regular(interpolated_string_kind) => {
|
||||
p.current_token_kind() == interpolated_string_kind.middle_token()
|
||||
|| p.current_token_kind() == TokenKind::Lbrace
|
||||
}
|
||||
InterpolatedStringElementsKind::FormatSpec => {
|
||||
matches!(
|
||||
p.current_token_kind(),
|
||||
// Literal element
|
||||
TokenKind::FStringMiddle | TokenKind::TStringMiddle
|
||||
// Expression element
|
||||
| TokenKind::Lbrace
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1272,8 +1281,8 @@ impl RecoveryContextKind {
|
||||
),
|
||||
},
|
||||
RecoveryContextKind::InterpolatedStringElements(kind) => match kind {
|
||||
InterpolatedStringElementsKind::Regular => ParseErrorType::OtherError(
|
||||
"Expected an f-string or t-string element or the end of the f-string or t-string".to_string(),
|
||||
InterpolatedStringElementsKind::Regular(string_kind) => ParseErrorType::OtherError(
|
||||
format!("Expected an element of or the end of the {string_kind}"),
|
||||
),
|
||||
InterpolatedStringElementsKind::FormatSpec => ParseErrorType::OtherError(
|
||||
"Expected an f-string or t-string element or a '}'".to_string(),
|
||||
@@ -1316,8 +1325,9 @@ bitflags! {
|
||||
const WITH_ITEMS_PARENTHESIZED = 1 << 25;
|
||||
const WITH_ITEMS_PARENTHESIZED_EXPRESSION = 1 << 26;
|
||||
const WITH_ITEMS_UNPARENTHESIZED = 1 << 28;
|
||||
const FT_STRING_ELEMENTS = 1 << 29;
|
||||
const FT_STRING_ELEMENTS_IN_FORMAT_SPEC = 1 << 30;
|
||||
const F_STRING_ELEMENTS = 1 << 29;
|
||||
const T_STRING_ELEMENTS = 1 << 30;
|
||||
const FT_STRING_ELEMENTS_IN_FORMAT_SPEC = 1 << 31;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1371,7 +1381,13 @@ impl RecoveryContext {
|
||||
WithItemKind::Unparenthesized => RecoveryContext::WITH_ITEMS_UNPARENTHESIZED,
|
||||
},
|
||||
RecoveryContextKind::InterpolatedStringElements(kind) => match kind {
|
||||
InterpolatedStringElementsKind::Regular => RecoveryContext::FT_STRING_ELEMENTS,
|
||||
InterpolatedStringElementsKind::Regular(InterpolatedStringKind::FString) => {
|
||||
RecoveryContext::F_STRING_ELEMENTS
|
||||
}
|
||||
InterpolatedStringElementsKind::Regular(InterpolatedStringKind::TString) => {
|
||||
RecoveryContext::T_STRING_ELEMENTS
|
||||
}
|
||||
|
||||
InterpolatedStringElementsKind::FormatSpec => {
|
||||
RecoveryContext::FT_STRING_ELEMENTS_IN_FORMAT_SPEC
|
||||
}
|
||||
@@ -1442,8 +1458,11 @@ impl RecoveryContext {
|
||||
RecoveryContext::WITH_ITEMS_UNPARENTHESIZED => {
|
||||
RecoveryContextKind::WithItems(WithItemKind::Unparenthesized)
|
||||
}
|
||||
RecoveryContext::FT_STRING_ELEMENTS => RecoveryContextKind::InterpolatedStringElements(
|
||||
InterpolatedStringElementsKind::Regular,
|
||||
RecoveryContext::F_STRING_ELEMENTS => RecoveryContextKind::InterpolatedStringElements(
|
||||
InterpolatedStringElementsKind::Regular(InterpolatedStringKind::FString),
|
||||
),
|
||||
RecoveryContext::T_STRING_ELEMENTS => RecoveryContextKind::InterpolatedStringElements(
|
||||
InterpolatedStringElementsKind::Regular(InterpolatedStringKind::TString),
|
||||
),
|
||||
RecoveryContext::FT_STRING_ELEMENTS_IN_FORMAT_SPEC => {
|
||||
RecoveryContextKind::InterpolatedStringElements(
|
||||
|
||||
@@ -0,0 +1,10 @@
---
source: crates/ruff_python_parser/src/parser/tests.rs
expression: error
---
ParseError {
error: Lexical(
LineContinuationError,
),
location: 3..4,
}
@@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/parser/tests.rs
expression: error
---
ParseError {
error: Lexical(
TStringError(
SingleRbrace,
),
),
location: 8..9,
}
@@ -134,3 +134,26 @@ foo.bar[0].baz[2].egg??
.unwrap();
insta::assert_debug_snapshot!(parsed.syntax());
}

#[test]
fn test_fstring_expr_inner_line_continuation_and_t_string() {
let source = r#"f'{\t"i}'"#;

let parsed = parse_expression(source);

let error = parsed.unwrap_err();

insta::assert_debug_snapshot!(error);
}

#[test]
fn test_fstring_expr_inner_line_continuation_newline_t_string() {
let source = r#"f'{\
t"i}'"#;

let parsed = parse_expression(source);

let error = parsed.unwrap_err();

insta::assert_debug_snapshot!(error);
}

@@ -41,7 +41,7 @@ impl From<StringType> for Expr {
}
}

#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum InterpolatedStringKind {
FString,
TString,

@@ -124,5 +124,5 @@ Module(

  |
1 | f"{lambda x: x}"
  | ^ Syntax Error: Expected an f-string or t-string element or the end of the f-string or t-string
  | ^ Syntax Error: Expected an element of or the end of the f-string
  |

@@ -221,7 +221,7 @@ Module(
2 | 'hello'
3 | f'world {x}
4 | )
  | ^ Syntax Error: Expected an f-string or t-string element or the end of the f-string or t-string
  | ^ Syntax Error: Expected an element of or the end of the f-string
5 | 1 + 1
6 | (
  |

@@ -128,5 +128,5 @@ Module(
  |
1 | # parse_options: {"target-version": "3.14"}
2 | t"{lambda x: x}"
  | ^ Syntax Error: Expected an f-string or t-string element or the end of the f-string or t-string
  | ^ Syntax Error: Expected an element of or the end of the t-string
  |

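Editor's note (illustrative, not part of the diff): the construct behind the reworded error message. An unparenthesized lambda cannot appear in an f-string or t-string replacement field because the `:` reads as the start of a format spec; CPython rejects it too, with its own wording:

```python
# Sketch only: CPython's message differs from ruff's, but it fails at the same spot.
import ast

try:
    ast.parse('f"{lambda x: x}"')
except SyntaxError as exc:
    print(exc.msg)

ast.parse('f"{(lambda x: x)}"')  # the parenthesized form parses fine
```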
@@ -163,7 +163,7 @@ fn stem(path: &str) -> &str {
}

/// Infer the [`Visibility`] of a module from its path.
pub(crate) fn module_visibility(module: &Module) -> Visibility {
pub(crate) fn module_visibility(module: Module) -> Visibility {
match &module.source {
ModuleSource::Path(path) => {
if path.iter().any(|m| is_private_module(m)) {

@@ -223,7 +223,7 @@ impl<'a> Definitions<'a> {
// visibility.
let visibility = {
match &definition {
Definition::Module(module) => module_visibility(module),
Definition::Module(module) => module_visibility(*module),
Definition::Member(member) => match member.kind {
MemberKind::Class(class) => {
let parent = &definitions[member.parent];

Some files were not shown because too many files have changed in this diff.