Compare commits
125 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0ead9a16ac | ||
|
|
653429bef9 | ||
|
|
f0aa6bd4d3 | ||
|
|
5665968b42 | ||
|
|
33a91773f7 | ||
|
|
0666added9 | ||
|
|
7566ca8ff7 | ||
|
|
5dd9e56748 | ||
|
|
f8173daf4c | ||
|
|
511ec0d7bc | ||
|
|
30bec3fcfa | ||
|
|
8b9193ab1f | ||
|
|
62a24e1028 | ||
|
|
f1d367655b | ||
|
|
0c8ec80d7b | ||
|
|
df15ad9696 | ||
|
|
8665a1a19d | ||
|
|
9a8ba58b4c | ||
|
|
715250a179 | ||
|
|
d30e9125eb | ||
|
|
212fd86bf0 | ||
|
|
4b58a9c092 | ||
|
|
b7794f855b | ||
|
|
15c7b6bcf7 | ||
|
|
1782fb8c30 | ||
|
|
987111f5fb | ||
|
|
9f486fa841 | ||
|
|
4dee49d6fa | ||
|
|
e7e2f44440 | ||
|
|
93bfa239b7 | ||
|
|
14f2158e5d | ||
|
|
b8a6ce43a2 | ||
|
|
5ab9538573 | ||
|
|
d19839fe0f | ||
|
|
8dc06d1035 | ||
|
|
120e9d37f1 | ||
|
|
28fe2d334a | ||
|
|
3562d809b2 | ||
|
|
4cac75bc27 | ||
|
|
ed872145fe | ||
|
|
35b04c2fab | ||
|
|
ae4a7ef0ed | ||
|
|
cab3a507bc | ||
|
|
82317ba1fd | ||
|
|
24bcbb85a1 | ||
|
|
089a671adb | ||
|
|
bd8f65814c | ||
|
|
1e894f328c | ||
|
|
52b22ceb6e | ||
|
|
c9d7c0d7d5 | ||
|
|
eb69fe37bf | ||
|
|
27011448ea | ||
|
|
b4d6b7c230 | ||
|
|
fa1341b0db | ||
|
|
401d172e47 | ||
|
|
6a4b216362 | ||
|
|
9dd05424c4 | ||
|
|
ac2e374a5a | ||
|
|
38fa305f35 | ||
|
|
456273a92e | ||
|
|
507961f27d | ||
|
|
a1c559eaa4 | ||
|
|
d0dae7e576 | ||
|
|
efe7c393d1 | ||
|
|
0b9af031fb | ||
|
|
0f9d7283e7 | ||
|
|
bb7303f867 | ||
|
|
60d318ddcf | ||
|
|
5640c310bb | ||
|
|
072358e26b | ||
|
|
aaab9f1597 | ||
|
|
b22e6c3d38 | ||
|
|
40ddc1604c | ||
|
|
bf4b96c5de | ||
|
|
b11492e940 | ||
|
|
cd4718988a | ||
|
|
5908b39102 | ||
|
|
edfe76d673 | ||
|
|
5e5a96ca28 | ||
|
|
3650aaa8b3 | ||
|
|
cc822082a7 | ||
|
|
87ca6171cf | ||
|
|
9713ee4b80 | ||
|
|
528bf2df3a | ||
|
|
8184235f93 | ||
|
|
25981420c4 | ||
|
|
b56b8915ca | ||
|
|
bf02c77fd7 | ||
|
|
ba7041b6bf | ||
|
|
5dff3195d4 | ||
|
|
23363cafd1 | ||
|
|
e4596ebc35 | ||
|
|
c9e02c52a8 | ||
|
|
d097b49371 | ||
|
|
ea270da289 | ||
|
|
cdb9fda3b8 | ||
|
|
a0c0b74b6d | ||
|
|
1a2e444799 | ||
|
|
6f548d9872 | ||
|
|
5a74a8e5a1 | ||
|
|
c5bfd1e877 | ||
|
|
9e1039f823 | ||
|
|
9478454b96 | ||
|
|
9a8e5f7877 | ||
|
|
6fd71e6f53 | ||
|
|
dd60a3865c | ||
|
|
0726dc25c2 | ||
|
|
634ed8975c | ||
|
|
5100c56273 | ||
|
|
26a268a3ec | ||
|
|
324455f580 | ||
|
|
da1c320bfa | ||
|
|
485d997d35 | ||
|
|
d7214e77e6 | ||
|
|
952c623102 | ||
|
|
0a26201643 | ||
|
|
0e67757edb | ||
|
|
c395e44bd7 | ||
|
|
75da72bd7f | ||
|
|
521e6de2c8 | ||
|
|
0b963ddcfa | ||
|
|
937de121f3 | ||
|
|
787e2fd49d | ||
|
|
6acc316d19 | ||
|
|
a647f31600 |
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -4,3 +4,4 @@ crates/ruff/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
||||
crates/ruff/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
||||
|
||||
ruff.schema.json linguist-generated=true text=auto eol=lf
|
||||
*.md.snap linguist-language=Markdown
|
||||
|
||||
67
.github/workflows/ci.yaml
vendored
67
.github/workflows/ci.yaml
vendored
@@ -16,7 +16,7 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.7" # to build abi3 wheels
|
||||
PYTHON_VERSION: "3.11" # to build abi3 wheels
|
||||
|
||||
jobs:
|
||||
cargo-fmt:
|
||||
@@ -31,17 +31,6 @@ jobs:
|
||||
cargo-clippy:
|
||||
name: "cargo clippy"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: "Install Rust toolchain"
|
||||
run: |
|
||||
rustup component add clippy
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- run: cargo clippy --workspace --all-targets --all-features -- -D warnings
|
||||
|
||||
cargo-clippy-wasm:
|
||||
name: "cargo clippy (wasm)"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -49,7 +38,10 @@ jobs:
|
||||
rustup component add clippy
|
||||
rustup target add wasm32-unknown-unknown
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
|
||||
- name: "Clippy"
|
||||
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
|
||||
- name: "Clippy (wasm)"
|
||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
|
||||
|
||||
cargo-test:
|
||||
strategy:
|
||||
@@ -62,21 +54,19 @@ jobs:
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- run: cargo install cargo-insta
|
||||
# cargo insta 1.30.0 fails for some reason (https://github.com/mitsuhiko/insta/issues/392)
|
||||
- run: cargo install cargo-insta@=1.29.0
|
||||
- run: pip install black[d]==23.1.0
|
||||
- name: "Run tests (Ubuntu)"
|
||||
if: ${{ matrix.os == 'ubuntu-latest' }}
|
||||
run: |
|
||||
cargo insta test --all --all-features --delete-unreferenced-snapshots
|
||||
git diff --exit-code
|
||||
run: cargo insta test --all --all-features --unreferenced reject
|
||||
- name: "Run tests (Windows)"
|
||||
if: ${{ matrix.os == 'windows-latest' }}
|
||||
shell: bash
|
||||
run: |
|
||||
cargo insta test --all --all-features
|
||||
git diff --exit-code
|
||||
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
|
||||
run: cargo insta test --all --all-features
|
||||
- run: cargo test --package ruff_cli --test black_compatibility_test -- --ignored
|
||||
# Skipped as it's currently broken. The resource were moved from the
|
||||
# TODO: Skipped as it's currently broken. The resource were moved from the
|
||||
# ruff_cli to ruff crate, but this test was not updated.
|
||||
if: false
|
||||
# Check for broken links in the documentation.
|
||||
@@ -152,7 +142,7 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.11"
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@v3
|
||||
name: Download Ruff binary
|
||||
@@ -236,7 +226,7 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.11"
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
@@ -260,13 +250,24 @@ jobs:
|
||||
docs:
|
||||
name: "mkdocs"
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.8.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
- name: "Update README File"
|
||||
run: python scripts/transform_readme.py --target mkdocs
|
||||
@@ -274,5 +275,23 @@ jobs:
|
||||
run: python scripts/generate_mkdocs.py
|
||||
- name: "Check docs formatting"
|
||||
run: python scripts/check_docs_formatted.py
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
- name: "Build docs"
|
||||
run: mkdocs build --strict
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||
|
||||
check-formatter-stability:
|
||||
name: "Check formatter stability"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Cache rust"
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: "Clone CPython 3.10"
|
||||
run: git clone --branch 3.10 --depth 1 https://github.com/python/cpython.git crates/ruff/resources/test/cpython
|
||||
- name: "Check stability"
|
||||
run: cargo run --bin ruff_dev -- format-dev --stability-check crates/ruff/resources/test/cpython
|
||||
|
||||
20
.github/workflows/docs.yaml
vendored
20
.github/workflows/docs.yaml
vendored
@@ -10,20 +10,34 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.8.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
run: |
|
||||
pip install -r docs/requirements.txt
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
mkdocs build --strict
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@2.0.0
|
||||
|
||||
2
.github/workflows/flake8-to-ruff.yaml
vendored
2
.github/workflows/flake8-to-ruff.yaml
vendored
@@ -9,7 +9,7 @@ concurrency:
|
||||
env:
|
||||
PACKAGE_NAME: flake8-to-ruff
|
||||
CRATE_NAME: flake8_to_ruff
|
||||
PYTHON_VERSION: "3.7" # to build abi3 wheels
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
2
.github/workflows/release.yaml
vendored
2
.github/workflows/release.yaml
vendored
@@ -20,7 +20,7 @@ concurrency:
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.7" # to build abi3 wheels
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,8 +1,7 @@
|
||||
# Benchmarking cpython (CONTRIBUTING.md)
|
||||
crates/ruff/resources/test/cpython
|
||||
# generate_mkdocs.py
|
||||
mkdocs.yml
|
||||
.overrides
|
||||
mkdocs.generated.yml
|
||||
# check_ecosystem.py
|
||||
ruff-old
|
||||
github_search*.jsonl
|
||||
|
||||
@@ -1,5 +1,41 @@
|
||||
# Breaking Changes
|
||||
|
||||
## 0.0.277
|
||||
|
||||
### `.ipynb_checkpoints`, `.pyenv`, `.pytest_cache`, and `.vscode` are now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
|
||||
|
||||
Ruff maintains a list of default exclusions, which now consists of the following patterns:
|
||||
|
||||
- `.bzr`
|
||||
- `.direnv`
|
||||
- `.eggs`
|
||||
- `.git`
|
||||
- `.git-rewrite`
|
||||
- `.hg`
|
||||
- `.ipynb_checkpoints`
|
||||
- `.mypy_cache`
|
||||
- `.nox`
|
||||
- `.pants.d`
|
||||
- `.pyenv`
|
||||
- `.pytest_cache`
|
||||
- `.pytype`
|
||||
- `.ruff_cache`
|
||||
- `.svn`
|
||||
- `.tox`
|
||||
- `.venv`
|
||||
- `.vscode`
|
||||
- `__pypackages__`
|
||||
- `_build`
|
||||
- `buck-out`
|
||||
- `build`
|
||||
- `dist`
|
||||
- `node_modules`
|
||||
- `venv`
|
||||
|
||||
Previously, the `.ipynb_checkpoints`, `.pyenv`, `.pytest_cache`, and `.vscode` directories were not
|
||||
excluded by default. This change brings Ruff's default exclusions in line with other tools like
|
||||
Black.
|
||||
|
||||
## 0.0.276
|
||||
|
||||
### The `keep-runtime-typing` setting has been reinstated ([#5470](https://github.com/astral-sh/ruff/pull/5470))
|
||||
|
||||
137
CONTRIBUTING.md
137
CONTRIBUTING.md
@@ -256,7 +256,11 @@ To preview any changes to the documentation locally:
|
||||
1. Run the development server with:
|
||||
|
||||
```shell
|
||||
mkdocs serve
|
||||
# For contributors.
|
||||
mkdocs serve -f mkdocs.generated.yml
|
||||
|
||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||
mkdocs serve -f mkdocs.insiders.yml
|
||||
```
|
||||
|
||||
The documentation should then be available locally at
|
||||
@@ -546,3 +550,134 @@ cargo instruments -t time --bench linter --profile release-debug -p ruff_benchma
|
||||
- You may want to pass an additional filter to run a single test file
|
||||
|
||||
Otherwise, follow the instructions from the linux section.
|
||||
|
||||
## `cargo dev`
|
||||
|
||||
`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
|
||||
utils with it:
|
||||
|
||||
- `cargo dev print-ast <file>`: Print the AST of a python file using the
|
||||
[RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
|
||||
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
|
||||
for start and stop of each node and also how the `:` token, the comment and whitespace are not
|
||||
represented anymore:
|
||||
|
||||
```text
|
||||
[
|
||||
If(
|
||||
StmtIf {
|
||||
range: 0..13,
|
||||
test: Constant(
|
||||
ExprConstant {
|
||||
range: 3..7,
|
||||
value: Bool(
|
||||
true,
|
||||
),
|
||||
kind: None,
|
||||
},
|
||||
),
|
||||
body: [
|
||||
Pass(
|
||||
StmtPass {
|
||||
range: 9..13,
|
||||
},
|
||||
),
|
||||
],
|
||||
orelse: [],
|
||||
},
|
||||
),
|
||||
]
|
||||
```
|
||||
|
||||
- `cargo dev print-tokens <file>`: Print the tokens that the AST is built upon. Again for
|
||||
`if True: pass # comment`:
|
||||
|
||||
```text
|
||||
0 If 2
|
||||
3 True 7
|
||||
7 Colon 8
|
||||
9 Pass 13
|
||||
14 Comment(
|
||||
"# comment",
|
||||
) 23
|
||||
23 Newline 24
|
||||
```
|
||||
|
||||
- `cargo dev print-cst <file>`: Print the CST of a python file using
|
||||
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
|
||||
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
|
||||
|
||||
```text
|
||||
Module {
|
||||
body: [
|
||||
Compound(
|
||||
If(
|
||||
If {
|
||||
test: Name(
|
||||
Name {
|
||||
value: "True",
|
||||
lpar: [],
|
||||
rpar: [],
|
||||
},
|
||||
),
|
||||
body: SimpleStatementSuite(
|
||||
SimpleStatementSuite {
|
||||
body: [
|
||||
Pass(
|
||||
Pass {
|
||||
semicolon: None,
|
||||
},
|
||||
),
|
||||
],
|
||||
leading_whitespace: SimpleWhitespace(
|
||||
" ",
|
||||
),
|
||||
trailing_whitespace: TrailingWhitespace {
|
||||
whitespace: SimpleWhitespace(
|
||||
" ",
|
||||
),
|
||||
comment: Some(
|
||||
Comment(
|
||||
"# comment",
|
||||
),
|
||||
),
|
||||
newline: Newline(
|
||||
None,
|
||||
Real,
|
||||
),
|
||||
},
|
||||
},
|
||||
),
|
||||
orelse: None,
|
||||
leading_lines: [],
|
||||
whitespace_before_test: SimpleWhitespace(
|
||||
" ",
|
||||
),
|
||||
whitespace_after_test: SimpleWhitespace(
|
||||
"",
|
||||
),
|
||||
is_elif: false,
|
||||
},
|
||||
),
|
||||
),
|
||||
],
|
||||
header: [],
|
||||
footer: [],
|
||||
default_indent: " ",
|
||||
default_newline: "\n",
|
||||
has_trailing_newline: true,
|
||||
encoding: "utf-8",
|
||||
}
|
||||
```
|
||||
|
||||
- `cargo dev generate-all`: Update `ruff.schema.json`, `docs/configuration.md` and `docs/rules`.
|
||||
You can also set `RUFF_UPDATE_SCHEMA=1` to update `ruff.schema.json` during `cargo test`.
|
||||
- `cargo dev generate-cli-help`, `cargo dev generate-docs` and `cargo dev generate-json-schema`:
|
||||
Update just `docs/configuration.md`, `docs/rules` and `ruff.schema.json` respectively.
|
||||
- `cargo dev generate-options`: Generate a markdown-compatible table of all `pyproject.toml`
|
||||
options. Used for <https://beta.ruff.rs/docs/settings/>
|
||||
- `cargo dev generate-rules-table`: Generate a markdown-compatible table of all rules. Used for <https://beta.ruff.rs/docs/rules/>
|
||||
- `cargo dev round-trip <python file or jupyter notebook>`: Read a Python file or Jupyter Notebook,
|
||||
parse it, serialize the parsed representation and write it back. Used to check how good our
|
||||
representation is so that fixes don't rewrite irrelevant parts of a file.
|
||||
- `cargo dev format_dev`: See ruff_python_formatter README.md
|
||||
|
||||
275
Cargo.lock
generated
275
Cargo.lock
generated
@@ -148,17 +148,6 @@ dependencies = [
|
||||
"wait-timeout",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atty"
|
||||
version = "0.2.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
|
||||
dependencies = [
|
||||
"hermit-abi 0.1.19",
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.1.0"
|
||||
@@ -188,18 +177,17 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.3.2"
|
||||
version = "2.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
|
||||
checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.5.0"
|
||||
version = "1.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a246e68bb43f6cd9db24bea052a53e40405417c5fb372e3d1a8a7f770a564ef5"
|
||||
checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"regex-automata",
|
||||
"serde",
|
||||
]
|
||||
@@ -291,9 +279,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.3.8"
|
||||
version = "4.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9394150f5b4273a1763355bd1c2ec54cc5a2593f790587bcd6b2c947cfa9211"
|
||||
checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -302,22 +290,21 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.3.8"
|
||||
version = "4.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a78fbdd3cc2914ddf37ba444114bc7765bbdcb55ec9cbe6fa054f0137400717"
|
||||
checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
"bitflags 1.3.2",
|
||||
"clap_lex",
|
||||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.3.1"
|
||||
version = "4.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f6b5c519bab3ea61843a7923d074b04245624bb84a64a8c150f5deb014e388b"
|
||||
checksum = "5fc443334c81a804575546c5a8a79b4913b50e28d69232903604cada1de817ce"
|
||||
dependencies = [
|
||||
"clap",
|
||||
]
|
||||
@@ -363,7 +350,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -393,13 +380,13 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
||||
|
||||
[[package]]
|
||||
name = "colored"
|
||||
version = "2.0.0"
|
||||
version = "2.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
|
||||
checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6"
|
||||
dependencies = [
|
||||
"atty",
|
||||
"is-terminal",
|
||||
"lazy_static",
|
||||
"winapi",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -417,6 +404,7 @@ dependencies = [
|
||||
"encode_unicode",
|
||||
"lazy_static",
|
||||
"libc",
|
||||
"unicode-width",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
@@ -577,7 +565,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -588,7 +576,7 @@ checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -746,7 +734,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.276"
|
||||
version = "0.0.278"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -853,27 +841,9 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.1.19"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
|
||||
checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
|
||||
|
||||
[[package]]
|
||||
name = "hex"
|
||||
@@ -980,6 +950,19 @@ dependencies = [
|
||||
"hashbrown 0.14.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indicatif"
|
||||
version = "0.17.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ff8cc23a7393a397ed1d7f56e6365cba772aba9f9912ab968b03043c395d057"
|
||||
dependencies = [
|
||||
"console",
|
||||
"instant",
|
||||
"number_prefix",
|
||||
"portable-atomic",
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inotify"
|
||||
version = "0.9.6"
|
||||
@@ -1030,7 +1013,7 @@ version = "1.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
|
||||
dependencies = [
|
||||
"hermit-abi 0.3.1",
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
@@ -1050,13 +1033,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.7"
|
||||
version = "0.4.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f"
|
||||
checksum = "24fddda5af7e54bf7da53067d6e802dbcc381d0a8eef629df528e3ebf68755cb"
|
||||
dependencies = [
|
||||
"hermit-abi 0.3.1",
|
||||
"io-lifetimes",
|
||||
"rustix",
|
||||
"hermit-abi",
|
||||
"rustix 0.38.3",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
@@ -1071,9 +1053,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.6"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
|
||||
checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
@@ -1155,7 +1137,7 @@ checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
source = "git+https://github.com/Instagram/LibCST.git?rev=3cacca1a1029f05707e50703b49fe3dd860aa839#3cacca1a1029f05707e50703b49fe3dd860aa839"
|
||||
dependencies = [
|
||||
"chic",
|
||||
"itertools",
|
||||
@@ -1170,7 +1152,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
source = "git+https://github.com/Instagram/LibCST.git?rev=3cacca1a1029f05707e50703b49fe3dd860aa839#3cacca1a1029f05707e50703b49fe3dd860aa839"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
@@ -1198,6 +1180,12 @@ version = "0.3.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.19"
|
||||
@@ -1351,14 +1339,20 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "num_cpus"
|
||||
version = "1.15.0"
|
||||
version = "1.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
|
||||
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
|
||||
dependencies = [
|
||||
"hermit-abi 0.2.6",
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "number_prefix"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.18.0"
|
||||
@@ -1397,9 +1391,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.12"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79"
|
||||
checksum = "b4b27ab7be369122c218afc2079489cdcb4b517c0a3fc386ff11e1fedfcc2b35"
|
||||
|
||||
[[package]]
|
||||
name = "path-absolutize"
|
||||
@@ -1526,7 +1520,7 @@ dependencies = [
|
||||
"phf_shared",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1540,9 +1534,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pin-project-lite"
|
||||
version = "0.2.9"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
|
||||
checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57"
|
||||
|
||||
[[package]]
|
||||
name = "plotters"
|
||||
@@ -1583,6 +1577,12 @@ dependencies = [
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "767eb9f07d4a5ebcb39bbf2d452058a93c011373abf6832e24194a1c3f004794"
|
||||
|
||||
[[package]]
|
||||
name = "predicates"
|
||||
version = "3.0.3"
|
||||
@@ -1694,9 +1694,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.28"
|
||||
version = "1.0.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
|
||||
checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
@@ -1769,26 +1769,32 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.8.4"
|
||||
version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
|
||||
checksum = "89089e897c013b3deb627116ae56a6955a72b8bed395c9526af31c9fe528b484"
|
||||
dependencies = [
|
||||
"aho-corasick 1.0.2",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa250384981ea14565685dea16a9ccc4d1c541a13f82b9c168572264d1df8c56"
|
||||
dependencies = [
|
||||
"aho-corasick 1.0.2",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.7.2"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
|
||||
checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846"
|
||||
|
||||
[[package]]
|
||||
name = "result-like"
|
||||
@@ -1829,11 +1835,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.276"
|
||||
version = "0.0.278"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
"bitflags 2.3.2",
|
||||
"bitflags 2.3.3",
|
||||
"chrono",
|
||||
"clap",
|
||||
"colored",
|
||||
@@ -1865,6 +1871,7 @@ dependencies = [
|
||||
"result-like",
|
||||
"ruff_cache",
|
||||
"ruff_diagnostics",
|
||||
"ruff_index",
|
||||
"ruff_macros",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_semantic",
|
||||
@@ -1926,15 +1933,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.0.276"
|
||||
version = "0.0.278"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
"argfile",
|
||||
"assert_cmd",
|
||||
"atty",
|
||||
"bincode",
|
||||
"bitflags 2.3.2",
|
||||
"bitflags 2.3.3",
|
||||
"cachedir",
|
||||
"chrono",
|
||||
"clap",
|
||||
@@ -1978,6 +1984,8 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"ignore",
|
||||
"indicatif",
|
||||
"itertools",
|
||||
"libcst",
|
||||
"log",
|
||||
@@ -1988,6 +1996,7 @@ dependencies = [
|
||||
"ruff",
|
||||
"ruff_cli",
|
||||
"ruff_diagnostics",
|
||||
"ruff_formatter",
|
||||
"ruff_python_formatter",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_textwrap",
|
||||
@@ -1998,6 +2007,7 @@ dependencies = [
|
||||
"similar",
|
||||
"strum",
|
||||
"strum_macros",
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2041,7 +2051,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"ruff_textwrap",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2049,7 +2059,7 @@ name = "ruff_python_ast"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.3.2",
|
||||
"bitflags 2.3.3",
|
||||
"insta",
|
||||
"is-macro",
|
||||
"itertools",
|
||||
@@ -2073,7 +2083,7 @@ name = "ruff_python_formatter"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.3.2",
|
||||
"bitflags 2.3.3",
|
||||
"clap",
|
||||
"countme",
|
||||
"insta",
|
||||
@@ -2090,6 +2100,8 @@ dependencies = [
|
||||
"serde_json",
|
||||
"similar",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
"unic-ucd-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2106,7 +2118,7 @@ dependencies = [
|
||||
name = "ruff_python_semantic"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.3.2",
|
||||
"bitflags 2.3.3",
|
||||
"is-macro",
|
||||
"nohash-hasher",
|
||||
"num-traits",
|
||||
@@ -2193,15 +2205,28 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.37.20"
|
||||
version = "0.37.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b96e891d04aa506a6d1f318d2771bcb1c7dfda84e126660ace067c9b474bb2c0"
|
||||
checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"errno",
|
||||
"io-lifetimes",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"linux-raw-sys 0.3.8",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4"
|
||||
dependencies = [
|
||||
"bitflags 2.3.3",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.4.3",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
@@ -2243,7 +2268,7 @@ name = "rustpython-format"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=c174bbf1f29527edd43d432326327f16f47ab9e0#c174bbf1f29527edd43d432326327f16f47ab9e0"
|
||||
dependencies = [
|
||||
"bitflags 2.3.2",
|
||||
"bitflags 2.3.3",
|
||||
"itertools",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
@@ -2297,15 +2322,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.12"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06"
|
||||
checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.13"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
|
||||
checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9"
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
@@ -2370,9 +2395,9 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.164"
|
||||
version = "1.0.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d"
|
||||
checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -2390,13 +2415,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.164"
|
||||
version = "1.0.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68"
|
||||
checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2412,9 +2437,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.99"
|
||||
version = "1.0.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3"
|
||||
checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
@@ -2455,7 +2480,7 @@ dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2538,9 +2563,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.22"
|
||||
version = "2.0.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2efbeae7acf4eabd6bcdcbd11c92f45231ddda7539edc7806bd1a04a03b24616"
|
||||
checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2566,7 +2591,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
"redox_syscall 0.3.5",
|
||||
"rustix",
|
||||
"rustix 0.37.23",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
@@ -2635,22 +2660,22 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.40"
|
||||
version = "1.0.43"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
|
||||
checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.40"
|
||||
version = "1.0.43"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
|
||||
checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2810,7 +2835,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2900,9 +2925,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.9"
|
||||
version = "1.0.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
|
||||
checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
@@ -2969,9 +2994,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.3.4"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fa2982af2eec27de306107c027578ff7f423d65f7250e40ce0fea8f45248b81"
|
||||
checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
@@ -3031,7 +3056,7 @@ dependencies = [
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
@@ -3065,7 +3090,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.22",
|
||||
"syn 2.0.23",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
@@ -3176,7 +3201,7 @@ version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
|
||||
dependencies = [
|
||||
"windows-targets 0.48.0",
|
||||
"windows-targets 0.48.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3194,7 +3219,7 @@ version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
|
||||
dependencies = [
|
||||
"windows-targets 0.48.0",
|
||||
"windows-targets 0.48.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3214,9 +3239,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.48.0"
|
||||
version = "0.48.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
|
||||
checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.48.0",
|
||||
"windows_aarch64_msvc 0.48.0",
|
||||
|
||||
@@ -45,12 +45,13 @@ strum = { version = "0.24.1", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.24.3" }
|
||||
syn = { version = "2.0.15" }
|
||||
test-case = { version = "3.0.0" }
|
||||
thiserror = { version = "1.0.43" }
|
||||
toml = { version = "0.7.2" }
|
||||
|
||||
# v0.0.1
|
||||
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e5beb532fdd1e209ad2dbb470438" }
|
||||
# v1.0.1
|
||||
libcst = { git = "https://github.com/Instagram/LibCST.git", rev = "3cacca1a1029f05707e50703b49fe3dd860aa839", default-features = false }
|
||||
|
||||
# Please tag the RustPython version everytime you update its revision here and in fuzz/Cargo.toml
|
||||
# Please tag the RustPython version every time you update its revision here and in fuzz/Cargo.toml
|
||||
# Tagging the version ensures that older ruff versions continue to build from source even when we rebase our RustPython fork.
|
||||
# Current tag: v0.0.7
|
||||
ruff_text_size = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "c174bbf1f29527edd43d432326327f16f47ab9e0" }
|
||||
|
||||
17
README.md
17
README.md
@@ -34,7 +34,8 @@ An extremely fast Python linter, written in Rust.
|
||||
- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the
|
||||
built-in Flake8 rule set
|
||||
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
|
||||
- ⌨️ First-party editor integrations for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||
- ⌨️ First-party [editor integrations](https://beta.ruff.rs/docs/editor-integrations/) for
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
@@ -139,7 +140,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.0.276
|
||||
rev: v0.0.278
|
||||
hooks:
|
||||
- id: ruff
|
||||
```
|
||||
@@ -347,6 +348,7 @@ Ruff is released under the MIT license.
|
||||
Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||
@@ -356,26 +358,30 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [DVC](https://github.com/iterative/dvc)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Gradio](https://github.com/gradio-app/gradio)
|
||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
- [HTTPX](https://github.com/encode/httpx)
|
||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||
[Datasets](https://github.com/huggingface/datasets),
|
||||
[Diffusers](https://github.com/huggingface/diffusers))
|
||||
- [Hatch](https://github.com/pypa/hatch)
|
||||
- [Home Assistant](https://github.com/home-assistant/core)
|
||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [LangChain](https://github.com/hwchase17/langchain)
|
||||
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
||||
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
|
||||
- Meltano ([Meltano CLI](https://github.com/meltano/meltano), [Singer SDK](https://github.com/meltano/sdk))
|
||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
|
||||
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
||||
- [MegaLinter](https://github.com/oxsecurity/megalinter)
|
||||
- Meltano ([Meltano CLI](https://github.com/meltano/meltano), [Singer SDK](https://github.com/meltano/sdk))
|
||||
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
|
||||
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
|
||||
[LightGBM](https://github.com/microsoft/LightGBM))
|
||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
|
||||
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
||||
- [Mypy](https://github.com/python/mypy)
|
||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||
- [Neon](https://github.com/neondatabase/neon)
|
||||
- [ONNX](https://github.com/onnx/onnx)
|
||||
@@ -411,6 +417,7 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [featuretools](https://github.com/alteryx/featuretools)
|
||||
- [meson-python](https://github.com/mesonbuild/meson-python)
|
||||
- [nox](https://github.com/wntrblm/nox)
|
||||
- [pip](https://github.com/pypa/pip)
|
||||
|
||||
### Show Your Support
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
extend-exclude = ["resources", "snapshots"]
|
||||
|
||||
[default.extend-words]
|
||||
trivias = "trivias"
|
||||
hel = "hel"
|
||||
whos = "whos"
|
||||
spawnve = "spawnve"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.276"
|
||||
version = "0.0.278"
|
||||
description = """
|
||||
Convert Flake8 configuration files to Ruff configuration files.
|
||||
"""
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.0.276"
|
||||
version = "0.0.278"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -17,6 +17,7 @@ name = "ruff"
|
||||
[dependencies]
|
||||
ruff_cache = { path = "../ruff_cache" }
|
||||
ruff_diagnostics = { path = "../ruff_diagnostics", features = ["serde"] }
|
||||
ruff_index = { path = "../ruff_index" }
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python_whitespace = { path = "../ruff_python_whitespace" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast", features = ["serde"] }
|
||||
@@ -72,7 +73,7 @@ shellexpand = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
strum = { workspace = true }
|
||||
strum_macros = { workspace = true }
|
||||
thiserror = { version = "1.0.38" }
|
||||
thiserror = { version = "1.0.43" }
|
||||
toml = { workspace = true }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unicode-width = { version = "0.1.10" }
|
||||
@@ -88,3 +89,5 @@ colored = { workspace = true, features = ["no-color"] }
|
||||
[features]
|
||||
default = []
|
||||
schemars = ["dep:schemars"]
|
||||
# Enables the UnreachableCode rule
|
||||
unreachable-code = []
|
||||
|
||||
11
crates/ruff/resources/test/fixtures/control-flow-graph/assert.py
vendored
Normal file
11
crates/ruff/resources/test/fixtures/control-flow-graph/assert.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
def func():
|
||||
assert True
|
||||
|
||||
def func():
|
||||
assert False
|
||||
|
||||
def func():
|
||||
assert True, "oops"
|
||||
|
||||
def func():
|
||||
assert False, "oops"
|
||||
41
crates/ruff/resources/test/fixtures/control-flow-graph/async-for.py
vendored
Normal file
41
crates/ruff/resources/test/fixtures/control-flow-graph/async-for.py
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
def func():
|
||||
async for i in range(5):
|
||||
print(i)
|
||||
|
||||
def func():
|
||||
async for i in range(20):
|
||||
print(i)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def func():
|
||||
async for i in range(10):
|
||||
if i == 5:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def func():
|
||||
async for i in range(111):
|
||||
if i == 5:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
return 2
|
||||
|
||||
def func():
|
||||
async for i in range(12):
|
||||
continue
|
||||
|
||||
def func():
|
||||
async for i in range(1110):
|
||||
if True:
|
||||
continue
|
||||
|
||||
def func():
|
||||
async for i in range(13):
|
||||
break
|
||||
|
||||
def func():
|
||||
async for i in range(1110):
|
||||
if True:
|
||||
break
|
||||
41
crates/ruff/resources/test/fixtures/control-flow-graph/for.py
vendored
Normal file
41
crates/ruff/resources/test/fixtures/control-flow-graph/for.py
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
def func():
|
||||
for i in range(5):
|
||||
print(i)
|
||||
|
||||
def func():
|
||||
for i in range(20):
|
||||
print(i)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def func():
|
||||
for i in range(10):
|
||||
if i == 5:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def func():
|
||||
for i in range(111):
|
||||
if i == 5:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
return 2
|
||||
|
||||
def func():
|
||||
for i in range(12):
|
||||
continue
|
||||
|
||||
def func():
|
||||
for i in range(1110):
|
||||
if True:
|
||||
continue
|
||||
|
||||
def func():
|
||||
for i in range(13):
|
||||
break
|
||||
|
||||
def func():
|
||||
for i in range(1110):
|
||||
if True:
|
||||
break
|
||||
108
crates/ruff/resources/test/fixtures/control-flow-graph/if.py
vendored
Normal file
108
crates/ruff/resources/test/fixtures/control-flow-graph/if.py
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
def func():
|
||||
if False:
|
||||
return 0
|
||||
return 1
|
||||
|
||||
def func():
|
||||
if True:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def func():
|
||||
if False:
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
def func():
|
||||
if True:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
|
||||
def func():
|
||||
if False:
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
return "unreachable"
|
||||
|
||||
def func():
|
||||
if True:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
return "unreachable"
|
||||
|
||||
def func():
|
||||
if True:
|
||||
if True:
|
||||
return 1
|
||||
return 2
|
||||
else:
|
||||
return 3
|
||||
return "unreachable2"
|
||||
|
||||
def func():
|
||||
if False:
|
||||
return 0
|
||||
|
||||
def func():
|
||||
if True:
|
||||
return 1
|
||||
|
||||
def func():
|
||||
if True:
|
||||
return 1
|
||||
elif False:
|
||||
return 2
|
||||
else:
|
||||
return 0
|
||||
|
||||
def func():
|
||||
if False:
|
||||
return 1
|
||||
elif True:
|
||||
return 2
|
||||
else:
|
||||
return 0
|
||||
|
||||
def func():
|
||||
if True:
|
||||
if False:
|
||||
return 0
|
||||
elif True:
|
||||
return 1
|
||||
else:
|
||||
return 2
|
||||
return 3
|
||||
elif True:
|
||||
return 4
|
||||
else:
|
||||
return 5
|
||||
return 6
|
||||
|
||||
def func():
|
||||
if False:
|
||||
return "unreached"
|
||||
elif False:
|
||||
return "also unreached"
|
||||
return "reached"
|
||||
|
||||
# Test case found in the Bokeh repository that trigger a false positive.
|
||||
def func(self, obj: BytesRep) -> bytes:
|
||||
data = obj["data"]
|
||||
|
||||
if isinstance(data, str):
|
||||
return base64.b64decode(data)
|
||||
elif isinstance(data, Buffer):
|
||||
buffer = data
|
||||
else:
|
||||
id = data["id"]
|
||||
|
||||
if id in self._buffers:
|
||||
buffer = self._buffers[id]
|
||||
else:
|
||||
self.error(f"can't resolve buffer '{id}'")
|
||||
|
||||
return buffer.data
|
||||
131
crates/ruff/resources/test/fixtures/control-flow-graph/match.py
vendored
Normal file
131
crates/ruff/resources/test/fixtures/control-flow-graph/match.py
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
def func(status):
|
||||
match status:
|
||||
case _:
|
||||
return 0
|
||||
return "unreachable"
|
||||
|
||||
def func(status):
|
||||
match status:
|
||||
case 1:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def func(status):
|
||||
match status:
|
||||
case 1:
|
||||
return 1
|
||||
case _:
|
||||
return 0
|
||||
|
||||
def func(status):
|
||||
match status:
|
||||
case 1 | 2 | 3:
|
||||
return 5
|
||||
return 6
|
||||
|
||||
def func(status):
|
||||
match status:
|
||||
case 1 | 2 | 3:
|
||||
return 5
|
||||
case _:
|
||||
return 10
|
||||
return 0
|
||||
|
||||
def func(status):
|
||||
match status:
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return 1
|
||||
case 1:
|
||||
return "1 again"
|
||||
case _:
|
||||
return 3
|
||||
|
||||
def func(status):
|
||||
i = 0
|
||||
match status, i:
|
||||
case _, _:
|
||||
return 0
|
||||
|
||||
def func(status):
|
||||
i = 0
|
||||
match status, i:
|
||||
case _, 0:
|
||||
return 0
|
||||
case _, 2:
|
||||
return 0
|
||||
|
||||
def func(point):
|
||||
match point:
|
||||
case (0, 0):
|
||||
print("Origin")
|
||||
case _:
|
||||
raise ValueError("oops")
|
||||
|
||||
def func(point):
|
||||
match point:
|
||||
case (0, 0):
|
||||
print("Origin")
|
||||
case (0, y):
|
||||
print(f"Y={y}")
|
||||
case (x, 0):
|
||||
print(f"X={x}")
|
||||
case (x, y):
|
||||
print(f"X={x}, Y={y}")
|
||||
case _:
|
||||
raise ValueError("Not a point")
|
||||
|
||||
def where_is(point):
|
||||
class Point:
|
||||
x: int
|
||||
y: int
|
||||
|
||||
match point:
|
||||
case Point(x=0, y=0):
|
||||
print("Origin")
|
||||
case Point(x=0, y=y):
|
||||
print(f"Y={y}")
|
||||
case Point(x=x, y=0):
|
||||
print(f"X={x}")
|
||||
case Point():
|
||||
print("Somewhere else")
|
||||
case _:
|
||||
print("Not a point")
|
||||
|
||||
def func(points):
|
||||
match points:
|
||||
case []:
|
||||
print("No points")
|
||||
case [Point(0, 0)]:
|
||||
print("The origin")
|
||||
case [Point(x, y)]:
|
||||
print(f"Single point {x}, {y}")
|
||||
case [Point(0, y1), Point(0, y2)]:
|
||||
print(f"Two on the Y axis at {y1}, {y2}")
|
||||
case _:
|
||||
print("Something else")
|
||||
|
||||
def func(point):
|
||||
match point:
|
||||
case Point(x, y) if x == y:
|
||||
print(f"Y=X at {x}")
|
||||
case Point(x, y):
|
||||
print(f"Not on the diagonal")
|
||||
|
||||
def func():
|
||||
from enum import Enum
|
||||
class Color(Enum):
|
||||
RED = 'red'
|
||||
GREEN = 'green'
|
||||
BLUE = 'blue'
|
||||
|
||||
color = Color(input("Enter your choice of 'red', 'blue' or 'green': "))
|
||||
|
||||
match color:
|
||||
case Color.RED:
|
||||
print("I see red!")
|
||||
case Color.GREEN:
|
||||
print("Grass is green")
|
||||
case Color.BLUE:
|
||||
print("I'm feeling the blues :(")
|
||||
5
crates/ruff/resources/test/fixtures/control-flow-graph/raise.py
vendored
Normal file
5
crates/ruff/resources/test/fixtures/control-flow-graph/raise.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
def func():
|
||||
raise Exception
|
||||
|
||||
def func():
|
||||
raise "a glass!"
|
||||
23
crates/ruff/resources/test/fixtures/control-flow-graph/simple.py
vendored
Normal file
23
crates/ruff/resources/test/fixtures/control-flow-graph/simple.py
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
def func():
|
||||
pass
|
||||
|
||||
def func():
|
||||
pass
|
||||
|
||||
def func():
|
||||
return
|
||||
|
||||
def func():
|
||||
return 1
|
||||
|
||||
def func():
|
||||
return 1
|
||||
return "unreachable"
|
||||
|
||||
def func():
|
||||
i = 0
|
||||
|
||||
def func():
|
||||
i = 0
|
||||
i += 2
|
||||
return i
|
||||
41
crates/ruff/resources/test/fixtures/control-flow-graph/try.py
vendored
Normal file
41
crates/ruff/resources/test/fixtures/control-flow-graph/try.py
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
def func():
|
||||
try:
|
||||
...
|
||||
except Exception:
|
||||
...
|
||||
except OtherException as e:
|
||||
...
|
||||
else:
|
||||
...
|
||||
finally:
|
||||
...
|
||||
|
||||
def func():
|
||||
try:
|
||||
...
|
||||
except Exception:
|
||||
...
|
||||
|
||||
def func():
|
||||
try:
|
||||
...
|
||||
except Exception:
|
||||
...
|
||||
except OtherException as e:
|
||||
...
|
||||
|
||||
def func():
|
||||
try:
|
||||
...
|
||||
except Exception:
|
||||
...
|
||||
except OtherException as e:
|
||||
...
|
||||
else:
|
||||
...
|
||||
|
||||
def func():
|
||||
try:
|
||||
...
|
||||
finally:
|
||||
...
|
||||
121
crates/ruff/resources/test/fixtures/control-flow-graph/while.py
vendored
Normal file
121
crates/ruff/resources/test/fixtures/control-flow-graph/while.py
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
def func():
|
||||
while False:
|
||||
return "unreachable"
|
||||
return 1
|
||||
|
||||
def func():
|
||||
while False:
|
||||
return "unreachable"
|
||||
else:
|
||||
return 1
|
||||
|
||||
def func():
|
||||
while False:
|
||||
return "unreachable"
|
||||
else:
|
||||
return 1
|
||||
return "also unreachable"
|
||||
|
||||
def func():
|
||||
while True:
|
||||
return 1
|
||||
return "unreachable"
|
||||
|
||||
def func():
|
||||
while True:
|
||||
return 1
|
||||
else:
|
||||
return "unreachable"
|
||||
|
||||
def func():
|
||||
while True:
|
||||
return 1
|
||||
else:
|
||||
return "unreachable"
|
||||
return "also unreachable"
|
||||
|
||||
def func():
|
||||
i = 0
|
||||
while False:
|
||||
i += 1
|
||||
return i
|
||||
|
||||
def func():
|
||||
i = 0
|
||||
while True:
|
||||
i += 1
|
||||
return i
|
||||
|
||||
def func():
|
||||
while True:
|
||||
pass
|
||||
return 1
|
||||
|
||||
def func():
|
||||
i = 0
|
||||
while True:
|
||||
if True:
|
||||
print("ok")
|
||||
i += 1
|
||||
return i
|
||||
|
||||
def func():
|
||||
i = 0
|
||||
while True:
|
||||
if False:
|
||||
print("ok")
|
||||
i += 1
|
||||
return i
|
||||
|
||||
def func():
|
||||
while True:
|
||||
if True:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def func():
|
||||
while True:
|
||||
continue
|
||||
|
||||
def func():
|
||||
while False:
|
||||
continue
|
||||
|
||||
def func():
|
||||
while True:
|
||||
break
|
||||
|
||||
def func():
|
||||
while False:
|
||||
break
|
||||
|
||||
def func():
|
||||
while True:
|
||||
if True:
|
||||
continue
|
||||
|
||||
def func():
|
||||
while True:
|
||||
if True:
|
||||
break
|
||||
|
||||
'''
|
||||
TODO: because `try` statements aren't handled this triggers a false positive as
|
||||
the last statement is reached, but the rules thinks it isn't (it doesn't
|
||||
see/process the break statement).
|
||||
|
||||
# Test case found in the Bokeh repository that trigger a false positive.
|
||||
def bokeh2(self, host: str = DEFAULT_HOST, port: int = DEFAULT_PORT) -> None:
|
||||
self.stop_serving = False
|
||||
while True:
|
||||
try:
|
||||
self.server = HTTPServer((host, port), HtmlOnlyHandler)
|
||||
self.host = host
|
||||
self.port = port
|
||||
break
|
||||
except OSError:
|
||||
log.debug(f"port {port} is in use, trying to next one")
|
||||
port += 1
|
||||
|
||||
self.thread = threading.Thread(target=self._run_web_server)
|
||||
'''
|
||||
12
crates/ruff/resources/test/fixtures/flake8_bandit/S307.py
vendored
Normal file
12
crates/ruff/resources/test/fixtures/flake8_bandit/S307.py
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
import os
|
||||
|
||||
print(eval("1+1")) # S307
|
||||
print(eval("os.getcwd()")) # S307
|
||||
|
||||
|
||||
class Class(object):
|
||||
def eval(self):
|
||||
print("hi")
|
||||
|
||||
def foo(self):
|
||||
self.eval() # OK
|
||||
27
crates/ruff/resources/test/fixtures/flake8_bugbear/B034.py
vendored
Normal file
27
crates/ruff/resources/test/fixtures/flake8_bugbear/B034.py
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
import re
|
||||
from re import sub
|
||||
|
||||
# B034
|
||||
re.sub("a", "b", "aaa", re.IGNORECASE)
|
||||
re.sub("a", "b", "aaa", 5)
|
||||
re.sub("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
re.subn("a", "b", "aaa", re.IGNORECASE)
|
||||
re.subn("a", "b", "aaa", 5)
|
||||
re.subn("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
re.split(" ", "a a a a", re.I)
|
||||
re.split(" ", "a a a a", 2)
|
||||
re.split(" ", "a a a a", 2, re.I)
|
||||
sub("a", "b", "aaa", re.IGNORECASE)
|
||||
|
||||
# OK
|
||||
re.sub("a", "b", "aaa")
|
||||
re.sub("a", "b", "aaa", flags=re.IGNORECASE)
|
||||
re.sub("a", "b", "aaa", count=5)
|
||||
re.sub("a", "b", "aaa", count=5, flags=re.IGNORECASE)
|
||||
re.subn("a", "b", "aaa")
|
||||
re.subn("a", "b", "aaa", flags=re.IGNORECASE)
|
||||
re.subn("a", "b", "aaa", count=5)
|
||||
re.subn("a", "b", "aaa", count=5, flags=re.IGNORECASE)
|
||||
re.split(" ", "a a a a", flags=re.I)
|
||||
re.split(" ", "a a a a", maxsplit=2)
|
||||
re.split(" ", "a a a a", maxsplit=2, flags=re.I)
|
||||
@@ -25,10 +25,15 @@ map(lambda x=2, y=1: x + y, nums, nums)
|
||||
set(map(lambda x, y: x, nums, nums))
|
||||
|
||||
|
||||
def myfunc(arg1: int, arg2: int = 4):
|
||||
def func(arg1: int, arg2: int = 4):
|
||||
return 2 * arg1 + arg2
|
||||
|
||||
|
||||
list(map(myfunc, nums))
|
||||
# Non-error: `func` is not a lambda.
|
||||
list(map(func, nums))
|
||||
|
||||
[x for x in nums]
|
||||
# False positive: need to preserve the late-binding of `x` in the inner lambda.
|
||||
map(lambda x: lambda: x, range(4))
|
||||
|
||||
# Error: the `x` is overridden by the inner lambda.
|
||||
map(lambda x: lambda x: x, range(4))
|
||||
|
||||
@@ -19,3 +19,6 @@ from datetime import datetime
|
||||
|
||||
# no args unqualified
|
||||
datetime(2000, 1, 1, 0, 0, 0)
|
||||
|
||||
# uses `astimezone` method
|
||||
datetime(2000, 1, 1, 0, 0, 0).astimezone()
|
||||
|
||||
@@ -7,3 +7,6 @@ from datetime import datetime
|
||||
|
||||
# unqualified
|
||||
datetime.today()
|
||||
|
||||
# uses `astimezone` method
|
||||
datetime.today().astimezone()
|
||||
|
||||
@@ -7,3 +7,6 @@ from datetime import datetime
|
||||
|
||||
# unqualified
|
||||
datetime.utcnow()
|
||||
|
||||
# uses `astimezone` method
|
||||
datetime.utcnow().astimezone()
|
||||
|
||||
@@ -7,3 +7,6 @@ from datetime import datetime
|
||||
|
||||
# unqualified
|
||||
datetime.utcfromtimestamp(1234)
|
||||
|
||||
# uses `astimezone` method
|
||||
datetime.utcfromtimestamp(1234).astimezone()
|
||||
|
||||
@@ -16,3 +16,6 @@ from datetime import datetime
|
||||
|
||||
# no args unqualified
|
||||
datetime.now()
|
||||
|
||||
# uses `astimezone` method
|
||||
datetime.now().astimezone()
|
||||
|
||||
@@ -16,3 +16,6 @@ from datetime import datetime
|
||||
|
||||
# no args unqualified
|
||||
datetime.fromtimestamp(1234)
|
||||
|
||||
# uses `astimezone` method
|
||||
datetime.fromtimestamp(1234).astimezone()
|
||||
|
||||
@@ -5,15 +5,18 @@ import matplotlib.pyplot # unconventional
|
||||
import numpy # unconventional
|
||||
import pandas # unconventional
|
||||
import seaborn # unconventional
|
||||
import tkinter # unconventional
|
||||
|
||||
import altair as altr # unconventional
|
||||
import matplotlib.pyplot as plot # unconventional
|
||||
import numpy as nmp # unconventional
|
||||
import pandas as pdas # unconventional
|
||||
import seaborn as sbrn # unconventional
|
||||
import tkinter as tkr # unconventional
|
||||
|
||||
import altair as alt # conventional
|
||||
import matplotlib.pyplot as plt # conventional
|
||||
import numpy as np # conventional
|
||||
import pandas as pd # conventional
|
||||
import seaborn as sns # conventional
|
||||
import tkinter as tk # conventional
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import typing
|
||||
|
||||
# Shouldn't affect non-union field types.
|
||||
field1: str
|
||||
|
||||
@@ -30,3 +32,42 @@ field10: (str | int) | str # PYI016: Duplicate union member `str`
|
||||
|
||||
# Should emit for nested unions.
|
||||
field11: dict[int | int, str]
|
||||
|
||||
# Should emit for unions with more than two cases
|
||||
field12: int | int | int # Error
|
||||
field13: int | int | int | int # Error
|
||||
|
||||
# Should emit for unions with more than two cases, even if not directly adjacent
|
||||
field14: int | int | str | int # Error
|
||||
|
||||
# Should emit for duplicate literal types; also covered by PYI030
|
||||
field15: typing.Literal[1] | typing.Literal[1] # Error
|
||||
|
||||
# Shouldn't emit if in new parent type
|
||||
field16: int | dict[int, str] # OK
|
||||
|
||||
# Shouldn't emit if not in a union parent
|
||||
field17: dict[int, int] # OK
|
||||
|
||||
# Should emit in cases with newlines
|
||||
field18: typing.Union[
|
||||
set[
|
||||
int # foo
|
||||
],
|
||||
set[
|
||||
int # bar
|
||||
],
|
||||
] # Error, newline and comment will not be emitted in message
|
||||
|
||||
|
||||
# Should emit in cases with `typing.Union` instead of `|`
|
||||
field19: typing.Union[int, int] # Error
|
||||
|
||||
# Should emit in cases with nested `typing.Union`
|
||||
field20: typing.Union[int, typing.Union[int, str]] # Error
|
||||
|
||||
# Should emit in cases with mixed `typing.Union` and `|`
|
||||
field21: typing.Union[int, int | str] # Error
|
||||
|
||||
# Should emit only once in cases with multiple nested `typing.Union`
|
||||
field22: typing.Union[int, typing.Union[int, typing.Union[int, int]]] # Error
|
||||
|
||||
24
crates/ruff/resources/test/fixtures/flake8_pyi/PYI030.py
vendored
Normal file
24
crates/ruff/resources/test/fixtures/flake8_pyi/PYI030.py
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
from typing import Literal
|
||||
# Shouldn't emit for any cases in the non-stub file for compatibility with flake8-pyi.
|
||||
# Note that this rule could be applied here in the future.
|
||||
|
||||
field1: Literal[1] # OK
|
||||
field2: Literal[1] | Literal[2] # OK
|
||||
|
||||
def func1(arg1: Literal[1] | Literal[2]): # OK
|
||||
print(arg1)
|
||||
|
||||
|
||||
def func2() -> Literal[1] | Literal[2]: # OK
|
||||
return "my Literal[1]ing"
|
||||
|
||||
|
||||
field3: Literal[1] | Literal[2] | str # OK
|
||||
field4: str | Literal[1] | Literal[2] # OK
|
||||
field5: Literal[1] | str | Literal[2] # OK
|
||||
field6: Literal[1] | bool | Literal[2] | str # OK
|
||||
field7 = Literal[1] | Literal[2] # OK
|
||||
field8: Literal[1] | (Literal[2] | str) # OK
|
||||
field9: Literal[1] | (Literal[2] | str) # OK
|
||||
field10: (Literal[1] | str) | Literal[2] # OK
|
||||
field11: dict[Literal[1] | Literal[2], str] # OK
|
||||
86
crates/ruff/resources/test/fixtures/flake8_pyi/PYI030.pyi
vendored
Normal file
86
crates/ruff/resources/test/fixtures/flake8_pyi/PYI030.pyi
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
import typing
|
||||
import typing_extensions
|
||||
from typing import Literal
|
||||
|
||||
# Shouldn't affect non-union field types.
|
||||
field1: Literal[1] # OK
|
||||
|
||||
# Should emit for duplicate field types.
|
||||
field2: Literal[1] | Literal[2] # Error
|
||||
|
||||
# Should emit for union types in arguments.
|
||||
def func1(arg1: Literal[1] | Literal[2]): # Error
|
||||
print(arg1)
|
||||
|
||||
|
||||
# Should emit for unions in return types.
|
||||
def func2() -> Literal[1] | Literal[2]: # Error
|
||||
return "my Literal[1]ing"
|
||||
|
||||
|
||||
# Should emit in longer unions, even if not directly adjacent.
|
||||
field3: Literal[1] | Literal[2] | str # Error
|
||||
field4: str | Literal[1] | Literal[2] # Error
|
||||
field5: Literal[1] | str | Literal[2] # Error
|
||||
field6: Literal[1] | bool | Literal[2] | str # Error
|
||||
|
||||
# Should emit for non-type unions.
|
||||
field7 = Literal[1] | Literal[2] # Error
|
||||
|
||||
# Should emit for parenthesized unions.
|
||||
field8: Literal[1] | (Literal[2] | str) # Error
|
||||
|
||||
# Should handle user parentheses when fixing.
|
||||
field9: Literal[1] | (Literal[2] | str) # Error
|
||||
field10: (Literal[1] | str) | Literal[2] # Error
|
||||
|
||||
# Should emit for union in generic parent type.
|
||||
field11: dict[Literal[1] | Literal[2], str] # Error
|
||||
|
||||
# Should emit for unions with more than two cases
|
||||
field12: Literal[1] | Literal[2] | Literal[3] # Error
|
||||
field13: Literal[1] | Literal[2] | Literal[3] | Literal[4] # Error
|
||||
|
||||
# Should emit for unions with more than two cases, even if not directly adjacent
|
||||
field14: Literal[1] | Literal[2] | str | Literal[3] # Error
|
||||
|
||||
# Should emit for unions with mixed literal internal types
|
||||
field15: Literal[1] | Literal["foo"] | Literal[True] # Error
|
||||
|
||||
# Shouldn't emit for duplicate field types with same value; covered by Y016
|
||||
field16: Literal[1] | Literal[1] # OK
|
||||
|
||||
# Shouldn't emit if in new parent type
|
||||
field17: Literal[1] | dict[Literal[2], str] # OK
|
||||
|
||||
# Shouldn't emit if not in a union parent
|
||||
field18: dict[Literal[1], Literal[2]] # OK
|
||||
|
||||
# Should respect name of literal type used
|
||||
field19: typing.Literal[1] | typing.Literal[2] # Error
|
||||
|
||||
# Should emit in cases with newlines
|
||||
field20: typing.Union[
|
||||
Literal[
|
||||
1 # test
|
||||
],
|
||||
Literal[2],
|
||||
] # Error, newline and comment will not be emitted in message
|
||||
|
||||
# Should handle multiple unions with multiple members
|
||||
field21: Literal[1, 2] | Literal[3, 4] # Error
|
||||
|
||||
# Should emit in cases with `typing.Union` instead of `|`
|
||||
field22: typing.Union[Literal[1], Literal[2]] # Error
|
||||
|
||||
# Should emit in cases with `typing_extensions.Literal`
|
||||
field23: typing_extensions.Literal[1] | typing_extensions.Literal[2] # Error
|
||||
|
||||
# Should emit in cases with nested `typing.Union`
|
||||
field24: typing.Union[Literal[1], typing.Union[Literal[2], str]] # Error
|
||||
|
||||
# Should emit in cases with mixed `typing.Union` and `|`
|
||||
field25: typing.Union[Literal[1], Literal[2] | str] # Error
|
||||
|
||||
# Should emit only once in cases with multiple nested `typing.Union`
|
||||
field24: typing.Union[Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]]] # Error
|
||||
@@ -29,6 +29,26 @@ raise TypeError(
|
||||
# Hello, world!
|
||||
)
|
||||
|
||||
# OK
|
||||
raise AssertionError
|
||||
|
||||
# OK
|
||||
raise AttributeError("test message")
|
||||
|
||||
|
||||
def return_error():
|
||||
return ValueError("Something")
|
||||
|
||||
|
||||
# OK
|
||||
raise return_error()
|
||||
|
||||
|
||||
class Class:
|
||||
@staticmethod
|
||||
def error():
|
||||
return ValueError("Something")
|
||||
|
||||
|
||||
# OK
|
||||
raise Class.error()
|
||||
|
||||
9
crates/ruff/resources/test/fixtures/isort/case_sensitive.py
vendored
Normal file
9
crates/ruff/resources/test/fixtures/isort/case_sensitive.py
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import A
|
||||
import B
|
||||
import b
|
||||
import C
|
||||
import d
|
||||
import E
|
||||
import f
|
||||
from g import a, B, c
|
||||
from h import A, b, C
|
||||
@@ -26,3 +26,9 @@ def f():
|
||||
import os # isort:skip
|
||||
import collections
|
||||
import abc
|
||||
|
||||
|
||||
def f():
|
||||
import sys; import os # isort:skip
|
||||
import sys; import os # isort:skip # isort:skip
|
||||
import sys; import os
|
||||
|
||||
@@ -19,3 +19,13 @@ if True:
|
||||
|
||||
import D
|
||||
import B
|
||||
|
||||
|
||||
import e
|
||||
import f
|
||||
|
||||
# isort: split
|
||||
# isort: split
|
||||
|
||||
import d
|
||||
import c
|
||||
|
||||
@@ -30,3 +30,18 @@ def f():
|
||||
result = []
|
||||
for i in items:
|
||||
result.append(i) # OK
|
||||
|
||||
|
||||
def f():
|
||||
items = [1, 2, 3, 4]
|
||||
result = {}
|
||||
for i in items:
|
||||
result[i].append(i) # OK
|
||||
|
||||
|
||||
def f():
|
||||
items = [1, 2, 3, 4]
|
||||
result = []
|
||||
for i in items:
|
||||
if i not in result:
|
||||
result.append(i) # OK
|
||||
|
||||
@@ -17,3 +17,10 @@ def f():
|
||||
result = []
|
||||
for i in items:
|
||||
result.append(i * i) # OK
|
||||
|
||||
|
||||
def f():
|
||||
items = [1, 2, 3, 4]
|
||||
result = {}
|
||||
for i in items:
|
||||
result[i].append(i * i) # OK
|
||||
|
||||
@@ -48,3 +48,8 @@ x = {
|
||||
|
||||
x = {"a": 1, "a": 1}
|
||||
x = {"a": 1, "b": 2, "a": 1}
|
||||
|
||||
x = {
|
||||
('a', 'b'): 'asdf',
|
||||
('a', 'b'): 'qwer',
|
||||
}
|
||||
|
||||
37
crates/ruff/resources/test/fixtures/pylint/type_bivariance.py
vendored
Normal file
37
crates/ruff/resources/test/fixtures/pylint/type_bivariance.py
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
from typing import ParamSpec, TypeVar
|
||||
|
||||
# Errors.
|
||||
|
||||
T = TypeVar("T", covariant=True, contravariant=True)
|
||||
T = TypeVar(name="T", covariant=True, contravariant=True)
|
||||
|
||||
T = ParamSpec("T", covariant=True, contravariant=True)
|
||||
T = ParamSpec(name="T", covariant=True, contravariant=True)
|
||||
|
||||
# Non-errors.
|
||||
|
||||
T = TypeVar("T")
|
||||
T = TypeVar("T", covariant=False)
|
||||
T = TypeVar("T", contravariant=False)
|
||||
T = TypeVar("T", covariant=False, contravariant=False)
|
||||
T = TypeVar("T", covariant=True)
|
||||
T = TypeVar("T", covariant=True, contravariant=False)
|
||||
T = TypeVar(name="T", covariant=True, contravariant=False)
|
||||
T = TypeVar(name="T", covariant=True)
|
||||
T = TypeVar("T", contravariant=True)
|
||||
T = TypeVar("T", covariant=False, contravariant=True)
|
||||
T = TypeVar(name="T", covariant=False, contravariant=True)
|
||||
T = TypeVar(name="T", contravariant=True)
|
||||
|
||||
T = ParamSpec("T")
|
||||
T = ParamSpec("T", covariant=False)
|
||||
T = ParamSpec("T", contravariant=False)
|
||||
T = ParamSpec("T", covariant=False, contravariant=False)
|
||||
T = ParamSpec("T", covariant=True)
|
||||
T = ParamSpec("T", covariant=True, contravariant=False)
|
||||
T = ParamSpec(name="T", covariant=True, contravariant=False)
|
||||
T = ParamSpec(name="T", covariant=True)
|
||||
T = ParamSpec("T", contravariant=True)
|
||||
T = ParamSpec("T", covariant=False, contravariant=True)
|
||||
T = ParamSpec(name="T", covariant=False, contravariant=True)
|
||||
T = ParamSpec(name="T", contravariant=True)
|
||||
68
crates/ruff/resources/test/fixtures/pylint/type_name_incorrect_variance.py
vendored
Normal file
68
crates/ruff/resources/test/fixtures/pylint/type_name_incorrect_variance.py
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
from typing import ParamSpec, TypeVar
|
||||
|
||||
# Errors.
|
||||
|
||||
T = TypeVar("T", covariant=True)
|
||||
T = TypeVar("T", covariant=True, contravariant=False)
|
||||
T = TypeVar("T", contravariant=True)
|
||||
T = TypeVar("T", covariant=False, contravariant=True)
|
||||
P = ParamSpec("P", covariant=True)
|
||||
P = ParamSpec("P", covariant=True, contravariant=False)
|
||||
P = ParamSpec("P", contravariant=True)
|
||||
P = ParamSpec("P", covariant=False, contravariant=True)
|
||||
|
||||
T_co = TypeVar("T_co")
|
||||
T_co = TypeVar("T_co", covariant=False)
|
||||
T_co = TypeVar("T_co", contravariant=False)
|
||||
T_co = TypeVar("T_co", covariant=False, contravariant=False)
|
||||
T_co = TypeVar("T_co", contravariant=True)
|
||||
T_co = TypeVar("T_co", covariant=False, contravariant=True)
|
||||
P_co = ParamSpec("P_co")
|
||||
P_co = ParamSpec("P_co", covariant=False)
|
||||
P_co = ParamSpec("P_co", contravariant=False)
|
||||
P_co = ParamSpec("P_co", covariant=False, contravariant=False)
|
||||
P_co = ParamSpec("P_co", contravariant=True)
|
||||
P_co = ParamSpec("P_co", covariant=False, contravariant=True)
|
||||
|
||||
T_contra = TypeVar("T_contra")
|
||||
T_contra = TypeVar("T_contra", covariant=False)
|
||||
T_contra = TypeVar("T_contra", contravariant=False)
|
||||
T_contra = TypeVar("T_contra", covariant=False, contravariant=False)
|
||||
T_contra = TypeVar("T_contra", covariant=True)
|
||||
T_contra = TypeVar("T_contra", covariant=True, contravariant=False)
|
||||
P_contra = ParamSpec("P_contra")
|
||||
P_contra = ParamSpec("P_contra", covariant=False)
|
||||
P_contra = ParamSpec("P_contra", contravariant=False)
|
||||
P_contra = ParamSpec("P_contra", covariant=False, contravariant=False)
|
||||
P_contra = ParamSpec("P_contra", covariant=True)
|
||||
P_contra = ParamSpec("P_contra", covariant=True, contravariant=False)
|
||||
|
||||
# Non-errors.
|
||||
|
||||
T = TypeVar("T")
|
||||
T = TypeVar("T", covariant=False)
|
||||
T = TypeVar("T", contravariant=False)
|
||||
T = TypeVar("T", covariant=False, contravariant=False)
|
||||
P = ParamSpec("P")
|
||||
P = ParamSpec("P", covariant=False)
|
||||
P = ParamSpec("P", contravariant=False)
|
||||
P = ParamSpec("P", covariant=False, contravariant=False)
|
||||
|
||||
T_co = TypeVar("T_co", covariant=True)
|
||||
T_co = TypeVar("T_co", covariant=True, contravariant=False)
|
||||
P_co = ParamSpec("P_co", covariant=True)
|
||||
P_co = ParamSpec("P_co", covariant=True, contravariant=False)
|
||||
|
||||
T_contra = TypeVar("T_contra", contravariant=True)
|
||||
T_contra = TypeVar("T_contra", covariant=False, contravariant=True)
|
||||
P_contra = ParamSpec("P_contra", contravariant=True)
|
||||
P_contra = ParamSpec("P_contra", covariant=False, contravariant=True)
|
||||
|
||||
# Bivariate types are errors, but not covered by this check.
|
||||
|
||||
T = TypeVar("T", covariant=True, contravariant=True)
|
||||
P = ParamSpec("P", covariant=True, contravariant=True)
|
||||
T_co = TypeVar("T_co", covariant=True, contravariant=True)
|
||||
P_co = ParamSpec("P_co", covariant=True, contravariant=True)
|
||||
T_contra = TypeVar("T_contra", covariant=True, contravariant=True)
|
||||
P_contra = ParamSpec("P_contra", covariant=True, contravariant=True)
|
||||
56
crates/ruff/resources/test/fixtures/pylint/type_param_name_mismatch.py
vendored
Normal file
56
crates/ruff/resources/test/fixtures/pylint/type_param_name_mismatch.py
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
from typing import TypeVar, ParamSpec, NewType, TypeVarTuple
|
||||
|
||||
# Errors.
|
||||
|
||||
X = TypeVar("T")
|
||||
X = TypeVar(name="T")
|
||||
|
||||
Y = ParamSpec("T")
|
||||
Y = ParamSpec(name="T")
|
||||
|
||||
Z = NewType("T", int)
|
||||
Z = NewType(name="T", tp=int)
|
||||
|
||||
Ws = TypeVarTuple("Ts")
|
||||
Ws = TypeVarTuple(name="Ts")
|
||||
|
||||
# Non-errors.
|
||||
|
||||
T = TypeVar("T")
|
||||
T = TypeVar(name="T")
|
||||
|
||||
T = ParamSpec("T")
|
||||
T = ParamSpec(name="T")
|
||||
|
||||
T = NewType("T", int)
|
||||
T = NewType(name="T", tp=int)
|
||||
|
||||
Ts = TypeVarTuple("Ts")
|
||||
Ts = TypeVarTuple(name="Ts")
|
||||
|
||||
# Errors, but not covered by this rule.
|
||||
|
||||
# Non-string literal name.
|
||||
T = TypeVar(some_str)
|
||||
T = TypeVar(name=some_str)
|
||||
T = TypeVar(1)
|
||||
T = TypeVar(name=1)
|
||||
T = ParamSpec(some_str)
|
||||
T = ParamSpec(name=some_str)
|
||||
T = ParamSpec(1)
|
||||
T = ParamSpec(name=1)
|
||||
T = NewType(some_str, int)
|
||||
T = NewType(name=some_str, tp=int)
|
||||
T = NewType(1, int)
|
||||
T = NewType(name=1, tp=int)
|
||||
Ts = TypeVarTuple(some_str)
|
||||
Ts = TypeVarTuple(name=some_str)
|
||||
Ts = TypeVarTuple(1)
|
||||
Ts = TypeVarTuple(name=1)
|
||||
|
||||
# No names provided.
|
||||
T = TypeVar()
|
||||
T = ParamSpec()
|
||||
T = NewType()
|
||||
T = NewType(tp=int)
|
||||
Ts = TypeVarTuple()
|
||||
@@ -54,6 +54,14 @@ print("foo {} ".format(x))
|
||||
|
||||
'''{[b]}'''.format(a)
|
||||
|
||||
"{}".format(
|
||||
1
|
||||
)
|
||||
|
||||
"123456789 {}".format(
|
||||
1111111111111111111111111111111111111111111111111111111111111111111111111,
|
||||
)
|
||||
|
||||
###
|
||||
# Non-errors
|
||||
###
|
||||
@@ -87,6 +95,9 @@ r'"\N{snowman} {}".format(a)'
|
||||
|
||||
"{a}" "{b}".format(a=1, b=1)
|
||||
|
||||
"123456789 {}".format(
|
||||
11111111111111111111111111111111111111111111111111111111111111111111111111,
|
||||
)
|
||||
|
||||
async def c():
|
||||
return "{}".format(await 3)
|
||||
|
||||
@@ -6,6 +6,7 @@ from fractions import Fraction
|
||||
from pathlib import Path
|
||||
from typing import ClassVar, NamedTuple
|
||||
|
||||
|
||||
def default_function() -> list[int]:
|
||||
return []
|
||||
|
||||
@@ -25,12 +26,13 @@ class A:
|
||||
fine_timedelta: datetime.timedelta = datetime.timedelta(hours=7)
|
||||
fine_tuple: tuple[int] = tuple([1])
|
||||
fine_regex: re.Pattern = re.compile(r".*")
|
||||
fine_float: float = float('-inf')
|
||||
fine_float: float = float("-inf")
|
||||
fine_int: int = int(12)
|
||||
fine_complex: complex = complex(1, 2)
|
||||
fine_str: str = str("foo")
|
||||
fine_bool: bool = bool("foo")
|
||||
fine_fraction: Fraction = Fraction(1,2)
|
||||
fine_fraction: Fraction = Fraction(1, 2)
|
||||
|
||||
|
||||
DEFAULT_IMMUTABLETYPE_FOR_ALL_DATACLASSES = ImmutableType(40)
|
||||
DEFAULT_A_FOR_ALL_DATACLASSES = A([1, 2, 3])
|
||||
@@ -45,3 +47,25 @@ class B:
|
||||
okay_variant: A = DEFAULT_A_FOR_ALL_DATACLASSES
|
||||
|
||||
fine_dataclass_function: list[int] = field(default_factory=list)
|
||||
|
||||
|
||||
class IntConversionDescriptor:
|
||||
def __init__(self, *, default):
|
||||
self._default = default
|
||||
|
||||
def __set_name__(self, owner, name):
|
||||
self._name = "_" + name
|
||||
|
||||
def __get__(self, obj, type):
|
||||
if obj is None:
|
||||
return self._default
|
||||
|
||||
return getattr(obj, self._name, self._default)
|
||||
|
||||
def __set__(self, obj, value):
|
||||
setattr(obj, self._name, int(value))
|
||||
|
||||
|
||||
@dataclass
|
||||
class InventoryItem:
|
||||
quantity_on_hand: IntConversionDescriptor = IntConversionDescriptor(default=100)
|
||||
|
||||
@@ -34,3 +34,7 @@ f"{ascii(bla)}" # OK
|
||||
" intermediary content "
|
||||
f" that flows {repr(obj)} of type {type(obj)}.{additional_message}" # RUF010
|
||||
)
|
||||
|
||||
|
||||
# OK
|
||||
f"{str({})}"
|
||||
|
||||
185
crates/ruff/resources/test/fixtures/ruff/RUF014.py
vendored
Normal file
185
crates/ruff/resources/test/fixtures/ruff/RUF014.py
vendored
Normal file
@@ -0,0 +1,185 @@
|
||||
def after_return():
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
async def also_works_on_async_functions():
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def if_always_true():
|
||||
if True:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def if_always_false():
|
||||
if False:
|
||||
return "unreachable"
|
||||
return "reachable"
|
||||
|
||||
def if_elif_always_false():
|
||||
if False:
|
||||
return "unreachable"
|
||||
elif False:
|
||||
return "also unreachable"
|
||||
return "reachable"
|
||||
|
||||
def if_elif_always_true():
|
||||
if False:
|
||||
return "unreachable"
|
||||
elif True:
|
||||
return "reachable"
|
||||
return "also unreachable"
|
||||
|
||||
def ends_with_if():
|
||||
if False:
|
||||
return "unreachable"
|
||||
else:
|
||||
return "reachable"
|
||||
|
||||
def infinite_loop():
|
||||
while True:
|
||||
continue
|
||||
return "unreachable"
|
||||
|
||||
''' TODO: we could determine these, but we don't yet.
|
||||
def for_range_return():
|
||||
for i in range(10):
|
||||
if i == 5:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def for_range_else():
|
||||
for i in range(111):
|
||||
if i == 5:
|
||||
return "reachable"
|
||||
else:
|
||||
return "unreachable"
|
||||
return "also unreachable"
|
||||
|
||||
def for_range_break():
|
||||
for i in range(13):
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def for_range_if_break():
|
||||
for i in range(1110):
|
||||
if True:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
'''
|
||||
|
||||
def match_wildcard(status):
|
||||
match status:
|
||||
case _:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def match_case_and_wildcard(status):
|
||||
match status:
|
||||
case 1:
|
||||
return "reachable"
|
||||
case _:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def raise_exception():
|
||||
raise Exception
|
||||
return "unreachable"
|
||||
|
||||
def while_false():
|
||||
while False:
|
||||
return "unreachable"
|
||||
return "reachable"
|
||||
|
||||
def while_false_else():
|
||||
while False:
|
||||
return "unreachable"
|
||||
else:
|
||||
return "reachable"
|
||||
|
||||
def while_false_else_return():
|
||||
while False:
|
||||
return "unreachable"
|
||||
else:
|
||||
return "reachable"
|
||||
return "also unreachable"
|
||||
|
||||
def while_true():
|
||||
while True:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
def while_true_else():
|
||||
while True:
|
||||
return "reachable"
|
||||
else:
|
||||
return "unreachable"
|
||||
|
||||
def while_true_else_return():
|
||||
while True:
|
||||
return "reachable"
|
||||
else:
|
||||
return "unreachable"
|
||||
return "also unreachable"
|
||||
|
||||
def while_false_var_i():
|
||||
i = 0
|
||||
while False:
|
||||
i += 1
|
||||
return i
|
||||
|
||||
def while_true_var_i():
|
||||
i = 0
|
||||
while True:
|
||||
i += 1
|
||||
return i
|
||||
|
||||
def while_infinite():
|
||||
while True:
|
||||
pass
|
||||
return "unreachable"
|
||||
|
||||
def while_if_true():
|
||||
while True:
|
||||
if True:
|
||||
return "reachable"
|
||||
return "unreachable"
|
||||
|
||||
# Test case found in the Bokeh repository that trigger a false positive.
|
||||
def bokeh1(self, obj: BytesRep) -> bytes:
|
||||
data = obj["data"]
|
||||
|
||||
if isinstance(data, str):
|
||||
return base64.b64decode(data)
|
||||
elif isinstance(data, Buffer):
|
||||
buffer = data
|
||||
else:
|
||||
id = data["id"]
|
||||
|
||||
if id in self._buffers:
|
||||
buffer = self._buffers[id]
|
||||
else:
|
||||
self.error(f"can't resolve buffer '{id}'")
|
||||
|
||||
return buffer.data
|
||||
|
||||
'''
|
||||
TODO: because `try` statements aren't handled this triggers a false positive as
|
||||
the last statement is reached, but the rules thinks it isn't (it doesn't
|
||||
see/process the break statement).
|
||||
|
||||
# Test case found in the Bokeh repository that trigger a false positive.
|
||||
def bokeh2(self, host: str = DEFAULT_HOST, port: int = DEFAULT_PORT) -> None:
|
||||
self.stop_serving = False
|
||||
while True:
|
||||
try:
|
||||
self.server = HTTPServer((host, port), HtmlOnlyHandler)
|
||||
self.host = host
|
||||
self.port = port
|
||||
break
|
||||
except OSError:
|
||||
log.debug(f"port {port} is in use, trying to next one")
|
||||
port += 1
|
||||
|
||||
self.thread = threading.Thread(target=self._run_web_server)
|
||||
'''
|
||||
44
crates/ruff/resources/test/fixtures/ruff/RUF015.py
vendored
Normal file
44
crates/ruff/resources/test/fixtures/ruff/RUF015.py
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
x = range(10)
|
||||
|
||||
# RUF015
|
||||
list(x)[0]
|
||||
list(x)[:1]
|
||||
list(x)[:1:1]
|
||||
list(x)[:1:2]
|
||||
tuple(x)[0]
|
||||
tuple(x)[:1]
|
||||
tuple(x)[:1:1]
|
||||
tuple(x)[:1:2]
|
||||
list(i for i in x)[0]
|
||||
list(i for i in x)[:1]
|
||||
list(i for i in x)[:1:1]
|
||||
list(i for i in x)[:1:2]
|
||||
[i for i in x][0]
|
||||
[i for i in x][:1]
|
||||
[i for i in x][:1:1]
|
||||
[i for i in x][:1:2]
|
||||
|
||||
# OK (not indexing (solely) the first element)
|
||||
list(x)
|
||||
list(x)[1]
|
||||
list(x)[-1]
|
||||
list(x)[1:]
|
||||
list(x)[:3:2]
|
||||
list(x)[::2]
|
||||
list(x)[::]
|
||||
[i for i in x]
|
||||
[i for i in x][1]
|
||||
[i for i in x][-1]
|
||||
[i for i in x][1:]
|
||||
[i for i in x][:3:2]
|
||||
[i for i in x][::2]
|
||||
[i for i in x][::]
|
||||
|
||||
# OK (doesn't mirror the underlying list)
|
||||
[i + 1 for i in x][0]
|
||||
[i for i in x if i > 5][0]
|
||||
[(i, i + 1) for i in x][0]
|
||||
|
||||
# OK (multiple generators)
|
||||
y = range(10)
|
||||
[i + j for i in x for j in y][0]
|
||||
115
crates/ruff/resources/test/fixtures/ruff/RUF016.py
vendored
Normal file
115
crates/ruff/resources/test/fixtures/ruff/RUF016.py
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
# Should not emit for valid access with index
|
||||
var = "abc"[0]
|
||||
var = f"abc"[0]
|
||||
var = [1, 2, 3][0]
|
||||
var = (1, 2, 3)[0]
|
||||
var = b"abc"[0]
|
||||
|
||||
# Should not emit for valid access with slice
|
||||
var = "abc"[0:2]
|
||||
var = f"abc"[0:2]
|
||||
var = b"abc"[0:2]
|
||||
var = [1, 2, 3][0:2]
|
||||
var = (1, 2, 3)[0:2]
|
||||
var = [1, 2, 3][None:2]
|
||||
var = [1, 2, 3][0:None]
|
||||
var = [1, 2, 3][:2]
|
||||
var = [1, 2, 3][0:]
|
||||
|
||||
# Should emit for invalid access on strings
|
||||
var = "abc"["x"]
|
||||
var = f"abc"["x"]
|
||||
|
||||
# Should emit for invalid access on bytes
|
||||
var = b"abc"["x"]
|
||||
|
||||
# Should emit for invalid access on lists and tuples
|
||||
var = [1, 2, 3]["x"]
|
||||
var = (1, 2, 3)["x"]
|
||||
|
||||
# Should emit for invalid access on list comprehensions
|
||||
var = [x for x in range(10)]["x"]
|
||||
|
||||
# Should emit for invalid access using tuple
|
||||
var = "abc"[1, 2]
|
||||
|
||||
# Should emit for invalid access using string
|
||||
var = [1, 2]["x"]
|
||||
|
||||
# Should emit for invalid access using float
|
||||
var = [1, 2][0.25]
|
||||
|
||||
# Should emit for invalid access using dict
|
||||
var = [1, 2][{"x": "y"}]
|
||||
|
||||
# Should emit for invalid access using dict comp
|
||||
var = [1, 2][{x: "y" for x in range(2)}]
|
||||
|
||||
# Should emit for invalid access using list
|
||||
var = [1, 2][2, 3]
|
||||
|
||||
# Should emit for invalid access using list comp
|
||||
var = [1, 2][[x for x in range(2)]]
|
||||
|
||||
# Should emit on invalid access using set
|
||||
var = [1, 2][{"x", "y"}]
|
||||
|
||||
# Should emit on invalid access using set comp
|
||||
var = [1, 2][{x for x in range(2)}]
|
||||
|
||||
# Should emit on invalid access using bytes
|
||||
var = [1, 2][b"x"]
|
||||
|
||||
# Should emit for non-integer slice start
|
||||
var = [1, 2, 3]["x":2]
|
||||
var = [1, 2, 3][f"x":2]
|
||||
var = [1, 2, 3][1.2:2]
|
||||
var = [1, 2, 3][{"x"}:2]
|
||||
var = [1, 2, 3][{x for x in range(2)}:2]
|
||||
var = [1, 2, 3][{"x": x for x in range(2)}:2]
|
||||
var = [1, 2, 3][[x for x in range(2)]:2]
|
||||
|
||||
# Should emit for non-integer slice end
|
||||
var = [1, 2, 3][0:"x"]
|
||||
var = [1, 2, 3][0:f"x"]
|
||||
var = [1, 2, 3][0:1.2]
|
||||
var = [1, 2, 3][0:{"x"}]
|
||||
var = [1, 2, 3][0:{x for x in range(2)}]
|
||||
var = [1, 2, 3][0:{"x": x for x in range(2)}]
|
||||
var = [1, 2, 3][0:[x for x in range(2)]]
|
||||
|
||||
# Should emit for non-integer slice step
|
||||
var = [1, 2, 3][0:1:"x"]
|
||||
var = [1, 2, 3][0:1:f"x"]
|
||||
var = [1, 2, 3][0:1:1.2]
|
||||
var = [1, 2, 3][0:1:{"x"}]
|
||||
var = [1, 2, 3][0:1:{x for x in range(2)}]
|
||||
var = [1, 2, 3][0:1:{"x": x for x in range(2)}]
|
||||
var = [1, 2, 3][0:1:[x for x in range(2)]]
|
||||
|
||||
# Should emit for non-integer slice start and end; should emit twice with specific ranges
|
||||
var = [1, 2, 3]["x":"y"]
|
||||
|
||||
# Should emit once for repeated invalid access
|
||||
var = [1, 2, 3]["x"]["y"]["z"]
|
||||
|
||||
# Cannot emit on invalid access using variable in index
|
||||
x = "x"
|
||||
var = "abc"[x]
|
||||
|
||||
# Cannot emit on invalid access using call
|
||||
def func():
|
||||
return 1
|
||||
var = "abc"[func()]
|
||||
|
||||
# Cannot emit on invalid access using a variable in parent
|
||||
x = [1, 2, 3]
|
||||
var = x["y"]
|
||||
|
||||
# Cannot emit for invalid access on byte array
|
||||
var = bytearray(b"abc")["x"]
|
||||
|
||||
# Cannot emit for slice bound using variable
|
||||
x = "x"
|
||||
var = [1, 2, 3][0:x]
|
||||
var = [1, 2, 3][x:1]
|
||||
@@ -57,3 +57,10 @@ def fine():
|
||||
a = process() # This throws the exception now
|
||||
finally:
|
||||
print("finally")
|
||||
|
||||
|
||||
def fine():
|
||||
try:
|
||||
raise ValueError("a doesn't exist")
|
||||
except TypeError: # A different exception is caught
|
||||
print("A different exception is caught")
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
use anyhow::{bail, Result};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
use rustpython_parser::ast::{self, ExceptHandler, Expr, Keyword, Ranged, Stmt};
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
use rustpython_parser::{lexer, Mode};
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::helpers;
|
||||
@@ -98,7 +98,7 @@ pub(crate) fn remove_argument(
|
||||
// Case 1: there is only one argument.
|
||||
let mut count = 0u32;
|
||||
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if tok.is_lpar() {
|
||||
if count == 0 {
|
||||
fix_start = Some(if remove_parentheses {
|
||||
range.start()
|
||||
@@ -109,7 +109,7 @@ pub(crate) fn remove_argument(
|
||||
count = count.saturating_add(1);
|
||||
}
|
||||
|
||||
if matches!(tok, Tok::Rpar) {
|
||||
if tok.is_rpar() {
|
||||
count = count.saturating_sub(1);
|
||||
if count == 0 {
|
||||
fix_end = Some(if remove_parentheses {
|
||||
@@ -131,11 +131,11 @@ pub(crate) fn remove_argument(
|
||||
let mut seen_comma = false;
|
||||
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
|
||||
if seen_comma {
|
||||
if matches!(tok, Tok::NonLogicalNewline) {
|
||||
if tok.is_non_logical_newline() {
|
||||
// Also delete any non-logical newlines after the comma.
|
||||
continue;
|
||||
}
|
||||
fix_end = Some(if matches!(tok, Tok::Newline) {
|
||||
fix_end = Some(if tok.is_newline() {
|
||||
range.end()
|
||||
} else {
|
||||
range.start()
|
||||
@@ -145,7 +145,7 @@ pub(crate) fn remove_argument(
|
||||
if range.start() == expr_range.start() {
|
||||
fix_start = Some(range.start());
|
||||
}
|
||||
if fix_start.is_some() && matches!(tok, Tok::Comma) {
|
||||
if fix_start.is_some() && tok.is_comma() {
|
||||
seen_comma = true;
|
||||
}
|
||||
}
|
||||
@@ -157,7 +157,7 @@ pub(crate) fn remove_argument(
|
||||
fix_end = Some(expr_range.end());
|
||||
break;
|
||||
}
|
||||
if matches!(tok, Tok::Comma) {
|
||||
if tok.is_comma() {
|
||||
fix_start = Some(range.start());
|
||||
}
|
||||
}
|
||||
@@ -317,10 +317,10 @@ mod tests {
|
||||
Some(TextSize::from(6))
|
||||
);
|
||||
|
||||
let contents = r#"
|
||||
let contents = r"
|
||||
x = 1 \
|
||||
; y = 1
|
||||
"#
|
||||
"
|
||||
.trim();
|
||||
let program = Suite::parse(contents, "<filename>")?;
|
||||
let stmt = program.first().unwrap();
|
||||
@@ -349,10 +349,10 @@ x = 1 \
|
||||
TextSize::from(6)
|
||||
);
|
||||
|
||||
let contents = r#"
|
||||
let contents = r"
|
||||
x = 1 \
|
||||
; y = 1
|
||||
"#
|
||||
"
|
||||
.trim();
|
||||
let locator = Locator::new(contents);
|
||||
assert_eq!(
|
||||
|
||||
@@ -619,6 +619,11 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "unreachable-code")]
|
||||
if self.enabled(Rule::UnreachableCode) {
|
||||
self.diagnostics
|
||||
.extend(ruff::rules::unreachable::in_function(name, body));
|
||||
}
|
||||
}
|
||||
Stmt::Return(_) => {
|
||||
if self.enabled(Rule::ReturnOutsideFunction) {
|
||||
@@ -771,7 +776,7 @@ where
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
|
||||
}
|
||||
if self.enabled(Rule::GlobalStatement) {
|
||||
for name in names.iter() {
|
||||
for name in names {
|
||||
if let Some(asname) = name.asname.as_ref() {
|
||||
pylint::rules::global_statement(self, asname);
|
||||
} else {
|
||||
@@ -967,7 +972,7 @@ where
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
|
||||
}
|
||||
if self.enabled(Rule::GlobalStatement) {
|
||||
for name in names.iter() {
|
||||
for name in names {
|
||||
if let Some(asname) = name.asname.as_ref() {
|
||||
pylint::rules::global_statement(self, asname);
|
||||
} else {
|
||||
@@ -1612,7 +1617,7 @@ where
|
||||
flake8_bandit::rules::assign_hardcoded_password_string(self, value, targets);
|
||||
}
|
||||
if self.enabled(Rule::GlobalStatement) {
|
||||
for target in targets.iter() {
|
||||
for target in targets {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
@@ -1650,6 +1655,15 @@ where
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(Rule::TypeParamNameMismatch) {
|
||||
pylint::rules::type_param_name_mismatch(self, value, targets);
|
||||
}
|
||||
if self.settings.rules.enabled(Rule::TypeNameIncorrectVariance) {
|
||||
pylint::rules::type_name_incorrect_variance(self, value);
|
||||
}
|
||||
if self.settings.rules.enabled(Rule::TypeBivariance) {
|
||||
pylint::rules::type_bivariance(self, value);
|
||||
}
|
||||
if self.is_stub {
|
||||
if self.any_enabled(&[
|
||||
Rule::UnprefixedTypeParam,
|
||||
@@ -1735,7 +1749,7 @@ where
|
||||
}
|
||||
Stmt::Delete(ast::StmtDelete { targets, range: _ }) => {
|
||||
if self.enabled(Rule::GlobalStatement) {
|
||||
for target in targets.iter() {
|
||||
for target in targets {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
@@ -1770,7 +1784,6 @@ where
|
||||
match stmt {
|
||||
Stmt::FunctionDef(ast::StmtFunctionDef {
|
||||
body,
|
||||
name,
|
||||
args,
|
||||
decorator_list,
|
||||
returns,
|
||||
@@ -1778,7 +1791,6 @@ where
|
||||
})
|
||||
| Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef {
|
||||
body,
|
||||
name,
|
||||
args,
|
||||
decorator_list,
|
||||
returns,
|
||||
@@ -1802,7 +1814,7 @@ where
|
||||
{
|
||||
if let Some(expr) = &arg_with_default.def.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_type_definition(expr);
|
||||
self.visit_runtime_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -1814,7 +1826,7 @@ where
|
||||
if let Some(arg) = &args.vararg {
|
||||
if let Some(expr) = &arg.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_type_definition(expr);
|
||||
self.visit_runtime_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -1823,7 +1835,7 @@ where
|
||||
if let Some(arg) = &args.kwarg {
|
||||
if let Some(expr) = &arg.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_type_definition(expr);
|
||||
self.visit_runtime_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -1831,19 +1843,12 @@ where
|
||||
}
|
||||
for expr in returns {
|
||||
if runtime_annotation {
|
||||
self.visit_type_definition(expr);
|
||||
self.visit_runtime_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
}
|
||||
|
||||
self.add_binding(
|
||||
name,
|
||||
stmt.identifier(),
|
||||
BindingKind::FunctionDefinition,
|
||||
BindingFlags::empty(),
|
||||
);
|
||||
|
||||
let definition = docstrings::extraction::extract_definition(
|
||||
ExtractionTarget::Function,
|
||||
stmt,
|
||||
@@ -1990,7 +1995,7 @@ where
|
||||
};
|
||||
|
||||
if runtime_annotation {
|
||||
self.visit_type_definition(annotation);
|
||||
self.visit_runtime_annotation(annotation);
|
||||
} else {
|
||||
self.visit_annotation(annotation);
|
||||
}
|
||||
@@ -2053,19 +2058,28 @@ where
|
||||
|
||||
// Post-visit.
|
||||
match stmt {
|
||||
Stmt::FunctionDef(_) | Stmt::AsyncFunctionDef(_) => {
|
||||
self.deferred.scopes.push(self.semantic.scope_id);
|
||||
self.semantic.pop_scope();
|
||||
self.semantic.pop_definition();
|
||||
}
|
||||
Stmt::ClassDef(ast::StmtClassDef { name, .. }) => {
|
||||
self.deferred.scopes.push(self.semantic.scope_id);
|
||||
Stmt::FunctionDef(ast::StmtFunctionDef { name, .. })
|
||||
| Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef { name, .. }) => {
|
||||
let scope_id = self.semantic.scope_id;
|
||||
self.deferred.scopes.push(scope_id);
|
||||
self.semantic.pop_scope();
|
||||
self.semantic.pop_definition();
|
||||
self.add_binding(
|
||||
name,
|
||||
stmt.identifier(),
|
||||
BindingKind::ClassDefinition,
|
||||
BindingKind::FunctionDefinition(scope_id),
|
||||
BindingFlags::empty(),
|
||||
);
|
||||
}
|
||||
Stmt::ClassDef(ast::StmtClassDef { name, .. }) => {
|
||||
let scope_id = self.semantic.scope_id;
|
||||
self.deferred.scopes.push(scope_id);
|
||||
self.semantic.pop_scope();
|
||||
self.semantic.pop_definition();
|
||||
self.add_binding(
|
||||
name,
|
||||
stmt.identifier(),
|
||||
BindingKind::ClassDefinition(scope_id),
|
||||
BindingFlags::empty(),
|
||||
);
|
||||
}
|
||||
@@ -2078,7 +2092,7 @@ where
|
||||
|
||||
fn visit_annotation(&mut self, expr: &'b Expr) {
|
||||
let flags_snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::ANNOTATION;
|
||||
self.semantic.flags |= SemanticModelFlags::TYPING_ONLY_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = flags_snapshot;
|
||||
}
|
||||
@@ -2128,7 +2142,7 @@ where
|
||||
|
||||
// Pre-visit.
|
||||
match expr {
|
||||
Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => {
|
||||
Expr::Subscript(subscript @ ast::ExprSubscript { value, slice, .. }) => {
|
||||
// Ex) Optional[...], Union[...]
|
||||
if self.any_enabled(&[
|
||||
Rule::FutureRewritableTypeAnnotation,
|
||||
@@ -2137,7 +2151,8 @@ where
|
||||
if let Some(operator) = typing::to_pep604_operator(value, slice, &self.semantic)
|
||||
{
|
||||
if self.enabled(Rule::FutureRewritableTypeAnnotation) {
|
||||
if self.settings.target_version < PythonVersion::Py310
|
||||
if !self.is_stub
|
||||
&& self.settings.target_version < PythonVersion::Py310
|
||||
&& self.settings.target_version >= PythonVersion::Py37
|
||||
&& !self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
@@ -2149,7 +2164,8 @@ where
|
||||
}
|
||||
}
|
||||
if self.enabled(Rule::NonPEP604Annotation) {
|
||||
if self.settings.target_version >= PythonVersion::Py310
|
||||
if self.is_stub
|
||||
|| self.settings.target_version >= PythonVersion::Py310
|
||||
|| (self.settings.target_version >= PythonVersion::Py37
|
||||
&& self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
@@ -2165,7 +2181,8 @@ where
|
||||
|
||||
// Ex) list[...]
|
||||
if self.enabled(Rule::FutureRequiredTypeAnnotation) {
|
||||
if self.settings.target_version < PythonVersion::Py39
|
||||
if !self.is_stub
|
||||
&& self.settings.target_version < PythonVersion::Py39
|
||||
&& !self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
&& typing::is_pep585_generic(value, &self.semantic)
|
||||
@@ -2178,6 +2195,28 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
// Ex) Union[...]
|
||||
if self.any_enabled(&[Rule::UnnecessaryLiteralUnion, Rule::DuplicateUnionMember]) {
|
||||
// Determine if the current expression is an union
|
||||
// Avoid duplicate checks if the parent is an `Union[...]` since these rules traverse nested unions
|
||||
let is_unchecked_union = self
|
||||
.semantic
|
||||
.expr_grandparent()
|
||||
.and_then(Expr::as_subscript_expr)
|
||||
.map_or(true, |parent| {
|
||||
!self.semantic.match_typing_expr(&parent.value, "Union")
|
||||
});
|
||||
|
||||
if is_unchecked_union {
|
||||
if self.enabled(Rule::UnnecessaryLiteralUnion) {
|
||||
flake8_pyi::rules::unnecessary_literal_union(self, expr);
|
||||
}
|
||||
if self.enabled(Rule::DuplicateUnionMember) {
|
||||
flake8_pyi::rules::duplicate_union_member(self, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.semantic.match_typing_expr(value, "Literal") {
|
||||
self.semantic.flags |= SemanticModelFlags::LITERAL;
|
||||
}
|
||||
@@ -2192,6 +2231,13 @@ where
|
||||
if self.enabled(Rule::UncapitalizedEnvironmentVariables) {
|
||||
flake8_simplify::rules::use_capital_environment_variables(self, expr);
|
||||
}
|
||||
if self.enabled(Rule::UnnecessaryIterableAllocationForFirstElement) {
|
||||
ruff::rules::unnecessary_iterable_allocation_for_first_element(self, subscript);
|
||||
}
|
||||
|
||||
if self.enabled(Rule::InvalidIndexType) {
|
||||
ruff::rules::invalid_index_type(self, subscript);
|
||||
}
|
||||
|
||||
pandas_vet::rules::subscript(self, value, expr);
|
||||
}
|
||||
@@ -2247,19 +2293,21 @@ where
|
||||
typing::to_pep585_generic(expr, &self.semantic)
|
||||
{
|
||||
if self.enabled(Rule::FutureRewritableTypeAnnotation) {
|
||||
if self.settings.target_version < PythonVersion::Py39
|
||||
if !self.is_stub
|
||||
&& self.settings.target_version < PythonVersion::Py39
|
||||
&& self.settings.target_version >= PythonVersion::Py37
|
||||
&& !self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
&& !self.settings.pyupgrade.keep_runtime_typing
|
||||
{
|
||||
flake8_future_annotations::rules::future_rewritable_type_annotation(
|
||||
self, expr,
|
||||
);
|
||||
self, expr,
|
||||
);
|
||||
}
|
||||
}
|
||||
if self.enabled(Rule::NonPEP585Annotation) {
|
||||
if self.settings.target_version >= PythonVersion::Py39
|
||||
if self.is_stub
|
||||
|| self.settings.target_version >= PythonVersion::Py39
|
||||
|| (self.settings.target_version >= PythonVersion::Py37
|
||||
&& self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
@@ -2324,7 +2372,8 @@ where
|
||||
]) {
|
||||
if let Some(replacement) = typing::to_pep585_generic(expr, &self.semantic) {
|
||||
if self.enabled(Rule::FutureRewritableTypeAnnotation) {
|
||||
if self.settings.target_version < PythonVersion::Py39
|
||||
if !self.is_stub
|
||||
&& self.settings.target_version < PythonVersion::Py39
|
||||
&& self.settings.target_version >= PythonVersion::Py37
|
||||
&& !self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
@@ -2336,7 +2385,8 @@ where
|
||||
}
|
||||
}
|
||||
if self.enabled(Rule::NonPEP585Annotation) {
|
||||
if self.settings.target_version >= PythonVersion::Py39
|
||||
if self.is_stub
|
||||
|| self.settings.target_version >= PythonVersion::Py39
|
||||
|| (self.settings.target_version >= PythonVersion::Py37
|
||||
&& self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
@@ -2380,12 +2430,14 @@ where
|
||||
}
|
||||
pandas_vet::rules::attr(self, attr, value, expr);
|
||||
}
|
||||
Expr::Call(ast::ExprCall {
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
range: _,
|
||||
}) => {
|
||||
Expr::Call(
|
||||
call @ ast::ExprCall {
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
range: _,
|
||||
},
|
||||
) => {
|
||||
if let Expr::Name(ast::ExprName { id, ctx, range: _ }) = func.as_ref() {
|
||||
if id == "locals" && matches!(ctx, ExprContext::Load) {
|
||||
let scope = self.semantic.scope_mut();
|
||||
@@ -2409,19 +2461,19 @@ where
|
||||
if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
|
||||
let attr = attr.as_str();
|
||||
if let Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Str(value),
|
||||
value: Constant::Str(val),
|
||||
..
|
||||
}) = value.as_ref()
|
||||
{
|
||||
if attr == "join" {
|
||||
// "...".join(...) call
|
||||
if self.enabled(Rule::StaticJoinToFString) {
|
||||
flynt::rules::static_join_to_fstring(self, expr, value);
|
||||
flynt::rules::static_join_to_fstring(self, expr, val);
|
||||
}
|
||||
} else if attr == "format" {
|
||||
// "...".format(...) call
|
||||
let location = expr.range();
|
||||
match pyflakes::format::FormatSummary::try_from(value.as_ref()) {
|
||||
match pyflakes::format::FormatSummary::try_from(val.as_ref()) {
|
||||
Err(e) => {
|
||||
if self.enabled(Rule::StringDotFormatInvalidFormat) {
|
||||
self.diagnostics.push(Diagnostic::new(
|
||||
@@ -2465,7 +2517,13 @@ where
|
||||
}
|
||||
|
||||
if self.enabled(Rule::FString) {
|
||||
pyupgrade::rules::f_strings(self, &summary, expr);
|
||||
pyupgrade::rules::f_strings(
|
||||
self,
|
||||
&summary,
|
||||
expr,
|
||||
value,
|
||||
self.settings.line_length,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2503,10 +2561,10 @@ where
|
||||
if self.enabled(Rule::OSErrorAlias) {
|
||||
pyupgrade::rules::os_error_alias_call(self, func);
|
||||
}
|
||||
if self.enabled(Rule::NonPEP604Isinstance)
|
||||
&& self.settings.target_version >= PythonVersion::Py310
|
||||
{
|
||||
pyupgrade::rules::use_pep604_isinstance(self, expr, func, args);
|
||||
if self.enabled(Rule::NonPEP604Isinstance) {
|
||||
if self.settings.target_version >= PythonVersion::Py310 {
|
||||
pyupgrade::rules::use_pep604_isinstance(self, expr, func, args);
|
||||
}
|
||||
}
|
||||
if self.enabled(Rule::BlockingHttpCallInAsyncFunction) {
|
||||
flake8_async::rules::blocking_http_call(self, expr);
|
||||
@@ -2545,6 +2603,9 @@ where
|
||||
]) {
|
||||
flake8_bandit::rules::suspicious_function_call(self, expr);
|
||||
}
|
||||
if self.enabled(Rule::ReSubPositionalArgs) {
|
||||
flake8_bugbear::rules::re_sub_positional_args(self, call);
|
||||
}
|
||||
if self.enabled(Rule::UnreliableCallableCheck) {
|
||||
flake8_bugbear::rules::unreliable_callable_check(self, expr, func, args);
|
||||
}
|
||||
@@ -2579,9 +2640,7 @@ where
|
||||
flake8_pie::rules::unnecessary_dict_kwargs(self, expr, keywords);
|
||||
}
|
||||
if self.enabled(Rule::ExecBuiltin) {
|
||||
if let Some(diagnostic) = flake8_bandit::rules::exec_used(expr, func) {
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
flake8_bandit::rules::exec_used(self, func);
|
||||
}
|
||||
if self.enabled(Rule::BadFilePermissions) {
|
||||
flake8_bandit::rules::bad_file_permissions(self, func, args, keywords);
|
||||
@@ -3112,7 +3171,8 @@ where
|
||||
}) => {
|
||||
// Ex) `str | None`
|
||||
if self.enabled(Rule::FutureRequiredTypeAnnotation) {
|
||||
if self.settings.target_version < PythonVersion::Py310
|
||||
if !self.is_stub
|
||||
&& self.settings.target_version < PythonVersion::Py310
|
||||
&& !self.semantic.future_annotations()
|
||||
&& self.semantic.in_annotation()
|
||||
{
|
||||
@@ -3123,22 +3183,26 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if self.is_stub {
|
||||
if self.enabled(Rule::DuplicateUnionMember)
|
||||
&& self.semantic.in_type_definition()
|
||||
&& self.semantic.expr_parent().map_or(true, |parent| {
|
||||
!matches!(
|
||||
parent,
|
||||
Expr::BinOp(ast::ExprBinOp {
|
||||
op: Operator::BitOr,
|
||||
..
|
||||
})
|
||||
)
|
||||
})
|
||||
// Avoid duplicate checks if the parent is an `|`
|
||||
&& !matches!(
|
||||
self.semantic.expr_parent(),
|
||||
Some(Expr::BinOp(ast::ExprBinOp { op: Operator::BitOr, ..}))
|
||||
)
|
||||
{
|
||||
flake8_pyi::rules::duplicate_union_member(self, expr);
|
||||
}
|
||||
if self.enabled(Rule::UnnecessaryLiteralUnion)
|
||||
// Avoid duplicate checks if the parent is an `|`
|
||||
&& !matches!(
|
||||
self.semantic.expr_parent(),
|
||||
Some(Expr::BinOp(ast::ExprBinOp { op: Operator::BitOr, ..}))
|
||||
)
|
||||
{
|
||||
flake8_pyi::rules::unnecessary_literal_union(self, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::UnaryOp(ast::ExprUnaryOp {
|
||||
@@ -3878,7 +3942,7 @@ where
|
||||
}
|
||||
|
||||
// Store the existing binding, if any.
|
||||
let existing_id = self.semantic.lookup(name);
|
||||
let existing_id = self.semantic.lookup_symbol(name);
|
||||
|
||||
// Add the bound exception name to the scope.
|
||||
let binding_id = self.add_binding(
|
||||
@@ -4116,6 +4180,14 @@ impl<'a> Checker<'a> {
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-required type annotation.
|
||||
fn visit_runtime_annotation(&mut self, expr: &'a Expr) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a type definition.
|
||||
fn visit_type_definition(&mut self, expr: &'a Expr) {
|
||||
let snapshot = self.semantic.flags;
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
use itertools::Itertools;
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
use rustpython_parser::ast::Ranged;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix};
|
||||
use ruff_python_ast::source_code::Locator;
|
||||
@@ -22,7 +23,7 @@ pub(crate) fn check_noqa(
|
||||
settings: &Settings,
|
||||
) -> Vec<usize> {
|
||||
// Identify any codes that are globally exempted (within the current file).
|
||||
let exemption = noqa::file_exemption(locator.contents(), comment_ranges);
|
||||
let exemption = FileExemption::try_extract(locator.contents(), comment_ranges, locator);
|
||||
|
||||
// Extract all `noqa` directives.
|
||||
let mut noqa_directives = NoqaDirectives::from_commented_ranges(comment_ranges, locator);
|
||||
@@ -37,19 +38,19 @@ pub(crate) fn check_noqa(
|
||||
}
|
||||
|
||||
match &exemption {
|
||||
FileExemption::All => {
|
||||
Some(FileExemption::All) => {
|
||||
// If the file is exempted, ignore all diagnostics.
|
||||
ignored_diagnostics.push(index);
|
||||
continue;
|
||||
}
|
||||
FileExemption::Codes(codes) => {
|
||||
Some(FileExemption::Codes(codes)) => {
|
||||
// If the diagnostic is ignored by a global exemption, ignore it.
|
||||
if codes.contains(&diagnostic.kind.rule().noqa_code()) {
|
||||
ignored_diagnostics.push(index);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
FileExemption::None => {}
|
||||
None => {}
|
||||
}
|
||||
|
||||
let noqa_offsets = diagnostic
|
||||
@@ -63,15 +64,15 @@ pub(crate) fn check_noqa(
|
||||
if let Some(directive_line) = noqa_directives.find_line_with_directive_mut(noqa_offset)
|
||||
{
|
||||
let suppressed = match &directive_line.directive {
|
||||
Directive::All(..) => {
|
||||
Directive::All(_) => {
|
||||
directive_line
|
||||
.matches
|
||||
.push(diagnostic.kind.rule().noqa_code());
|
||||
ignored_diagnostics.push(index);
|
||||
true
|
||||
}
|
||||
Directive::Codes(.., codes, _) => {
|
||||
if noqa::includes(diagnostic.kind.rule(), codes) {
|
||||
Directive::Codes(directive) => {
|
||||
if noqa::includes(diagnostic.kind.rule(), directive.codes()) {
|
||||
directive_line
|
||||
.matches
|
||||
.push(diagnostic.kind.rule().noqa_code());
|
||||
@@ -81,7 +82,6 @@ pub(crate) fn check_noqa(
|
||||
false
|
||||
}
|
||||
}
|
||||
Directive::None => unreachable!(),
|
||||
};
|
||||
|
||||
if suppressed {
|
||||
@@ -95,36 +95,31 @@ pub(crate) fn check_noqa(
|
||||
if analyze_directives && settings.rules.enabled(Rule::UnusedNOQA) {
|
||||
for line in noqa_directives.lines() {
|
||||
match &line.directive {
|
||||
Directive::All(leading_spaces, noqa_range, trailing_spaces) => {
|
||||
Directive::All(directive) => {
|
||||
if line.matches.is_empty() {
|
||||
let mut diagnostic =
|
||||
Diagnostic::new(UnusedNOQA { codes: None }, *noqa_range);
|
||||
Diagnostic::new(UnusedNOQA { codes: None }, directive.range());
|
||||
if settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
#[allow(deprecated)]
|
||||
diagnostic.set_fix_from_edit(delete_noqa(
|
||||
*leading_spaces,
|
||||
*noqa_range,
|
||||
*trailing_spaces,
|
||||
locator,
|
||||
));
|
||||
diagnostic.set_fix_from_edit(delete_noqa(directive.range(), locator));
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
Directive::Codes(leading_spaces, range, codes, trailing_spaces) => {
|
||||
Directive::Codes(directive) => {
|
||||
let mut disabled_codes = vec![];
|
||||
let mut unknown_codes = vec![];
|
||||
let mut unmatched_codes = vec![];
|
||||
let mut valid_codes = vec![];
|
||||
let mut self_ignore = false;
|
||||
for code in codes {
|
||||
for code in directive.codes() {
|
||||
let code = get_redirect_target(code).unwrap_or(code);
|
||||
if Rule::UnusedNOQA.noqa_code() == code {
|
||||
self_ignore = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if line.matches.iter().any(|m| *m == code)
|
||||
if line.matches.iter().any(|match_| *match_ == code)
|
||||
|| settings.external.contains(code)
|
||||
{
|
||||
valid_codes.push(code);
|
||||
@@ -166,29 +161,24 @@ pub(crate) fn check_noqa(
|
||||
.collect(),
|
||||
}),
|
||||
},
|
||||
*range,
|
||||
directive.range(),
|
||||
);
|
||||
if settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
if valid_codes.is_empty() {
|
||||
#[allow(deprecated)]
|
||||
diagnostic.set_fix_from_edit(delete_noqa(
|
||||
*leading_spaces,
|
||||
*range,
|
||||
*trailing_spaces,
|
||||
locator,
|
||||
));
|
||||
diagnostic
|
||||
.set_fix_from_edit(delete_noqa(directive.range(), locator));
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
diagnostic.set_fix(Fix::unspecified(Edit::range_replacement(
|
||||
format!("# noqa: {}", valid_codes.join(", ")),
|
||||
*range,
|
||||
directive.range(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
Directive::None => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -198,38 +188,46 @@ pub(crate) fn check_noqa(
|
||||
}
|
||||
|
||||
/// Generate a [`Edit`] to delete a `noqa` directive.
|
||||
fn delete_noqa(
|
||||
leading_spaces: TextSize,
|
||||
noqa_range: TextRange,
|
||||
trailing_spaces: TextSize,
|
||||
locator: &Locator,
|
||||
) -> Edit {
|
||||
let line_range = locator.line_range(noqa_range.start());
|
||||
fn delete_noqa(range: TextRange, locator: &Locator) -> Edit {
|
||||
let line_range = locator.line_range(range.start());
|
||||
|
||||
// Compute the leading space.
|
||||
let prefix = locator.slice(TextRange::new(line_range.start(), range.start()));
|
||||
let leading_space = prefix
|
||||
.rfind(|c: char| !c.is_whitespace())
|
||||
.map_or(prefix.len(), |i| prefix.len() - i - 1);
|
||||
let leading_space_len = TextSize::try_from(leading_space).unwrap();
|
||||
|
||||
// Compute the trailing space.
|
||||
let suffix = locator.slice(TextRange::new(range.end(), line_range.end()));
|
||||
let trailing_space = suffix
|
||||
.find(|c: char| !c.is_whitespace())
|
||||
.map_or(suffix.len(), |i| i);
|
||||
let trailing_space_len = TextSize::try_from(trailing_space).unwrap();
|
||||
|
||||
// Ex) `# noqa`
|
||||
if line_range
|
||||
== TextRange::new(
|
||||
noqa_range.start() - leading_spaces,
|
||||
noqa_range.end() + trailing_spaces,
|
||||
range.start() - leading_space_len,
|
||||
range.end() + trailing_space_len,
|
||||
)
|
||||
{
|
||||
let full_line_end = locator.full_line_end(line_range.end());
|
||||
Edit::deletion(line_range.start(), full_line_end)
|
||||
}
|
||||
// Ex) `x = 1 # noqa`
|
||||
else if noqa_range.end() + trailing_spaces == line_range.end() {
|
||||
Edit::deletion(noqa_range.start() - leading_spaces, line_range.end())
|
||||
else if range.end() + trailing_space_len == line_range.end() {
|
||||
Edit::deletion(range.start() - leading_space_len, line_range.end())
|
||||
}
|
||||
// Ex) `x = 1 # noqa # type: ignore`
|
||||
else if locator.contents()[usize::from(noqa_range.end() + trailing_spaces)..].starts_with('#')
|
||||
{
|
||||
Edit::deletion(noqa_range.start(), noqa_range.end() + trailing_spaces)
|
||||
else if locator.contents()[usize::from(range.end() + trailing_space_len)..].starts_with('#') {
|
||||
Edit::deletion(range.start(), range.end() + trailing_space_len)
|
||||
}
|
||||
// Ex) `x = 1 # noqa here`
|
||||
else {
|
||||
Edit::deletion(
|
||||
noqa_range.start() + "# ".text_len(),
|
||||
noqa_range.end() + trailing_spaces,
|
||||
range.start() + "# ".text_len(),
|
||||
range.end() + trailing_space_len,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ use ruff_python_whitespace::UniversalNewlines;
|
||||
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::flake8_copyright::rules::missing_copyright_notice;
|
||||
use crate::rules::flake8_executable::helpers::{extract_shebang, ShebangDirective};
|
||||
use crate::rules::flake8_executable::helpers::ShebangDirective;
|
||||
use crate::rules::flake8_executable::rules::{
|
||||
shebang_missing, shebang_newline, shebang_not_executable, shebang_python, shebang_whitespace,
|
||||
};
|
||||
@@ -87,33 +87,35 @@ pub(crate) fn check_physical_lines(
|
||||
|| enforce_shebang_newline
|
||||
|| enforce_shebang_python
|
||||
{
|
||||
let shebang = extract_shebang(&line);
|
||||
if enforce_shebang_not_executable {
|
||||
if let Some(diagnostic) = shebang_not_executable(path, line.range(), &shebang) {
|
||||
diagnostics.push(diagnostic);
|
||||
if let Some(shebang) = ShebangDirective::try_extract(&line) {
|
||||
has_any_shebang = true;
|
||||
if enforce_shebang_not_executable {
|
||||
if let Some(diagnostic) =
|
||||
shebang_not_executable(path, line.range(), &shebang)
|
||||
{
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
if enforce_shebang_missing {
|
||||
if !has_any_shebang && matches!(shebang, ShebangDirective::Match(..)) {
|
||||
has_any_shebang = true;
|
||||
if enforce_shebang_whitespace {
|
||||
if let Some(diagnostic) =
|
||||
shebang_whitespace(line.range(), &shebang, fix_shebang_whitespace)
|
||||
{
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
if enforce_shebang_whitespace {
|
||||
if let Some(diagnostic) =
|
||||
shebang_whitespace(line.range(), &shebang, fix_shebang_whitespace)
|
||||
{
|
||||
diagnostics.push(diagnostic);
|
||||
if enforce_shebang_newline {
|
||||
if let Some(diagnostic) =
|
||||
shebang_newline(line.range(), &shebang, index == 0)
|
||||
{
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
if enforce_shebang_newline {
|
||||
if let Some(diagnostic) = shebang_newline(line.range(), &shebang, index == 0) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if enforce_shebang_python {
|
||||
if let Some(diagnostic) = shebang_python(line.range(), &shebang) {
|
||||
diagnostics.push(diagnostic);
|
||||
if enforce_shebang_python {
|
||||
if let Some(diagnostic) = shebang_python(line.range(), &shebang) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::source_code::{Indexer, Locator};
|
||||
|
||||
use crate::directives::TodoComment;
|
||||
use crate::lex::docstring_detection::StateMachine;
|
||||
use crate::registry::{AsRule, Rule};
|
||||
@@ -12,8 +15,6 @@ use crate::rules::{
|
||||
flake8_todos, pycodestyle, pylint, pyupgrade, ruff,
|
||||
};
|
||||
use crate::settings::Settings;
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::source_code::{Indexer, Locator};
|
||||
|
||||
pub(crate) fn check_tokens(
|
||||
locator: &Locator,
|
||||
@@ -88,10 +89,11 @@ pub(crate) fn check_tokens(
|
||||
};
|
||||
|
||||
if matches!(tok, Tok::String { .. } | Tok::Comment(_)) {
|
||||
diagnostics.extend(ruff::rules::ambiguous_unicode_character(
|
||||
ruff::rules::ambiguous_unicode_character(
|
||||
&mut diagnostics,
|
||||
locator,
|
||||
range,
|
||||
if matches!(tok, Tok::String { .. }) {
|
||||
if tok.is_string() {
|
||||
if is_docstring {
|
||||
Context::Docstring
|
||||
} else {
|
||||
@@ -101,93 +103,77 @@ pub(crate) fn check_tokens(
|
||||
Context::Comment
|
||||
},
|
||||
settings,
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ERA001
|
||||
if enforce_commented_out_code {
|
||||
diagnostics.extend(eradicate::rules::commented_out_code(
|
||||
locator, indexer, settings,
|
||||
));
|
||||
eradicate::rules::commented_out_code(&mut diagnostics, locator, indexer, settings);
|
||||
}
|
||||
|
||||
// W605
|
||||
if enforce_invalid_escape_sequence {
|
||||
for (tok, range) in tokens.iter().flatten() {
|
||||
if matches!(tok, Tok::String { .. }) {
|
||||
diagnostics.extend(pycodestyle::rules::invalid_escape_sequence(
|
||||
if tok.is_string() {
|
||||
pycodestyle::rules::invalid_escape_sequence(
|
||||
&mut diagnostics,
|
||||
locator,
|
||||
*range,
|
||||
settings.rules.should_fix(Rule::InvalidEscapeSequence),
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
// PLE2510, PLE2512, PLE2513
|
||||
if enforce_invalid_string_character {
|
||||
for (tok, range) in tokens.iter().flatten() {
|
||||
if matches!(tok, Tok::String { .. }) {
|
||||
diagnostics.extend(
|
||||
pylint::rules::invalid_string_characters(locator, *range)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
if tok.is_string() {
|
||||
pylint::rules::invalid_string_characters(&mut diagnostics, *range, locator);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// E701, E702, E703
|
||||
if enforce_compound_statements {
|
||||
diagnostics.extend(
|
||||
pycodestyle::rules::compound_statements(tokens, locator, indexer, settings)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
pycodestyle::rules::compound_statements(
|
||||
&mut diagnostics,
|
||||
tokens,
|
||||
locator,
|
||||
indexer,
|
||||
settings,
|
||||
);
|
||||
}
|
||||
|
||||
// Q001, Q002, Q003
|
||||
if enforce_quotes {
|
||||
diagnostics.extend(
|
||||
flake8_quotes::rules::from_tokens(tokens, locator, settings)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
flake8_quotes::rules::from_tokens(&mut diagnostics, tokens, locator, settings);
|
||||
}
|
||||
|
||||
// ISC001, ISC002
|
||||
if enforce_implicit_string_concatenation {
|
||||
diagnostics.extend(
|
||||
flake8_implicit_str_concat::rules::implicit(
|
||||
tokens,
|
||||
&settings.flake8_implicit_str_concat,
|
||||
locator,
|
||||
)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
flake8_implicit_str_concat::rules::implicit(
|
||||
&mut diagnostics,
|
||||
tokens,
|
||||
&settings.flake8_implicit_str_concat,
|
||||
locator,
|
||||
);
|
||||
}
|
||||
|
||||
// COM812, COM818, COM819
|
||||
if enforce_trailing_comma {
|
||||
diagnostics.extend(
|
||||
flake8_commas::rules::trailing_commas(tokens, locator, settings)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
flake8_commas::rules::trailing_commas(&mut diagnostics, tokens, locator, settings);
|
||||
}
|
||||
|
||||
// UP034
|
||||
if enforce_extraneous_parenthesis {
|
||||
diagnostics.extend(
|
||||
pyupgrade::rules::extraneous_parentheses(tokens, locator, settings).into_iter(),
|
||||
);
|
||||
pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator, settings);
|
||||
}
|
||||
|
||||
// PYI033
|
||||
if enforce_type_comment_in_stub && is_stub {
|
||||
diagnostics.extend(flake8_pyi::rules::type_comment_in_stub(locator, indexer));
|
||||
flake8_pyi::rules::type_comment_in_stub(&mut diagnostics, locator, indexer);
|
||||
}
|
||||
|
||||
// TD001, TD002, TD003, TD004, TD005, TD006, TD007
|
||||
@@ -203,18 +189,12 @@ pub(crate) fn check_tokens(
|
||||
})
|
||||
.collect();
|
||||
|
||||
diagnostics.extend(
|
||||
flake8_todos::rules::todos(&todo_comments, locator, indexer, settings)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
flake8_todos::rules::todos(&mut diagnostics, &todo_comments, locator, indexer, settings);
|
||||
|
||||
diagnostics.extend(
|
||||
flake8_fixme::rules::todos(&todo_comments)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
flake8_fixme::rules::todos(&mut diagnostics, &todo_comments);
|
||||
}
|
||||
|
||||
diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.kind.rule()));
|
||||
|
||||
diagnostics
|
||||
}
|
||||
|
||||
@@ -14,6 +14,18 @@ use crate::rules;
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct NoqaCode(&'static str, &'static str);
|
||||
|
||||
impl NoqaCode {
|
||||
/// Return the prefix for the [`NoqaCode`], e.g., `SIM` for `SIM101`.
|
||||
pub fn prefix(&self) -> &str {
|
||||
self.0
|
||||
}
|
||||
|
||||
/// Return the suffix for the [`NoqaCode`], e.g., `101` for `SIM101`.
|
||||
pub fn suffix(&self) -> &str {
|
||||
self.1
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for NoqaCode {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
std::fmt::Display::fmt(self, f)
|
||||
@@ -156,6 +168,9 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pyflakes, "901") => (RuleGroup::Unspecified, rules::pyflakes::rules::RaiseNotImplemented),
|
||||
|
||||
// pylint
|
||||
(Pylint, "C0105") => (RuleGroup::Unspecified, rules::pylint::rules::TypeNameIncorrectVariance),
|
||||
(Pylint, "C0131") => (RuleGroup::Unspecified, rules::pylint::rules::TypeBivariance),
|
||||
(Pylint, "C0132") => (RuleGroup::Unspecified, rules::pylint::rules::TypeParamNameMismatch),
|
||||
(Pylint, "C0205") => (RuleGroup::Unspecified, rules::pylint::rules::SingleStringSlots),
|
||||
(Pylint, "C0414") => (RuleGroup::Unspecified, rules::pylint::rules::UselessImportAlias),
|
||||
(Pylint, "C1901") => (RuleGroup::Nursery, rules::pylint::rules::CompareToEmptyString),
|
||||
@@ -251,6 +266,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Bugbear, "031") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ReuseOfGroupbyGenerator),
|
||||
(Flake8Bugbear, "032") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UnintentionalTypeAnnotation),
|
||||
(Flake8Bugbear, "033") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::DuplicateValue),
|
||||
(Flake8Bugbear, "034") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ReSubPositionalArgs),
|
||||
(Flake8Bugbear, "904") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::RaiseWithoutFromInsideExcept),
|
||||
(Flake8Bugbear, "905") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ZipWithoutExplicitStrict),
|
||||
|
||||
@@ -375,8 +391,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Simplify, "401") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfElseBlockInsteadOfDictGet),
|
||||
(Flake8Simplify, "910") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::DictGetWithNoneDefault),
|
||||
|
||||
// copyright
|
||||
(Copyright, "001") => (RuleGroup::Nursery, rules::flake8_copyright::rules::MissingCopyrightNotice),
|
||||
// flake8-copyright
|
||||
(Flake8Copyright, "001") => (RuleGroup::Nursery, rules::flake8_copyright::rules::MissingCopyrightNotice),
|
||||
|
||||
// pyupgrade
|
||||
(Pyupgrade, "001") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UselessMetaclassType),
|
||||
@@ -616,6 +632,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Pyi, "024") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::CollectionsNamedTuple),
|
||||
(Flake8Pyi, "025") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnaliasedCollectionsAbcSetImport),
|
||||
(Flake8Pyi, "029") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::StrOrReprDefinedInStub),
|
||||
(Flake8Pyi, "030") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnnecessaryLiteralUnion),
|
||||
(Flake8Pyi, "032") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::AnyEqNeAnnotation),
|
||||
(Flake8Pyi, "033") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::TypeCommentInStub),
|
||||
(Flake8Pyi, "034") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::NonSelfReturnType),
|
||||
@@ -761,6 +778,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Ruff, "011") => (RuleGroup::Unspecified, rules::ruff::rules::StaticKeyDictComprehension),
|
||||
(Ruff, "012") => (RuleGroup::Unspecified, rules::ruff::rules::MutableClassDefault),
|
||||
(Ruff, "013") => (RuleGroup::Unspecified, rules::ruff::rules::ImplicitOptional),
|
||||
#[cfg(feature = "unreachable-code")]
|
||||
(Ruff, "014") => (RuleGroup::Nursery, rules::ruff::rules::UnreachableCode),
|
||||
(Ruff, "015") => (RuleGroup::Unspecified, rules::ruff::rules::UnnecessaryIterableAllocationForFirstElement),
|
||||
(Ruff, "016") => (RuleGroup::Unspecified, rules::ruff::rules::InvalidIndexType),
|
||||
(Ruff, "100") => (RuleGroup::Unspecified, rules::ruff::rules::UnusedNOQA),
|
||||
(Ruff, "200") => (RuleGroup::Unspecified, rules::ruff::rules::InvalidPyprojectToml),
|
||||
|
||||
|
||||
@@ -427,22 +427,22 @@ ghi
|
||||
NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(28))])
|
||||
);
|
||||
|
||||
let contents = r#"x = \
|
||||
1"#;
|
||||
let contents = r"x = \
|
||||
1";
|
||||
assert_eq!(
|
||||
noqa_mappings(contents),
|
||||
NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(6))])
|
||||
);
|
||||
|
||||
let contents = r#"from foo import \
|
||||
let contents = r"from foo import \
|
||||
bar as baz, \
|
||||
qux as quux"#;
|
||||
qux as quux";
|
||||
assert_eq!(
|
||||
noqa_mappings(contents),
|
||||
NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(36))])
|
||||
);
|
||||
|
||||
let contents = r#"
|
||||
let contents = r"
|
||||
# Foo
|
||||
from foo import \
|
||||
bar as baz, \
|
||||
@@ -450,7 +450,7 @@ from foo import \
|
||||
x = \
|
||||
1
|
||||
y = \
|
||||
2"#;
|
||||
2";
|
||||
assert_eq!(
|
||||
noqa_mappings(contents),
|
||||
NoqaMapping::from_iter([
|
||||
|
||||
@@ -333,7 +333,7 @@ pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec
|
||||
for selector in selectors {
|
||||
if selector
|
||||
.into_iter()
|
||||
.any(|rule| Linter::from(plugin).into_iter().any(|r| r == rule))
|
||||
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -38,6 +38,7 @@ mod rule_selector;
|
||||
pub mod rules;
|
||||
pub mod settings;
|
||||
pub mod source_kind;
|
||||
pub mod upstream_categories;
|
||||
|
||||
#[cfg(any(test, fuzzing))]
|
||||
pub mod test;
|
||||
|
||||
@@ -51,7 +51,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = AzureEmitter::default();
|
||||
let mut emitter = AzureEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_messages());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
||||
@@ -66,7 +66,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = GithubEmitter::default();
|
||||
let mut emitter = GithubEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_messages());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
||||
@@ -108,7 +108,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = JsonEmitter::default();
|
||||
let mut emitter = JsonEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_messages());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
||||
@@ -24,14 +24,14 @@ impl Emitter for JsonLinesEmitter {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::message::json_lines::JsonLinesEmitter;
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::message::json_lines::JsonLinesEmitter;
|
||||
use crate::message::tests::{capture_emitter_output, create_messages};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = JsonLinesEmitter::default();
|
||||
let mut emitter = JsonLinesEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_messages());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
||||
@@ -93,7 +93,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = JunitEmitter::default();
|
||||
let mut emitter = JunitEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_messages());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
||||
@@ -49,7 +49,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let mut emitter = PylintEmitter::default();
|
||||
let mut emitter = PylintEmitter;
|
||||
let content = capture_emitter_output(&mut emitter, &create_messages());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
||||
@@ -1,123 +1,188 @@
|
||||
use std::collections::BTreeMap;
|
||||
use std::error::Error;
|
||||
use std::fmt::{Display, Write};
|
||||
use std::fs;
|
||||
use std::ops::Add;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use log::warn;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
use rustpython_parser::ast::Ranged;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::source_code::Locator;
|
||||
use ruff_python_whitespace::{LineEnding, PythonWhitespace};
|
||||
use ruff_python_whitespace::LineEnding;
|
||||
|
||||
use crate::codes::NoqaCode;
|
||||
use crate::registry::{AsRule, Rule, RuleSet};
|
||||
use crate::rule_redirects::get_redirect_target;
|
||||
|
||||
static NOQA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
Regex::new(
|
||||
r"(?P<leading_spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>(?:[A-Z]+[0-9]+)(?:[,\s]+[A-Z]+[0-9]+)*))?)(?P<trailing_spaces>\s*)",
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
/// A directive to ignore a set of rules for a given line of Python source code (e.g.,
|
||||
/// `# noqa: F401, F841`).
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum Directive<'a> {
|
||||
None,
|
||||
// (leading spaces, noqa_range, trailing_spaces)
|
||||
All(TextSize, TextRange, TextSize),
|
||||
// (leading spaces, start_offset, end_offset, codes, trailing_spaces)
|
||||
Codes(TextSize, TextRange, Vec<&'a str>, TextSize),
|
||||
/// The `noqa` directive ignores all rules (e.g., `# noqa`).
|
||||
All(All),
|
||||
/// The `noqa` directive ignores specific rules (e.g., `# noqa: F401, F841`).
|
||||
Codes(Codes<'a>),
|
||||
}
|
||||
|
||||
/// Extract the noqa `Directive` from a line of Python source code.
|
||||
pub(crate) fn extract_noqa_directive<'a>(range: TextRange, locator: &'a Locator) -> Directive<'a> {
|
||||
let text = &locator.contents()[range];
|
||||
match NOQA_LINE_REGEX.captures(text) {
|
||||
Some(caps) => match (
|
||||
caps.name("leading_spaces"),
|
||||
caps.name("noqa"),
|
||||
caps.name("codes"),
|
||||
caps.name("trailing_spaces"),
|
||||
) {
|
||||
(Some(leading_spaces), Some(noqa), Some(codes), Some(trailing_spaces)) => {
|
||||
let codes = codes
|
||||
.as_str()
|
||||
.split(|c: char| c.is_whitespace() || c == ',')
|
||||
.map(str::trim)
|
||||
.filter(|code| !code.is_empty())
|
||||
.collect_vec();
|
||||
let start = range.start() + TextSize::try_from(noqa.start()).unwrap();
|
||||
if codes.is_empty() {
|
||||
#[allow(deprecated)]
|
||||
let line = locator.compute_line_index(start);
|
||||
warn!("Expected rule codes on `noqa` directive: \"{line}\"");
|
||||
}
|
||||
Directive::Codes(
|
||||
leading_spaces.as_str().text_len(),
|
||||
TextRange::at(start, noqa.as_str().text_len()),
|
||||
codes,
|
||||
trailing_spaces.as_str().text_len(),
|
||||
)
|
||||
impl<'a> Directive<'a> {
|
||||
/// Extract the noqa `Directive` from a line of Python source code.
|
||||
pub(crate) fn try_extract(text: &'a str, offset: TextSize) -> Result<Option<Self>, ParseError> {
|
||||
for (char_index, char) in text.char_indices() {
|
||||
// Only bother checking for the `noqa` literal if the character is `n` or `N`.
|
||||
if !matches!(char, 'n' | 'N') {
|
||||
continue;
|
||||
}
|
||||
|
||||
(Some(leading_spaces), Some(noqa), None, Some(trailing_spaces)) => Directive::All(
|
||||
leading_spaces.as_str().text_len(),
|
||||
TextRange::at(
|
||||
range.start() + TextSize::try_from(noqa.start()).unwrap(),
|
||||
noqa.as_str().text_len(),
|
||||
),
|
||||
trailing_spaces.as_str().text_len(),
|
||||
),
|
||||
_ => Directive::None,
|
||||
},
|
||||
None => Directive::None,
|
||||
}
|
||||
}
|
||||
|
||||
enum ParsedExemption<'a> {
|
||||
None,
|
||||
All,
|
||||
Codes(Vec<&'a str>),
|
||||
}
|
||||
|
||||
/// Return a [`ParsedExemption`] for a given comment line.
|
||||
fn parse_file_exemption(line: &str) -> ParsedExemption {
|
||||
let line = line.trim_whitespace_start();
|
||||
|
||||
if line.starts_with("# flake8: noqa")
|
||||
|| line.starts_with("# flake8: NOQA")
|
||||
|| line.starts_with("# flake8: NoQA")
|
||||
{
|
||||
return ParsedExemption::All;
|
||||
}
|
||||
|
||||
if let Some(remainder) = line
|
||||
.strip_prefix("# ruff: noqa")
|
||||
.or_else(|| line.strip_prefix("# ruff: NOQA"))
|
||||
.or_else(|| line.strip_prefix("# ruff: NoQA"))
|
||||
{
|
||||
if remainder.is_empty() {
|
||||
return ParsedExemption::All;
|
||||
} else if let Some(codes) = remainder.strip_prefix(':') {
|
||||
let codes = codes
|
||||
.split(|c: char| c.is_whitespace() || c == ',')
|
||||
.map(str::trim)
|
||||
.filter(|code| !code.is_empty())
|
||||
.collect_vec();
|
||||
if codes.is_empty() {
|
||||
warn!("Expected rule codes on `noqa` directive: \"{line}\"");
|
||||
// Determine the start of the `noqa` literal.
|
||||
if !matches!(
|
||||
text[char_index..].as_bytes(),
|
||||
[b'n' | b'N', b'o' | b'O', b'q' | b'Q', b'a' | b'A', ..]
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
return ParsedExemption::Codes(codes);
|
||||
|
||||
let noqa_literal_start = char_index;
|
||||
let noqa_literal_end = noqa_literal_start + "noqa".len();
|
||||
|
||||
// Determine the start of the comment.
|
||||
let mut comment_start = noqa_literal_start;
|
||||
|
||||
// Trim any whitespace between the `#` character and the `noqa` literal.
|
||||
comment_start = text[..comment_start].trim_end().len();
|
||||
|
||||
// The next character has to be the `#` character.
|
||||
if text[..comment_start]
|
||||
.chars()
|
||||
.last()
|
||||
.map_or(false, |c| c != '#')
|
||||
{
|
||||
continue;
|
||||
}
|
||||
comment_start -= '#'.len_utf8();
|
||||
|
||||
// If the next character is `:`, then it's a list of codes. Otherwise, it's a directive
|
||||
// to ignore all rules.
|
||||
return Ok(Some(
|
||||
if text[noqa_literal_end..]
|
||||
.chars()
|
||||
.next()
|
||||
.map_or(false, |c| c == ':')
|
||||
{
|
||||
// E.g., `# noqa: F401, F841`.
|
||||
let mut codes_start = noqa_literal_end;
|
||||
|
||||
// Skip the `:` character.
|
||||
codes_start += ':'.len_utf8();
|
||||
|
||||
// Skip any whitespace between the `:` and the codes.
|
||||
codes_start += text[codes_start..]
|
||||
.find(|c: char| !c.is_whitespace())
|
||||
.unwrap_or(0);
|
||||
|
||||
// Extract the comma-separated list of codes.
|
||||
let mut codes = vec![];
|
||||
let mut codes_end = codes_start;
|
||||
let mut leading_space = 0;
|
||||
while let Some(code) = Self::lex_code(&text[codes_end + leading_space..]) {
|
||||
codes.push(code);
|
||||
codes_end += leading_space;
|
||||
codes_end += code.len();
|
||||
|
||||
// Codes can be comma- or whitespace-delimited. Compute the length of the
|
||||
// delimiter, but only add it in the next iteration, once we find the next
|
||||
// code.
|
||||
if let Some(space_between) =
|
||||
text[codes_end..].find(|c: char| !(c.is_whitespace() || c == ','))
|
||||
{
|
||||
leading_space = space_between;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If we didn't identify any codes, warn.
|
||||
if codes.is_empty() {
|
||||
return Err(ParseError::MissingCodes);
|
||||
}
|
||||
|
||||
let range = TextRange::new(
|
||||
TextSize::try_from(comment_start).unwrap(),
|
||||
TextSize::try_from(codes_end).unwrap(),
|
||||
);
|
||||
|
||||
Self::Codes(Codes {
|
||||
range: range.add(offset),
|
||||
codes,
|
||||
})
|
||||
} else {
|
||||
// E.g., `# noqa`.
|
||||
let range = TextRange::new(
|
||||
TextSize::try_from(comment_start).unwrap(),
|
||||
TextSize::try_from(noqa_literal_end).unwrap(),
|
||||
);
|
||||
Self::All(All {
|
||||
range: range.add(offset),
|
||||
})
|
||||
},
|
||||
));
|
||||
}
|
||||
warn!("Unexpected suffix on `noqa` directive: \"{line}\"");
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
ParsedExemption::None
|
||||
/// Lex an individual rule code (e.g., `F401`).
|
||||
#[inline]
|
||||
fn lex_code(line: &str) -> Option<&str> {
|
||||
// Extract, e.g., the `F` in `F401`.
|
||||
let prefix = line.chars().take_while(char::is_ascii_uppercase).count();
|
||||
// Extract, e.g., the `401` in `F401`.
|
||||
let suffix = line[prefix..]
|
||||
.chars()
|
||||
.take_while(char::is_ascii_digit)
|
||||
.count();
|
||||
if prefix > 0 && suffix > 0 {
|
||||
Some(&line[..prefix + suffix])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct All {
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl Ranged for All {
|
||||
/// The range of the `noqa` directive.
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Codes<'a> {
|
||||
range: TextRange,
|
||||
codes: Vec<&'a str>,
|
||||
}
|
||||
|
||||
impl Codes<'_> {
|
||||
/// The codes that are ignored by the `noqa` directive.
|
||||
pub(crate) fn codes(&self) -> &[&str] {
|
||||
&self.codes
|
||||
}
|
||||
}
|
||||
|
||||
impl Ranged for Codes<'_> {
|
||||
/// The range of the `noqa` directive.
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the string list of `codes` includes `code` (or an alias
|
||||
@@ -138,50 +203,230 @@ pub(crate) fn rule_is_ignored(
|
||||
) -> bool {
|
||||
let offset = noqa_line_for.resolve(offset);
|
||||
let line_range = locator.line_range(offset);
|
||||
match extract_noqa_directive(line_range, locator) {
|
||||
Directive::None => false,
|
||||
Directive::All(..) => true,
|
||||
Directive::Codes(.., codes, _) => includes(code, &codes),
|
||||
match Directive::try_extract(locator.slice(line_range), line_range.start()) {
|
||||
Ok(Some(Directive::All(_))) => true,
|
||||
Ok(Some(Directive::Codes(Codes { codes, range: _ }))) => includes(code, &codes),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// The file-level exemptions extracted from a given Python file.
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum FileExemption {
|
||||
None,
|
||||
/// The file is exempt from all rules.
|
||||
All,
|
||||
/// The file is exempt from the given rules.
|
||||
Codes(Vec<NoqaCode>),
|
||||
}
|
||||
|
||||
/// Extract the [`FileExemption`] for a given Python source file, enumerating any rules that are
|
||||
/// globally ignored within the file.
|
||||
pub(crate) fn file_exemption(contents: &str, comment_ranges: &[TextRange]) -> FileExemption {
|
||||
let mut exempt_codes: Vec<NoqaCode> = vec![];
|
||||
impl FileExemption {
|
||||
/// Extract the [`FileExemption`] for a given Python source file, enumerating any rules that are
|
||||
/// globally ignored within the file.
|
||||
pub(crate) fn try_extract(
|
||||
contents: &str,
|
||||
comment_ranges: &[TextRange],
|
||||
locator: &Locator,
|
||||
) -> Option<Self> {
|
||||
let mut exempt_codes: Vec<NoqaCode> = vec![];
|
||||
|
||||
for range in comment_ranges {
|
||||
match parse_file_exemption(&contents[*range]) {
|
||||
ParsedExemption::All => {
|
||||
return FileExemption::All;
|
||||
for range in comment_ranges {
|
||||
match ParsedFileExemption::try_extract(&contents[*range]) {
|
||||
Err(err) => {
|
||||
#[allow(deprecated)]
|
||||
let line = locator.compute_line_index(range.start());
|
||||
warn!("Invalid `# noqa` directive on line {line}: {err}");
|
||||
}
|
||||
Ok(Some(ParsedFileExemption::All)) => {
|
||||
return Some(Self::All);
|
||||
}
|
||||
Ok(Some(ParsedFileExemption::Codes(codes))) => {
|
||||
exempt_codes.extend(codes.into_iter().filter_map(|code| {
|
||||
if let Ok(rule) = Rule::from_code(get_redirect_target(code).unwrap_or(code))
|
||||
{
|
||||
Some(rule.noqa_code())
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
let line = locator.compute_line_index(range.start());
|
||||
warn!("Invalid code provided to `# ruff: noqa` on line {line}: {code}");
|
||||
None
|
||||
}
|
||||
}));
|
||||
}
|
||||
Ok(None) => {}
|
||||
}
|
||||
ParsedExemption::Codes(codes) => {
|
||||
exempt_codes.extend(codes.into_iter().filter_map(|code| {
|
||||
if let Ok(rule) = Rule::from_code(get_redirect_target(code).unwrap_or(code)) {
|
||||
Some(rule.noqa_code())
|
||||
} else {
|
||||
warn!("Invalid code provided to `# ruff: noqa`: {}", code);
|
||||
None
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
if exempt_codes.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(Self::Codes(exempt_codes))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An individual file-level exemption (e.g., `# ruff: noqa` or `# ruff: noqa: F401, F841`). Like
|
||||
/// [`FileExemption`], but only for a single line, as opposed to an aggregated set of exemptions
|
||||
/// across a source file.
|
||||
#[derive(Debug)]
|
||||
enum ParsedFileExemption<'a> {
|
||||
/// The file-level exemption ignores all rules (e.g., `# ruff: noqa`).
|
||||
All,
|
||||
/// The file-level exemption ignores specific rules (e.g., `# ruff: noqa: F401, F841`).
|
||||
Codes(Vec<&'a str>),
|
||||
}
|
||||
|
||||
impl<'a> ParsedFileExemption<'a> {
|
||||
/// Return a [`ParsedFileExemption`] for a given comment line.
|
||||
fn try_extract(line: &'a str) -> Result<Option<Self>, ParseError> {
|
||||
let line = Self::lex_whitespace(line);
|
||||
let Some(line) = Self::lex_char(line, '#') else {
|
||||
return Ok(None);
|
||||
};
|
||||
let line = Self::lex_whitespace(line);
|
||||
|
||||
let Some(line) = Self::lex_flake8(line).or_else(|| Self::lex_ruff(line)) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let line = Self::lex_whitespace(line);
|
||||
let Some(line) = Self::lex_char(line, ':') else {
|
||||
return Ok(None);
|
||||
};
|
||||
let line = Self::lex_whitespace(line);
|
||||
let Some(line) = Self::lex_noqa(line) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let line = Self::lex_whitespace(line);
|
||||
|
||||
Ok(Some(if line.is_empty() {
|
||||
// Ex) `# ruff: noqa`
|
||||
Self::All
|
||||
} else {
|
||||
// Ex) `# ruff: noqa: F401, F841`
|
||||
let Some(line) = Self::lex_char(line, ':') else {
|
||||
return Err(ParseError::InvalidSuffix);
|
||||
};
|
||||
let line = Self::lex_whitespace(line);
|
||||
|
||||
// Extract the codes from the line (e.g., `F401, F841`).
|
||||
let mut codes = vec![];
|
||||
let mut line = line;
|
||||
while let Some(code) = Self::lex_code(line) {
|
||||
codes.push(code);
|
||||
line = &line[code.len()..];
|
||||
|
||||
// Codes can be comma- or whitespace-delimited.
|
||||
if let Some(rest) = Self::lex_delimiter(line).map(Self::lex_whitespace) {
|
||||
line = rest;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
ParsedExemption::None => {}
|
||||
|
||||
// If we didn't identify any codes, warn.
|
||||
if codes.is_empty() {
|
||||
return Err(ParseError::MissingCodes);
|
||||
}
|
||||
|
||||
Self::Codes(codes)
|
||||
}))
|
||||
}
|
||||
|
||||
/// Lex optional leading whitespace.
|
||||
#[inline]
|
||||
fn lex_whitespace(line: &str) -> &str {
|
||||
line.trim_start()
|
||||
}
|
||||
|
||||
/// Lex a specific character, or return `None` if the character is not the first character in
|
||||
/// the line.
|
||||
#[inline]
|
||||
fn lex_char(line: &str, c: char) -> Option<&str> {
|
||||
let mut chars = line.chars();
|
||||
if chars.next() == Some(c) {
|
||||
Some(chars.as_str())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
if exempt_codes.is_empty() {
|
||||
FileExemption::None
|
||||
} else {
|
||||
FileExemption::Codes(exempt_codes)
|
||||
/// Lex the "flake8" prefix of a `noqa` directive.
|
||||
#[inline]
|
||||
fn lex_flake8(line: &str) -> Option<&str> {
|
||||
line.strip_prefix("flake8")
|
||||
}
|
||||
|
||||
/// Lex the "ruff" prefix of a `noqa` directive.
|
||||
#[inline]
|
||||
fn lex_ruff(line: &str) -> Option<&str> {
|
||||
line.strip_prefix("ruff")
|
||||
}
|
||||
|
||||
/// Lex a `noqa` directive with case-insensitive matching.
|
||||
#[inline]
|
||||
fn lex_noqa(line: &str) -> Option<&str> {
|
||||
match line.as_bytes() {
|
||||
[b'n' | b'N', b'o' | b'O', b'q' | b'Q', b'a' | b'A', ..] => Some(&line["noqa".len()..]),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Lex a code delimiter, which can either be a comma or whitespace.
|
||||
#[inline]
|
||||
fn lex_delimiter(line: &str) -> Option<&str> {
|
||||
let mut chars = line.chars();
|
||||
if let Some(c) = chars.next() {
|
||||
if c == ',' || c.is_whitespace() {
|
||||
Some(chars.as_str())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Lex an individual rule code (e.g., `F401`).
|
||||
#[inline]
|
||||
fn lex_code(line: &str) -> Option<&str> {
|
||||
// Extract, e.g., the `F` in `F401`.
|
||||
let prefix = line.chars().take_while(char::is_ascii_uppercase).count();
|
||||
// Extract, e.g., the `401` in `F401`.
|
||||
let suffix = line[prefix..]
|
||||
.chars()
|
||||
.take_while(char::is_ascii_digit)
|
||||
.count();
|
||||
if prefix > 0 && suffix > 0 {
|
||||
Some(&line[..prefix + suffix])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The result of an [`Importer::get_or_import_symbol`] call.
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum ParseError {
|
||||
/// The `noqa` directive was missing valid codes (e.g., `# noqa: unused-import` instead of `# noqa: F401`).
|
||||
MissingCodes,
|
||||
/// The `noqa` directive used an invalid suffix (e.g., `# noqa; F401` instead of `# noqa: F401`).
|
||||
InvalidSuffix,
|
||||
}
|
||||
|
||||
impl Display for ParseError {
|
||||
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ParseError::MissingCodes => fmt.write_str("expected a comma-separated list of codes (e.g., `# noqa: F401, F841`)."),
|
||||
ParseError::InvalidSuffix => {
|
||||
fmt.write_str("expected `:` followed by a comma-separated list of codes (e.g., `# noqa: F401, F841`).")
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for ParseError {}
|
||||
|
||||
/// Adds noqa comments to suppress all diagnostics of a file.
|
||||
pub(crate) fn add_noqa(
|
||||
path: &Path,
|
||||
@@ -215,23 +460,23 @@ fn add_noqa_inner(
|
||||
|
||||
// Whether the file is exempted from all checks.
|
||||
// Codes that are globally exempted (within the current file).
|
||||
let exemption = file_exemption(locator.contents(), commented_ranges);
|
||||
let exemption = FileExemption::try_extract(locator.contents(), commented_ranges, locator);
|
||||
let directives = NoqaDirectives::from_commented_ranges(commented_ranges, locator);
|
||||
|
||||
// Mark any non-ignored diagnostics.
|
||||
for diagnostic in diagnostics {
|
||||
match &exemption {
|
||||
FileExemption::All => {
|
||||
Some(FileExemption::All) => {
|
||||
// If the file is exempted, don't add any noqa directives.
|
||||
continue;
|
||||
}
|
||||
FileExemption::Codes(codes) => {
|
||||
Some(FileExemption::Codes(codes)) => {
|
||||
// If the diagnostic is ignored by a global exemption, don't add a noqa directive.
|
||||
if codes.contains(&diagnostic.kind.rule().noqa_code()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
FileExemption::None => {}
|
||||
None => {}
|
||||
}
|
||||
|
||||
// Is the violation ignored by a `noqa` directive on the parent line?
|
||||
@@ -240,28 +485,27 @@ fn add_noqa_inner(
|
||||
directives.find_line_with_directive(noqa_line_for.resolve(parent))
|
||||
{
|
||||
match &directive_line.directive {
|
||||
Directive::All(..) => {
|
||||
Directive::All(_) => {
|
||||
continue;
|
||||
}
|
||||
Directive::Codes(.., codes, _) => {
|
||||
Directive::Codes(Codes { codes, range: _ }) => {
|
||||
if includes(diagnostic.kind.rule(), codes) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
Directive::None => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let noqa_offset = noqa_line_for.resolve(diagnostic.start());
|
||||
|
||||
// Or ignored by the directive itself
|
||||
// Or ignored by the directive itself?
|
||||
if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
|
||||
match &directive_line.directive {
|
||||
Directive::All(..) => {
|
||||
Directive::All(_) => {
|
||||
continue;
|
||||
}
|
||||
Directive::Codes(.., codes, _) => {
|
||||
Directive::Codes(Codes { codes, range: _ }) => {
|
||||
let rule = diagnostic.kind.rule();
|
||||
if !includes(rule, codes) {
|
||||
matches_by_line
|
||||
@@ -274,7 +518,6 @@ fn add_noqa_inner(
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Directive::None => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -296,7 +539,7 @@ fn add_noqa_inner(
|
||||
let line = locator.full_line(offset);
|
||||
|
||||
match directive {
|
||||
None | Some(Directive::None) => {
|
||||
None => {
|
||||
// Add existing content.
|
||||
output.push_str(line.trim_end());
|
||||
|
||||
@@ -308,10 +551,10 @@ fn add_noqa_inner(
|
||||
output.push_str(&line_ending);
|
||||
count += 1;
|
||||
}
|
||||
Some(Directive::All(..)) => {
|
||||
Some(Directive::All(_)) => {
|
||||
// Does not get inserted into the map.
|
||||
}
|
||||
Some(Directive::Codes(_, noqa_range, existing, _)) => {
|
||||
Some(Directive::Codes(Codes { range, codes })) => {
|
||||
// Reconstruct the line based on the preserved rule codes.
|
||||
// This enables us to tally the number of edits.
|
||||
let output_start = output.len();
|
||||
@@ -319,7 +562,7 @@ fn add_noqa_inner(
|
||||
// Add existing content.
|
||||
output.push_str(
|
||||
locator
|
||||
.slice(TextRange::new(offset, noqa_range.start()))
|
||||
.slice(TextRange::new(offset, range.start()))
|
||||
.trim_end(),
|
||||
);
|
||||
|
||||
@@ -331,8 +574,8 @@ fn add_noqa_inner(
|
||||
&mut output,
|
||||
rules
|
||||
.iter()
|
||||
.map(|r| r.noqa_code().to_string())
|
||||
.chain(existing.iter().map(ToString::to_string))
|
||||
.map(|rule| rule.noqa_code().to_string())
|
||||
.chain(codes.iter().map(ToString::to_string))
|
||||
.sorted_unstable(),
|
||||
);
|
||||
|
||||
@@ -366,9 +609,11 @@ fn push_codes<I: Display>(str: &mut String, codes: impl Iterator<Item = I>) {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct NoqaDirectiveLine<'a> {
|
||||
// The range of the text line for which the noqa directive applies.
|
||||
/// The range of the text line for which the noqa directive applies.
|
||||
pub(crate) range: TextRange,
|
||||
/// The noqa directive.
|
||||
pub(crate) directive: Directive<'a>,
|
||||
/// The codes that are ignored by the directive.
|
||||
pub(crate) matches: Vec<NoqaCode>,
|
||||
}
|
||||
|
||||
@@ -384,21 +629,23 @@ impl<'a> NoqaDirectives<'a> {
|
||||
) -> Self {
|
||||
let mut directives = Vec::new();
|
||||
|
||||
for comment_range in comment_ranges {
|
||||
let line_range = locator.line_range(comment_range.start());
|
||||
let directive = match extract_noqa_directive(line_range, locator) {
|
||||
Directive::None => {
|
||||
continue;
|
||||
for range in comment_ranges {
|
||||
match Directive::try_extract(locator.slice(*range), range.start()) {
|
||||
Err(err) => {
|
||||
#[allow(deprecated)]
|
||||
let line = locator.compute_line_index(range.start());
|
||||
warn!("Invalid `# noqa` directive on line {line}: {err}");
|
||||
}
|
||||
directive @ (Directive::All(..) | Directive::Codes(..)) => directive,
|
||||
};
|
||||
|
||||
// noqa comments are guaranteed to be single line.
|
||||
directives.push(NoqaDirectiveLine {
|
||||
range: line_range,
|
||||
directive,
|
||||
matches: Vec::new(),
|
||||
});
|
||||
Ok(Some(directive)) => {
|
||||
// noqa comments are guaranteed to be single line.
|
||||
directives.push(NoqaDirectiveLine {
|
||||
range: locator.line_range(range.start()),
|
||||
directive,
|
||||
matches: Vec::new(),
|
||||
});
|
||||
}
|
||||
Ok(None) => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Extend a mapping at the end of the file to also include the EOF token.
|
||||
@@ -460,7 +707,7 @@ impl NoqaMapping {
|
||||
}
|
||||
|
||||
/// Returns the re-mapped position or `position` if no mapping exists.
|
||||
pub fn resolve(&self, offset: TextSize) -> TextSize {
|
||||
pub(crate) fn resolve(&self, offset: TextSize) -> TextSize {
|
||||
let index = self.ranges.binary_search_by(|range| {
|
||||
if range.end() < offset {
|
||||
std::cmp::Ordering::Less
|
||||
@@ -478,7 +725,7 @@ impl NoqaMapping {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_mapping(&mut self, range: TextRange) {
|
||||
pub(crate) fn push_mapping(&mut self, range: TextRange) {
|
||||
if let Some(last_range) = self.ranges.last_mut() {
|
||||
// Strictly sorted insertion
|
||||
if last_range.end() <= range.start() {
|
||||
@@ -511,28 +758,190 @@ impl FromIterator<TextRange> for NoqaMapping {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_debug_snapshot;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::source_code::Locator;
|
||||
use ruff_python_whitespace::LineEnding;
|
||||
|
||||
use crate::noqa::{add_noqa_inner, NoqaMapping, NOQA_LINE_REGEX};
|
||||
use crate::noqa::{add_noqa_inner, Directive, NoqaMapping, ParsedFileExemption};
|
||||
use crate::rules::pycodestyle::rules::AmbiguousVariableName;
|
||||
use crate::rules::pyflakes;
|
||||
use crate::rules::pyflakes::rules::UnusedVariable;
|
||||
|
||||
#[test]
|
||||
fn regex() {
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# NoQA"));
|
||||
fn noqa_all() {
|
||||
let source = "# noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa: F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# NoQA: F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa: F401, E501"));
|
||||
#[test]
|
||||
fn noqa_code() {
|
||||
let source = "# noqa: F401";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa:F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# NoQA:F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa:F401, E501"));
|
||||
#[test]
|
||||
fn noqa_codes() {
|
||||
let source = "# noqa: F401, F841";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_all_case_insensitive() {
|
||||
let source = "# NOQA";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_code_case_insensitive() {
|
||||
let source = "# NOQA: F401";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_codes_case_insensitive() {
|
||||
let source = "# NOQA: F401, F841";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_leading_space() {
|
||||
let source = "# # noqa: F401";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_trailing_space() {
|
||||
let source = "# noqa: F401 #";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_all_no_space() {
|
||||
let source = "#noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_code_no_space() {
|
||||
let source = "#noqa:F401";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_codes_no_space() {
|
||||
let source = "#noqa:F401,F841";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_all_multi_space() {
|
||||
let source = "# noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_code_multi_space() {
|
||||
let source = "# noqa: F401";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_codes_multi_space() {
|
||||
let source = "# noqa: F401, F841";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_all_leading_comment() {
|
||||
let source = "# Some comment describing the noqa # noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_code_leading_comment() {
|
||||
let source = "# Some comment describing the noqa # noqa: F401";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_codes_leading_comment() {
|
||||
let source = "# Some comment describing the noqa # noqa: F401, F841";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_all_trailing_comment() {
|
||||
let source = "# noqa # Some comment describing the noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_code_trailing_comment() {
|
||||
let source = "# noqa: F401 # Some comment describing the noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_codes_trailing_comment() {
|
||||
let source = "# noqa: F401, F841 # Some comment describing the noqa";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn noqa_invalid_codes() {
|
||||
let source = "# noqa: unused-import, F401, some other code";
|
||||
assert_debug_snapshot!(Directive::try_extract(source, TextSize::default()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flake8_exemption_all() {
|
||||
let source = "# flake8: noqa";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ruff_exemption_all() {
|
||||
let source = "# ruff: noqa";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flake8_exemption_all_no_space() {
|
||||
let source = "#flake8:noqa";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ruff_exemption_all_no_space() {
|
||||
let source = "#ruff:noqa";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flake8_exemption_codes() {
|
||||
// Note: Flake8 doesn't support this; it's treated as a blanket exemption.
|
||||
let source = "# flake8: noqa: F401, F841";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ruff_exemption_codes() {
|
||||
let source = "# ruff: noqa: F401, F841";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flake8_exemption_all_case_insensitive() {
|
||||
let source = "# flake8: NoQa";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ruff_exemption_all_case_insensitive() {
|
||||
let source = "# ruff: NoQa";
|
||||
assert_debug_snapshot!(ParsedFileExemption::try_extract(source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -550,7 +959,7 @@ mod tests {
|
||||
assert_eq!(output, format!("{contents}"));
|
||||
|
||||
let diagnostics = [Diagnostic::new(
|
||||
pyflakes::rules::UnusedVariable {
|
||||
UnusedVariable {
|
||||
name: "x".to_string(),
|
||||
},
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
@@ -574,7 +983,7 @@ mod tests {
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
),
|
||||
Diagnostic::new(
|
||||
pyflakes::rules::UnusedVariable {
|
||||
UnusedVariable {
|
||||
name: "x".to_string(),
|
||||
},
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
@@ -598,7 +1007,7 @@ mod tests {
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
),
|
||||
Diagnostic::new(
|
||||
pyflakes::rules::UnusedVariable {
|
||||
UnusedVariable {
|
||||
name: "x".to_string(),
|
||||
},
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
|
||||
@@ -7,7 +7,9 @@ use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::source_code::SourceFile;
|
||||
|
||||
use crate::message::Message;
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::ruff::rules::InvalidPyprojectToml;
|
||||
use crate::settings::Settings;
|
||||
use crate::IOError;
|
||||
|
||||
/// Unlike [`pyproject_toml::PyProjectToml`], in our case `build_system` is also optional
|
||||
@@ -20,9 +22,11 @@ struct PyProjectToml {
|
||||
project: Option<Project>,
|
||||
}
|
||||
|
||||
pub fn lint_pyproject_toml(source_file: SourceFile) -> Result<Vec<Message>> {
|
||||
pub fn lint_pyproject_toml(source_file: SourceFile, settings: &Settings) -> Result<Vec<Message>> {
|
||||
let mut messages = vec![];
|
||||
|
||||
let err = match toml::from_str::<PyProjectToml>(source_file.source_text()) {
|
||||
Ok(_) => return Ok(Vec::default()),
|
||||
Ok(_) => return Ok(messages),
|
||||
Err(err) => err,
|
||||
};
|
||||
|
||||
@@ -32,17 +36,20 @@ pub fn lint_pyproject_toml(source_file: SourceFile) -> Result<Vec<Message>> {
|
||||
None => TextRange::default(),
|
||||
Some(range) => {
|
||||
let Ok(end) = TextSize::try_from(range.end) else {
|
||||
let diagnostic = Diagnostic::new(
|
||||
IOError {
|
||||
message: "pyproject.toml is larger than 4GB".to_string(),
|
||||
},
|
||||
TextRange::default(),
|
||||
);
|
||||
return Ok(vec![Message::from_diagnostic(
|
||||
diagnostic,
|
||||
source_file,
|
||||
TextSize::default(),
|
||||
)]);
|
||||
if settings.rules.enabled(Rule::IOError) {
|
||||
let diagnostic = Diagnostic::new(
|
||||
IOError {
|
||||
message: "pyproject.toml is larger than 4GB".to_string(),
|
||||
},
|
||||
TextRange::default(),
|
||||
);
|
||||
messages.push(Message::from_diagnostic(
|
||||
diagnostic,
|
||||
source_file,
|
||||
TextSize::default(),
|
||||
));
|
||||
}
|
||||
return Ok(messages);
|
||||
};
|
||||
TextRange::new(
|
||||
// start <= end, so if end < 4GB follows start < 4GB
|
||||
@@ -52,11 +59,15 @@ pub fn lint_pyproject_toml(source_file: SourceFile) -> Result<Vec<Message>> {
|
||||
}
|
||||
};
|
||||
|
||||
let toml_err = err.message().to_string();
|
||||
let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
|
||||
Ok(vec![Message::from_diagnostic(
|
||||
diagnostic,
|
||||
source_file,
|
||||
TextSize::default(),
|
||||
)])
|
||||
if settings.rules.enabled(Rule::InvalidPyprojectToml) {
|
||||
let toml_err = err.message().to_string();
|
||||
let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
|
||||
messages.push(Message::from_diagnostic(
|
||||
diagnostic,
|
||||
source_file,
|
||||
TextSize::default(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(messages)
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ pub use codes::Rule;
|
||||
use ruff_macros::RuleNamespace;
|
||||
pub use rule_set::{RuleSet, RuleSetIterator};
|
||||
|
||||
use crate::codes::{self, RuleCodePrefix};
|
||||
use crate::codes::{self};
|
||||
|
||||
mod rule_set;
|
||||
|
||||
@@ -18,8 +18,10 @@ pub trait AsRule {
|
||||
impl Rule {
|
||||
pub fn from_code(code: &str) -> Result<Self, FromCodeError> {
|
||||
let (linter, code) = Linter::parse_code(code).ok_or(FromCodeError::Unknown)?;
|
||||
let prefix: RuleCodePrefix = RuleCodePrefix::parse(&linter, code)?;
|
||||
Ok(prefix.into_iter().next().unwrap())
|
||||
linter
|
||||
.all_rules()
|
||||
.find(|rule| rule.noqa_code().suffix() == code)
|
||||
.ok_or(FromCodeError::Unknown)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,9 +82,9 @@ pub enum Linter {
|
||||
/// [flake8-commas](https://pypi.org/project/flake8-commas/)
|
||||
#[prefix = "COM"]
|
||||
Flake8Commas,
|
||||
/// Copyright-related rules
|
||||
/// [flake8-copyright](https://pypi.org/project/flake8-copyright/)
|
||||
#[prefix = "CPY"]
|
||||
Copyright,
|
||||
Flake8Copyright,
|
||||
/// [flake8-comprehensions](https://pypi.org/project/flake8-comprehensions/)
|
||||
#[prefix = "C4"]
|
||||
Flake8Comprehensions,
|
||||
@@ -110,7 +112,7 @@ pub enum Linter {
|
||||
/// [flake8-import-conventions](https://github.com/joaopalmeiro/flake8-import-conventions)
|
||||
#[prefix = "ICN"]
|
||||
Flake8ImportConventions,
|
||||
/// [flake8-logging-format](https://pypi.org/project/flake8-logging-format/0.9.0/)
|
||||
/// [flake8-logging-format](https://pypi.org/project/flake8-logging-format/)
|
||||
#[prefix = "G"]
|
||||
Flake8LoggingFormat,
|
||||
/// [flake8-no-pep420](https://pypi.org/project/flake8-no-pep420/)
|
||||
@@ -179,7 +181,7 @@ pub enum Linter {
|
||||
/// [Pylint](https://pypi.org/project/pylint/)
|
||||
#[prefix = "PL"]
|
||||
Pylint,
|
||||
/// [tryceratops](https://pypi.org/project/tryceratops/1.1.0/)
|
||||
/// [tryceratops](https://pypi.org/project/tryceratops/)
|
||||
#[prefix = "TRY"]
|
||||
Tryceratops,
|
||||
/// [flynt](https://pypi.org/project/flynt/)
|
||||
@@ -216,30 +218,6 @@ pub trait RuleNamespace: Sized {
|
||||
fn url(&self) -> Option<&'static str>;
|
||||
}
|
||||
|
||||
/// The prefix and name for an upstream linter category.
|
||||
pub struct UpstreamCategory(pub RuleCodePrefix, pub &'static str);
|
||||
|
||||
impl Linter {
|
||||
pub const fn upstream_categories(&self) -> Option<&'static [UpstreamCategory]> {
|
||||
match self {
|
||||
Linter::Pycodestyle => Some(&[
|
||||
UpstreamCategory(RuleCodePrefix::Pycodestyle(codes::Pycodestyle::E), "Error"),
|
||||
UpstreamCategory(
|
||||
RuleCodePrefix::Pycodestyle(codes::Pycodestyle::W),
|
||||
"Warning",
|
||||
),
|
||||
]),
|
||||
Linter::Pylint => Some(&[
|
||||
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::C), "Convention"),
|
||||
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::E), "Error"),
|
||||
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::R), "Refactor"),
|
||||
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::W), "Warning"),
|
||||
]),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(is_macro::Is, Copy, Clone)]
|
||||
pub enum LintSource {
|
||||
Ast,
|
||||
@@ -250,6 +228,7 @@ pub enum LintSource {
|
||||
Imports,
|
||||
Noqa,
|
||||
Filesystem,
|
||||
PyprojectToml,
|
||||
}
|
||||
|
||||
impl Rule {
|
||||
@@ -257,6 +236,7 @@ impl Rule {
|
||||
/// physical lines).
|
||||
pub const fn lint_source(&self) -> LintSource {
|
||||
match self {
|
||||
Rule::InvalidPyprojectToml => LintSource::PyprojectToml,
|
||||
Rule::UnusedNOQA => LintSource::Noqa,
|
||||
Rule::BlanketNOQA
|
||||
| Rule::BlanketTypeIgnore
|
||||
|
||||
@@ -248,8 +248,8 @@ impl Renamer {
|
||||
| BindingKind::LoopVar
|
||||
| BindingKind::Global
|
||||
| BindingKind::Nonlocal(_)
|
||||
| BindingKind::ClassDefinition
|
||||
| BindingKind::FunctionDefinition
|
||||
| BindingKind::ClassDefinition(_)
|
||||
| BindingKind::FunctionDefinition(_)
|
||||
| BindingKind::Deletion
|
||||
| BindingKind::UnboundException(_) => {
|
||||
Some(Edit::range_replacement(target.to_string(), binding.range))
|
||||
|
||||
@@ -158,16 +158,16 @@ impl IntoIterator for &RuleSelector {
|
||||
}
|
||||
RuleSelector::C => RuleSelectorIter::Chain(
|
||||
Linter::Flake8Comprehensions
|
||||
.into_iter()
|
||||
.chain(Linter::McCabe.into_iter()),
|
||||
.rules()
|
||||
.chain(Linter::McCabe.rules()),
|
||||
),
|
||||
RuleSelector::T => RuleSelectorIter::Chain(
|
||||
Linter::Flake8Debugger
|
||||
.into_iter()
|
||||
.chain(Linter::Flake8Print.into_iter()),
|
||||
.rules()
|
||||
.chain(Linter::Flake8Print.rules()),
|
||||
),
|
||||
RuleSelector::Linter(linter) => RuleSelectorIter::Vec(linter.into_iter()),
|
||||
RuleSelector::Prefix { prefix, .. } => RuleSelectorIter::Vec(prefix.into_iter()),
|
||||
RuleSelector::Linter(linter) => RuleSelectorIter::Vec(linter.rules()),
|
||||
RuleSelector::Prefix { prefix, .. } => RuleSelectorIter::Vec(prefix.clone().rules()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -346,7 +346,7 @@ mod clap_completion {
|
||||
let prefix = p.linter().common_prefix();
|
||||
let code = p.short_code();
|
||||
|
||||
let mut rules_iter = p.into_iter();
|
||||
let mut rules_iter = p.rules();
|
||||
let rule1 = rules_iter.next();
|
||||
let rule2 = rules_iter.next();
|
||||
|
||||
|
||||
@@ -48,12 +48,11 @@ fn is_standalone_comment(line: &str) -> bool {
|
||||
|
||||
/// ERA001
|
||||
pub(crate) fn commented_out_code(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
locator: &Locator,
|
||||
indexer: &Indexer,
|
||||
settings: &Settings,
|
||||
) -> Vec<Diagnostic> {
|
||||
let mut diagnostics = vec![];
|
||||
|
||||
) {
|
||||
for range in indexer.comment_ranges() {
|
||||
let line = locator.full_lines(*range);
|
||||
|
||||
@@ -69,6 +68,4 @@ pub(crate) fn commented_out_code(
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
diagnostics
|
||||
}
|
||||
|
||||
@@ -39,6 +39,7 @@ mod tests {
|
||||
#[test_case(Rule::SubprocessPopenWithShellEqualsTrue, Path::new("S602.py"))]
|
||||
#[test_case(Rule::SubprocessWithoutShellEqualsTrue, Path::new("S603.py"))]
|
||||
#[test_case(Rule::SuspiciousPickleUsage, Path::new("S301.py"))]
|
||||
#[test_case(Rule::SuspiciousEvalUsage, Path::new("S307.py"))]
|
||||
#[test_case(Rule::SuspiciousTelnetUsage, Path::new("S312.py"))]
|
||||
#[test_case(Rule::TryExceptContinue, Path::new("S112.py"))]
|
||||
#[test_case(Rule::TryExceptPass, Path::new("S110.py"))]
|
||||
|
||||
@@ -1,15 +1,39 @@
|
||||
use num_traits::ToPrimitive;
|
||||
use once_cell::sync::Lazy;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{self, Constant, Expr, Keyword, Operator, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::call_path::compose_call_path;
|
||||
use ruff_python_ast::call_path::CallPath;
|
||||
use ruff_python_ast::helpers::SimpleCallArgs;
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for files with overly permissive permissions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Overly permissive file permissions may allow unintended access and
|
||||
/// arbitrary code execution.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.chmod("/etc/secrets.txt", 0o666) # rw-rw-rw-
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.chmod("/etc/secrets.txt", 0o600) # rw-------
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `os.chmod`](https://docs.python.org/3/library/os.html#os.chmod)
|
||||
/// - [Python documentation: `stat`](https://docs.python.org/3/library/stat.html)
|
||||
/// - [Common Weakness Enumeration: CWE-732](https://cwe.mitre.org/data/definitions/732.html)
|
||||
#[violation]
|
||||
pub struct BadFilePermissions {
|
||||
mask: u16,
|
||||
@@ -19,84 +43,7 @@ impl Violation for BadFilePermissions {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let BadFilePermissions { mask } = self;
|
||||
format!("`os.chmod` setting a permissive mask `{mask:#o}` on file or directory",)
|
||||
}
|
||||
}
|
||||
|
||||
const WRITE_WORLD: u16 = 0o2;
|
||||
const EXECUTE_GROUP: u16 = 0o10;
|
||||
|
||||
static PYSTAT_MAPPING: Lazy<FxHashMap<&'static str, u16>> = Lazy::new(|| {
|
||||
FxHashMap::from_iter([
|
||||
("stat.ST_MODE", 0o0),
|
||||
("stat.S_IFDOOR", 0o0),
|
||||
("stat.S_IFPORT", 0o0),
|
||||
("stat.ST_INO", 0o1),
|
||||
("stat.S_IXOTH", 0o1),
|
||||
("stat.UF_NODUMP", 0o1),
|
||||
("stat.ST_DEV", 0o2),
|
||||
("stat.S_IWOTH", 0o2),
|
||||
("stat.UF_IMMUTABLE", 0o2),
|
||||
("stat.ST_NLINK", 0o3),
|
||||
("stat.ST_UID", 0o4),
|
||||
("stat.S_IROTH", 0o4),
|
||||
("stat.UF_APPEND", 0o4),
|
||||
("stat.ST_GID", 0o5),
|
||||
("stat.ST_SIZE", 0o6),
|
||||
("stat.ST_ATIME", 0o7),
|
||||
("stat.S_IRWXO", 0o7),
|
||||
("stat.ST_MTIME", 0o10),
|
||||
("stat.S_IXGRP", 0o10),
|
||||
("stat.UF_OPAQUE", 0o10),
|
||||
("stat.ST_CTIME", 0o11),
|
||||
("stat.S_IWGRP", 0o20),
|
||||
("stat.UF_NOUNLINK", 0o20),
|
||||
("stat.S_IRGRP", 0o40),
|
||||
("stat.UF_COMPRESSED", 0o40),
|
||||
("stat.S_IRWXG", 0o70),
|
||||
("stat.S_IEXEC", 0o100),
|
||||
("stat.S_IXUSR", 0o100),
|
||||
("stat.S_IWRITE", 0o200),
|
||||
("stat.S_IWUSR", 0o200),
|
||||
("stat.S_IREAD", 0o400),
|
||||
("stat.S_IRUSR", 0o400),
|
||||
("stat.S_IRWXU", 0o700),
|
||||
("stat.S_ISVTX", 0o1000),
|
||||
("stat.S_ISGID", 0o2000),
|
||||
("stat.S_ENFMT", 0o2000),
|
||||
("stat.S_ISUID", 0o4000),
|
||||
])
|
||||
});
|
||||
|
||||
fn get_int_value(expr: &Expr) -> Option<u16> {
|
||||
match expr {
|
||||
Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Int(value),
|
||||
..
|
||||
}) => value.to_u16(),
|
||||
Expr::Attribute(_) => {
|
||||
compose_call_path(expr).and_then(|path| PYSTAT_MAPPING.get(path.as_str()).copied())
|
||||
}
|
||||
Expr::BinOp(ast::ExprBinOp {
|
||||
left,
|
||||
op,
|
||||
right,
|
||||
range: _,
|
||||
}) => {
|
||||
if let (Some(left_value), Some(right_value)) =
|
||||
(get_int_value(left), get_int_value(right))
|
||||
{
|
||||
match op {
|
||||
Operator::BitAnd => Some(left_value & right_value),
|
||||
Operator::BitOr => Some(left_value | right_value),
|
||||
Operator::BitXor => Some(left_value ^ right_value),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
format!("`os.chmod` setting a permissive mask `{mask:#o}` on file or directory")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -116,7 +63,7 @@ pub(crate) fn bad_file_permissions(
|
||||
{
|
||||
let call_args = SimpleCallArgs::new(args, keywords);
|
||||
if let Some(mode_arg) = call_args.argument("mode", 1) {
|
||||
if let Some(int_value) = get_int_value(mode_arg) {
|
||||
if let Some(int_value) = int_value(mode_arg, checker.semantic()) {
|
||||
if (int_value & WRITE_WORLD > 0) || (int_value & EXECUTE_GROUP > 0) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
BadFilePermissions { mask: int_value },
|
||||
@@ -127,3 +74,75 @@ pub(crate) fn bad_file_permissions(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const WRITE_WORLD: u16 = 0o2;
|
||||
const EXECUTE_GROUP: u16 = 0o10;
|
||||
|
||||
fn py_stat(call_path: &CallPath) -> Option<u16> {
|
||||
match call_path.as_slice() {
|
||||
["stat", "ST_MODE"] => Some(0o0),
|
||||
["stat", "S_IFDOOR"] => Some(0o0),
|
||||
["stat", "S_IFPORT"] => Some(0o0),
|
||||
["stat", "ST_INO"] => Some(0o1),
|
||||
["stat", "S_IXOTH"] => Some(0o1),
|
||||
["stat", "UF_NODUMP"] => Some(0o1),
|
||||
["stat", "ST_DEV"] => Some(0o2),
|
||||
["stat", "S_IWOTH"] => Some(0o2),
|
||||
["stat", "UF_IMMUTABLE"] => Some(0o2),
|
||||
["stat", "ST_NLINK"] => Some(0o3),
|
||||
["stat", "ST_UID"] => Some(0o4),
|
||||
["stat", "S_IROTH"] => Some(0o4),
|
||||
["stat", "UF_APPEND"] => Some(0o4),
|
||||
["stat", "ST_GID"] => Some(0o5),
|
||||
["stat", "ST_SIZE"] => Some(0o6),
|
||||
["stat", "ST_ATIME"] => Some(0o7),
|
||||
["stat", "S_IRWXO"] => Some(0o7),
|
||||
["stat", "ST_MTIME"] => Some(0o10),
|
||||
["stat", "S_IXGRP"] => Some(0o10),
|
||||
["stat", "UF_OPAQUE"] => Some(0o10),
|
||||
["stat", "ST_CTIME"] => Some(0o11),
|
||||
["stat", "S_IWGRP"] => Some(0o20),
|
||||
["stat", "UF_NOUNLINK"] => Some(0o20),
|
||||
["stat", "S_IRGRP"] => Some(0o40),
|
||||
["stat", "UF_COMPRESSED"] => Some(0o40),
|
||||
["stat", "S_IRWXG"] => Some(0o70),
|
||||
["stat", "S_IEXEC"] => Some(0o100),
|
||||
["stat", "S_IXUSR"] => Some(0o100),
|
||||
["stat", "S_IWRITE"] => Some(0o200),
|
||||
["stat", "S_IWUSR"] => Some(0o200),
|
||||
["stat", "S_IREAD"] => Some(0o400),
|
||||
["stat", "S_IRUSR"] => Some(0o400),
|
||||
["stat", "S_IRWXU"] => Some(0o700),
|
||||
["stat", "S_ISVTX"] => Some(0o1000),
|
||||
["stat", "S_ISGID"] => Some(0o2000),
|
||||
["stat", "S_ENFMT"] => Some(0o2000),
|
||||
["stat", "S_ISUID"] => Some(0o4000),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn int_value(expr: &Expr, model: &SemanticModel) -> Option<u16> {
|
||||
match expr {
|
||||
Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Int(value),
|
||||
..
|
||||
}) => value.to_u16(),
|
||||
Expr::Attribute(_) => model.resolve_call_path(expr).as_ref().and_then(py_stat),
|
||||
Expr::BinOp(ast::ExprBinOp {
|
||||
left,
|
||||
op,
|
||||
right,
|
||||
range: _,
|
||||
}) => {
|
||||
let left_value = int_value(left, model)?;
|
||||
let right_value = int_value(right, model)?;
|
||||
match op {
|
||||
Operator::BitAnd => Some(left_value & right_value),
|
||||
Operator::BitOr => Some(left_value | right_value),
|
||||
Operator::BitXor => Some(left_value ^ right_value),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,25 @@
|
||||
use rustpython_parser::ast::{self, Expr, Ranged};
|
||||
use rustpython_parser::ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the builtin `exec` function.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `exec()` function is insecure as it allows for arbitrary code
|
||||
/// execution.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// exec("print('Hello World')")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `exec`](https://docs.python.org/3/library/functions.html#exec)
|
||||
/// - [Common Weakness Enumeration: CWE-78](https://cwe.mitre.org/data/definitions/78.html)
|
||||
#[violation]
|
||||
pub struct ExecBuiltin;
|
||||
|
||||
@@ -14,12 +31,16 @@ impl Violation for ExecBuiltin {
|
||||
}
|
||||
|
||||
/// S102
|
||||
pub(crate) fn exec_used(expr: &Expr, func: &Expr) -> Option<Diagnostic> {
|
||||
let Expr::Name(ast::ExprName { id, .. }) = func else {
|
||||
return None;
|
||||
};
|
||||
if id != "exec" {
|
||||
return None;
|
||||
pub(crate) fn exec_used(checker: &mut Checker, func: &Expr) {
|
||||
if checker
|
||||
.semantic()
|
||||
.resolve_call_path(func)
|
||||
.map_or(false, |call_path| {
|
||||
matches!(call_path.as_slice(), ["" | "builtin", "exec"])
|
||||
})
|
||||
{
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(ExecBuiltin, func.range()));
|
||||
}
|
||||
Some(Diagnostic::new(ExecBuiltin, expr.range()))
|
||||
}
|
||||
|
||||
@@ -3,6 +3,27 @@ use ruff_text_size::TextRange;
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for hardcoded bindings to all network interfaces (`0.0.0.0`).
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Binding to all network interfaces is insecure as it allows access from
|
||||
/// unintended interfaces, which may be poorly secured or unauthorized.
|
||||
///
|
||||
/// Instead, bind to specific interfaces.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// ALLOWED_HOSTS = ["0.0.0.0"]
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// ALLOWED_HOSTS = ["127.0.0.1", "localhost"]
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-200](https://cwe.mitre.org/data/definitions/200.html)
|
||||
#[violation]
|
||||
pub struct HardcodedBindAllInterfaces;
|
||||
|
||||
|
||||
@@ -1,11 +1,42 @@
|
||||
use rustpython_parser::ast::{Arg, ArgWithDefault, Arguments, Expr, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for potential uses of hardcoded passwords in function argument
|
||||
/// defaults.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Including a hardcoded password in source code is a security risk, as an
|
||||
/// attacker could discover the password and use it to gain unauthorized
|
||||
/// access.
|
||||
///
|
||||
/// Instead, store passwords and other secrets in configuration files,
|
||||
/// environment variables, or other sources that are excluded from version
|
||||
/// control.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// def connect_to_server(password="hunter2"):
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
///
|
||||
/// def connect_to_server(password=os.environ["PASSWORD"]):
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-259](https://cwe.mitre.org/data/definitions/259.html)
|
||||
#[violation]
|
||||
pub struct HardcodedPasswordDefault {
|
||||
name: String,
|
||||
|
||||
@@ -1,11 +1,38 @@
|
||||
use rustpython_parser::ast::{Keyword, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for potential uses of hardcoded passwords in function calls.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Including a hardcoded password in source code is a security risk, as an
|
||||
/// attacker could discover the password and use it to gain unauthorized
|
||||
/// access.
|
||||
///
|
||||
/// Instead, store passwords and other secrets in configuration files,
|
||||
/// environment variables, or other sources that are excluded from version
|
||||
/// control.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// connect_to_server(password="hunter2")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// connect_to_server(password=os.environ["PASSWORD"])
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-259](https://cwe.mitre.org/data/definitions/259.html)
|
||||
#[violation]
|
||||
pub struct HardcodedPasswordFuncArg {
|
||||
name: String,
|
||||
|
||||
@@ -7,6 +7,32 @@ use crate::checkers::ast::Checker;
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for potential uses of hardcoded passwords in strings.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Including a hardcoded password in source code is a security risk, as an
|
||||
/// attacker could discover the password and use it to gain unauthorized
|
||||
/// access.
|
||||
///
|
||||
/// Instead, store passwords and other secrets in configuration files,
|
||||
/// environment variables, or other sources that are excluded from version
|
||||
/// control.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// SECRET_KEY = "hunter2"
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// SECRET_KEY = os.environ["SECRET_KEY"]
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-259](https://cwe.mitre.org/data/definitions/259.html)
|
||||
#[violation]
|
||||
pub struct HardcodedPasswordString {
|
||||
name: String,
|
||||
|
||||
@@ -3,6 +3,35 @@ use rustpython_parser::ast::{Expr, Ranged};
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for the use of hardcoded temporary file or directory paths.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The use of hardcoded paths for temporary files can be insecure. If an
|
||||
/// attacker discovers the location of a hardcoded path, they can replace the
|
||||
/// contents of the file or directory with a malicious payload.
|
||||
///
|
||||
/// Other programs may also read or write contents to these hardcoded paths,
|
||||
/// causing unexpected behavior.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// with open("/tmp/foo.txt", "w") as file:
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import tempfile
|
||||
///
|
||||
/// with tempfile.NamedTemporaryFile() as file:
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-377](https://cwe.mitre.org/data/definitions/377.html)
|
||||
/// - [Common Weakness Enumeration: CWE-379](https://cwe.mitre.org/data/definitions/379.html)
|
||||
/// - [Python documentation: `tempfile`](https://docs.python.org/3/library/tempfile.html)
|
||||
#[violation]
|
||||
pub struct HardcodedTempFile {
|
||||
string: String,
|
||||
|
||||
@@ -8,6 +8,44 @@ use crate::checkers::ast::Checker;
|
||||
|
||||
use super::super::helpers::string_literal;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of weak or broken cryptographic hash functions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Weak or broken cryptographic hash functions may be susceptible to
|
||||
/// collision attacks (where two different inputs produce the same hash) or
|
||||
/// pre-image attacks (where an attacker can find an input that produces a
|
||||
/// given hash). This can lead to security vulnerabilities in applications
|
||||
/// that rely on these hash functions.
|
||||
///
|
||||
/// Avoid using weak or broken cryptographic hash functions in security
|
||||
/// contexts. Instead, use a known secure hash function such as SHA256.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import hashlib
|
||||
///
|
||||
///
|
||||
/// def certificate_is_valid(certificate: bytes, known_hash: str) -> bool:
|
||||
/// hash = hashlib.md5(certificate).hexdigest()
|
||||
/// return hash == known_hash
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import hashlib
|
||||
///
|
||||
///
|
||||
/// def certificate_is_valid(certificate: bytes, known_hash: str) -> bool:
|
||||
/// hash = hashlib.sha256(certificate).hexdigest()
|
||||
/// return hash == known_hash
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `hashlib` — Secure hashes and message digests](https://docs.python.org/3/library/hashlib.html)
|
||||
/// - [Common Weakness Enumeration: CWE-327](https://cwe.mitre.org/data/definitions/327.html)
|
||||
/// - [Common Weakness Enumeration: CWE-328](https://cwe.mitre.org/data/definitions/328.html)
|
||||
/// - [Common Weakness Enumeration: CWE-916](https://cwe.mitre.org/data/definitions/916.html)
|
||||
#[violation]
|
||||
pub struct HashlibInsecureHashFunction {
|
||||
string: String,
|
||||
|
||||
@@ -6,6 +6,31 @@ use ruff_python_ast::helpers::{is_const_none, SimpleCallArgs};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the Python `requests` module that omit the `timeout`
|
||||
/// parameter.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `timeout` parameter is used to set the maximum time to wait for a
|
||||
/// response from the server. By omitting the `timeout` parameter, the program
|
||||
/// may hang indefinitely while awaiting a response.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import requests
|
||||
///
|
||||
/// requests.get("https://www.example.com/")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import requests
|
||||
///
|
||||
/// requests.get("https://www.example.com/", timeout=10)
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Requests documentation: Timeouts](https://requests.readthedocs.io/en/latest/user/advanced/#timeouts)
|
||||
#[violation]
|
||||
pub struct RequestWithoutTimeout {
|
||||
implicit: bool,
|
||||
|
||||
@@ -350,7 +350,7 @@ fn is_wildcard_command(expr: &Expr) -> bool {
|
||||
if let Expr::List(ast::ExprList { elts, .. }) = expr {
|
||||
let mut has_star = false;
|
||||
let mut has_command = false;
|
||||
for elt in elts.iter() {
|
||||
for elt in elts {
|
||||
if let Some(text) = string_literal(elt) {
|
||||
has_star |= text.contains('*');
|
||||
has_command |= text.contains("chown")
|
||||
|
||||
@@ -9,6 +9,42 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::AsRule;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for calls to `pickle` functions or modules that wrap them.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Deserializing untrusted data with `pickle` and other deserialization
|
||||
/// modules is insecure as it can allow for the creation of arbitrary objects,
|
||||
/// which can then be used to achieve arbitrary code execution and otherwise
|
||||
/// unexpected behavior.
|
||||
///
|
||||
/// Avoid deserializing untrusted data with `pickle` and other deserialization
|
||||
/// modules. Instead, consider safer formats, such as JSON.
|
||||
///
|
||||
/// If you must deserialize untrusted data with `pickle`, consider signing the
|
||||
/// data with a secret key and verifying the signature before deserializing the
|
||||
/// payload, This will prevent an attacker from injecting arbitrary objects
|
||||
/// into the serialized data.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import pickle
|
||||
///
|
||||
/// with open("foo.pickle", "rb") as file:
|
||||
/// foo = pickle.load(file)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import json
|
||||
///
|
||||
/// with open("foo.json", "rb") as file:
|
||||
/// foo = json.load(file)
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `pickle` — Python object serialization](https://docs.python.org/3/library/pickle.html)
|
||||
/// - [Common Weakness Enumeration: CWE-502](https://cwe.mitre.org/data/definitions/502.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousPickleUsage;
|
||||
|
||||
@@ -19,6 +55,41 @@ impl Violation for SuspiciousPickleUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for calls to `marshal` functions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Deserializing untrusted data with `marshal` is insecure as it can allow for
|
||||
/// the creation of arbitrary objects, which can then be used to achieve
|
||||
/// arbitrary code execution and otherwise unexpected behavior.
|
||||
///
|
||||
/// Avoid deserializing untrusted data with `marshal`. Instead, consider safer
|
||||
/// formats, such as JSON.
|
||||
///
|
||||
/// If you must deserialize untrusted data with `marshal`, consider signing the
|
||||
/// data with a secret key and verifying the signature before deserializing the
|
||||
/// payload, This will prevent an attacker from injecting arbitrary objects
|
||||
/// into the serialized data.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import marshal
|
||||
///
|
||||
/// with open("foo.marshal", "rb") as file:
|
||||
/// foo = pickle.load(file)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import json
|
||||
///
|
||||
/// with open("foo.json", "rb") as file:
|
||||
/// foo = json.load(file)
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `marshal` — Internal Python object serialization](https://docs.python.org/3/library/marshal.html)
|
||||
/// - [Common Weakness Enumeration: CWE-502](https://cwe.mitre.org/data/definitions/502.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousMarshalUsage;
|
||||
|
||||
@@ -29,6 +100,42 @@ impl Violation for SuspiciousMarshalUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of weak or broken cryptographic hash functions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Weak or broken cryptographic hash functions may be susceptible to
|
||||
/// collision attacks (where two different inputs produce the same hash) or
|
||||
/// pre-image attacks (where an attacker can find an input that produces a
|
||||
/// given hash). This can lead to security vulnerabilities in applications
|
||||
/// that rely on these hash functions.
|
||||
///
|
||||
/// Avoid using weak or broken cryptographic hash functions in security
|
||||
/// contexts. Instead, use a known secure hash function such as SHA-256.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from cryptography.hazmat.primitives import hashes
|
||||
///
|
||||
/// digest = hashes.Hash(hashes.MD5())
|
||||
/// digest.update(b"Hello, world!")
|
||||
/// digest.finalize()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from cryptography.hazmat.primitives import hashes
|
||||
///
|
||||
/// digest = hashes.Hash(hashes.SHA256())
|
||||
/// digest.update(b"Hello, world!")
|
||||
/// digest.finalize()
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `hashlib` — Secure hashes and message digests](https://docs.python.org/3/library/hashlib.html)
|
||||
/// - [Common Weakness Enumeration: CWE-327](https://cwe.mitre.org/data/definitions/327.html)
|
||||
/// - [Common Weakness Enumeration: CWE-328](https://cwe.mitre.org/data/definitions/328.html)
|
||||
/// - [Common Weakness Enumeration: CWE-916](https://cwe.mitre.org/data/definitions/916.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousInsecureHashUsage;
|
||||
|
||||
@@ -39,6 +146,34 @@ impl Violation for SuspiciousInsecureHashUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of weak or broken cryptographic ciphers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Weak or broken cryptographic ciphers may be susceptible to attacks that
|
||||
/// allow an attacker to decrypt ciphertext without knowing the key or
|
||||
/// otherwise compromise the security of the cipher, such as forgeries.
|
||||
///
|
||||
/// Use strong, modern cryptographic ciphers instead of weak or broken ones.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
|
||||
///
|
||||
/// algorithm = algorithms.ARC4(key)
|
||||
/// cipher = Cipher(algorithm, mode=None)
|
||||
/// encryptor = cipher.encryptor()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from cryptography.fernet import Fernet
|
||||
///
|
||||
/// fernet = Fernet(key)
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-327](https://cwe.mitre.org/data/definitions/327.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousInsecureCipherUsage;
|
||||
|
||||
@@ -49,6 +184,36 @@ impl Violation for SuspiciousInsecureCipherUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of weak or broken cryptographic cipher modes.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Weak or broken cryptographic ciphers may be susceptible to attacks that
|
||||
/// allow an attacker to decrypt ciphertext without knowing the key or
|
||||
/// otherwise compromise the security of the cipher, such as forgeries.
|
||||
///
|
||||
/// Use strong, modern cryptographic ciphers instead of weak or broken ones.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
///
|
||||
/// algorithm = algorithms.ARC4(key)
|
||||
/// cipher = Cipher(algorithm, mode=modes.ECB(iv))
|
||||
/// encryptor = cipher.encryptor()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
///
|
||||
/// algorithm = algorithms.ARC4(key)
|
||||
/// cipher = Cipher(algorithm, mode=modes.CTR(iv))
|
||||
/// encryptor = cipher.encryptor()
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-327](https://cwe.mitre.org/data/definitions/327.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousInsecureCipherModeUsage;
|
||||
|
||||
@@ -59,6 +224,40 @@ impl Violation for SuspiciousInsecureCipherModeUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `tempfile.mktemp`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `tempfile.mktemp` returns a pathname of a file that does not exist at the
|
||||
/// time the call is made; then, the caller is responsible for creating the
|
||||
/// file and subsequently using it. This is insecure because another process
|
||||
/// could create a file with the same name between the time the function
|
||||
/// returns and the time the caller creates the file.
|
||||
///
|
||||
/// `tempfile.mktemp` is deprecated in favor of `tempfile.mkstemp` which
|
||||
/// creates the file when it is called. Consider using `tempfile.mkstemp`
|
||||
/// instead, either directly or via a context manager such as
|
||||
/// `tempfile.TemporaryFile`.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import tempfile
|
||||
///
|
||||
/// tmp_file = tempfile.mktemp()
|
||||
/// with open(tmp_file, "w") as file:
|
||||
/// file.write("Hello, world!")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import tempfile
|
||||
///
|
||||
/// with tempfile.TemporaryFile() as file:
|
||||
/// file.write("Hello, world!")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation:`mktemp`](https://docs.python.org/3/library/tempfile.html#tempfile.mktemp)
|
||||
#[violation]
|
||||
pub struct SuspiciousMktempUsage;
|
||||
|
||||
@@ -69,6 +268,32 @@ impl Violation for SuspiciousMktempUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the builtin `eval()` function.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `eval()` function is insecure as it enables arbitrary code execution.
|
||||
///
|
||||
/// If you need to evaluate an expression from a string, consider using
|
||||
/// `ast.literal_eval()` instead, which will raise an exception if the
|
||||
/// expression is not a valid Python literal.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// x = eval(input("Enter a number: "))
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from ast import literal_eval
|
||||
///
|
||||
/// x = literal_eval(input("Enter a number: "))
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `eval`](https://docs.python.org/3/library/functions.html#eval)
|
||||
/// - [Python documentation: `literal_eval`](https://docs.python.org/3/library/ast.html#ast.literal_eval)
|
||||
/// - [_Eval really is dangerous_ by Ned Batchelder](https://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousEvalUsage;
|
||||
|
||||
@@ -79,6 +304,35 @@ impl Violation for SuspiciousEvalUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of calls to `django.utils.safestring.mark_safe`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Cross-site scripting (XSS) vulnerabilities allow attackers to execute
|
||||
/// arbitrary JavaScript. To guard against XSS attacks, Django templates
|
||||
/// assumes that data is unsafe and automatically escapes malicious strings
|
||||
/// before rending them.
|
||||
///
|
||||
/// `django.utils.safestring.mark_safe` marks a string as safe for use in HTML
|
||||
/// templates, bypassing XSS protection. This is dangerous because it may allow
|
||||
/// cross-site scripting attacks if the string is not properly escaped.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from django.utils.safestring import mark_safe
|
||||
///
|
||||
/// content = mark_safe("<script>alert('Hello, world!')</script>") # XSS.
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// content = "<script>alert('Hello, world!')</script>" # Safe if rendered.
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Django documentation: `mark_safe`](https://docs.djangoproject.com/en/dev/ref/utils/#django.utils.safestring.mark_safe)
|
||||
/// - [Django documentation: Cross Site Scripting (XSS) protection](https://docs.djangoproject.com/en/dev/topics/security/#cross-site-scripting-xss-protection)
|
||||
/// - [Common Weakness Enumeration: CWE-80](https://cwe.mitre.org/data/definitions/80.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousMarkSafeUsage;
|
||||
|
||||
@@ -89,6 +343,44 @@ impl Violation for SuspiciousMarkSafeUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of URL open functions that unexpected schemes.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Some URL open functions allow the use of `file:` or custom schemes (for use
|
||||
/// instead of `http:` or `https:`). An attacker may be able to use these
|
||||
/// schemes to access or modify unauthorized resources, and cause unexpected
|
||||
/// behavior.
|
||||
///
|
||||
/// To mitigate this risk, audit all uses of URL open functions and ensure that
|
||||
/// only permitted schemes are used (e.g., allowing `http:` and `https:` and
|
||||
/// disallowing `file:` and `ftp:`).
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from urllib.request import urlopen
|
||||
///
|
||||
/// url = input("Enter a URL: ")
|
||||
///
|
||||
/// with urlopen(url) as response:
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from urllib.request import urlopen
|
||||
///
|
||||
/// url = input("Enter a URL: ")
|
||||
///
|
||||
/// if not url.startswith(("http:", "https:")):
|
||||
/// raise ValueError("URL must start with 'http:' or 'https:'")
|
||||
///
|
||||
/// with urlopen(url) as response:
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `urlopen`](https://docs.python.org/3/library/urllib.request.html#urllib.request.urlopen)
|
||||
#[violation]
|
||||
pub struct SuspiciousURLOpenUsage;
|
||||
|
||||
@@ -99,6 +391,34 @@ impl Violation for SuspiciousURLOpenUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of cryptographically weak pseudo-random number generators.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Cryptographically weak pseudo-random number generators are insecure as they
|
||||
/// are easily predictable. This can allow an attacker to guess the generated
|
||||
/// numbers and compromise the security of the system.
|
||||
///
|
||||
/// Instead, use a cryptographically secure pseudo-random number generator
|
||||
/// (such as using the [`secrets` module](https://docs.python.org/3/library/secrets.html))
|
||||
/// when generating random numbers for security purposes.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import random
|
||||
///
|
||||
/// random.randrange(10)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import secrets
|
||||
///
|
||||
/// secrets.randbelow(10)
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `random` — Generate pseudo-random numbers](https://docs.python.org/3/library/random.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousNonCryptographicRandomUsage;
|
||||
|
||||
@@ -109,6 +429,36 @@ impl Violation for SuspiciousNonCryptographicRandomUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.etree.cElementTree import parse
|
||||
///
|
||||
/// tree = parse("untrusted.xml") # Vulnerable to XML attacks.
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.cElementTree import parse
|
||||
///
|
||||
/// tree = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLCElementTreeUsage;
|
||||
|
||||
@@ -119,6 +469,36 @@ impl Violation for SuspiciousXMLCElementTreeUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.etree.ElementTree import parse
|
||||
///
|
||||
/// tree = parse("untrusted.xml") # Vulnerable to XML attacks.
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.ElementTree import parse
|
||||
///
|
||||
/// tree = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLElementTreeUsage;
|
||||
|
||||
@@ -129,6 +509,36 @@ impl Violation for SuspiciousXMLElementTreeUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.sax.expatreader import create_parser
|
||||
///
|
||||
/// parser = create_parser()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.sax import create_parser
|
||||
///
|
||||
/// parser = create_parser()
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLExpatReaderUsage;
|
||||
|
||||
@@ -139,6 +549,36 @@ impl Violation for SuspiciousXMLExpatReaderUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.dom.expatbuilder import parse
|
||||
///
|
||||
/// parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.expatbuilder import parse
|
||||
///
|
||||
/// tree = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLExpatBuilderUsage;
|
||||
|
||||
@@ -149,6 +589,36 @@ impl Violation for SuspiciousXMLExpatBuilderUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.sax import make_parser
|
||||
///
|
||||
/// make_parser()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.sax import make_parser
|
||||
///
|
||||
/// make_parser()
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLSaxUsage;
|
||||
|
||||
@@ -159,6 +629,36 @@ impl Violation for SuspiciousXMLSaxUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.dom.minidom import parse
|
||||
///
|
||||
/// content = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.minidom import parse
|
||||
///
|
||||
/// content = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLMiniDOMUsage;
|
||||
|
||||
@@ -169,6 +669,36 @@ impl Violation for SuspiciousXMLMiniDOMUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// Consider using the `defusedxml` package when parsing untrusted XML data,
|
||||
/// to protect against XML attacks.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from xml.dom.pulldom import parse
|
||||
///
|
||||
/// content = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from defusedxml.pulldom import parse
|
||||
///
|
||||
/// content = parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `xml` — XML processing modules](https://docs.python.org/3/library/xml.html)
|
||||
/// - [PyPI: `defusedxml`](https://pypi.org/project/defusedxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLPullDOMUsage;
|
||||
|
||||
@@ -179,16 +709,68 @@ impl Violation for SuspiciousXMLPullDOMUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of insecure XML parsers.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Many XML parsers are vulnerable to XML attacks (such as entity expansion),
|
||||
/// which cause excessive memory and CPU usage by exploiting recursion. An
|
||||
/// attacker could use such methods to access unauthorized resources.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from lxml import etree
|
||||
///
|
||||
/// content = etree.parse("untrusted.xml")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [PyPI: `lxml`](https://pypi.org/project/lxml/)
|
||||
/// - [Common Weakness Enumeration: CWE-400](https://cwe.mitre.org/data/definitions/400.html)
|
||||
/// - [Common Weakness Enumeration: CWE-776](https://cwe.mitre.org/data/definitions/776.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousXMLETreeUsage;
|
||||
|
||||
impl Violation for SuspiciousXMLETreeUsage {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Using `xml` to parse untrusted data is known to be vulnerable to XML attacks; use `defusedxml` equivalents")
|
||||
format!("Using `lxml` to parse untrusted data is known to be vulnerable to XML attacks")
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `ssl._create_unverified_context`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// [PEP 476] enabled certificate and hostname validation by default in Python
|
||||
/// standard library HTTP clients. Previously, Python did not validate
|
||||
/// certificates by default, which could allow an attacker to perform a "man in
|
||||
/// the middle" attack by intercepting and modifying traffic between client and
|
||||
/// server.
|
||||
///
|
||||
/// To support legacy environments, `ssl._create_unverified_context` reverts to
|
||||
/// the previous behavior that does perform verification. Otherwise, use
|
||||
/// `ssl.create_default_context` to create a secure context.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import ssl
|
||||
///
|
||||
/// context = ssl._create_unverified_context()
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import ssl
|
||||
///
|
||||
/// context = ssl.create_default_context()
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [PEP 476 – Enabling certificate verification by default for stdlib http clients: Opting out](https://peps.python.org/pep-0476/#opting-out)
|
||||
/// - [Python documentation: `ssl` — TLS/SSL wrapper for socket objects](https://docs.python.org/3/library/ssl.html)
|
||||
///
|
||||
/// [PEP 476]: https://peps.python.org/pep-0476/
|
||||
#[violation]
|
||||
pub struct SuspiciousUnverifiedContextUsage;
|
||||
|
||||
@@ -199,6 +781,17 @@ impl Violation for SuspiciousUnverifiedContextUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for the use of Telnet-related functions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Telnet is considered insecure because it does not encrypt data sent over
|
||||
/// the connection and is vulnerable to numerous attacks.
|
||||
///
|
||||
/// Instead, consider using a more secure protocol such as SSH.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `telnetlib` — Telnet client](https://docs.python.org/3/library/telnetlib.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousTelnetUsage;
|
||||
|
||||
@@ -209,6 +802,17 @@ impl Violation for SuspiciousTelnetUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for the use of FTP-related functions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// FTP is considered insecure as it does not encrypt data sent over the
|
||||
/// connection and is thus vulnerable to numerous attacks.
|
||||
///
|
||||
/// Instead, consider using FTPS (which secures FTP using SSL/TLS) or SFTP.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `ftplib` — FTP protocol client](https://docs.python.org/3/library/ftplib.html)
|
||||
#[violation]
|
||||
pub struct SuspiciousFTPLibUsage;
|
||||
|
||||
@@ -219,7 +823,7 @@ impl Violation for SuspiciousFTPLibUsage {
|
||||
}
|
||||
}
|
||||
|
||||
/// S001
|
||||
/// S301, S302, S303, S304, S305, S306, S307, S308, S310, S311, S312, S313, S314, S315, S316, S317, S318, S319, S320, S321, S323
|
||||
pub(crate) fn suspicious_function_call(checker: &mut Checker, expr: &Expr) {
|
||||
let Expr::Call(ast::ExprCall { func, .. }) = expr else {
|
||||
return;
|
||||
@@ -246,7 +850,7 @@ pub(crate) fn suspicious_function_call(checker: &mut Checker, expr: &Expr) {
|
||||
// Mktemp
|
||||
["tempfile", "mktemp"] => Some(SuspiciousMktempUsage.into()),
|
||||
// Eval
|
||||
["eval"] => Some(SuspiciousEvalUsage.into()),
|
||||
["" | "builtins", "eval"] => Some(SuspiciousEvalUsage.into()),
|
||||
// MarkSafe
|
||||
["django", "utils", "safestring", "mark_safe"] => Some(SuspiciousMarkSafeUsage.into()),
|
||||
// URLOpen
|
||||
|
||||
@@ -6,6 +6,40 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_bandit::helpers::is_untyped_exception;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the `try`-`except`-`continue` pattern.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `try`-`except`-`continue` pattern suppresses all exceptions.
|
||||
/// Suppressing exceptions may hide errors that could otherwise reveal
|
||||
/// unexpected behavior, security vulnerabilities, or malicious activity.
|
||||
/// Instead, consider logging the exception.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import logging
|
||||
///
|
||||
/// while predicate:
|
||||
/// try:
|
||||
/// ...
|
||||
/// except Exception:
|
||||
/// continue
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import logging
|
||||
///
|
||||
/// while predicate:
|
||||
/// try:
|
||||
/// ...
|
||||
/// except Exception as exc:
|
||||
/// logging.exception("Error occurred")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-703](https://cwe.mitre.org/data/definitions/703.html)
|
||||
/// - [Python documentation: `logging`](https://docs.python.org/3/library/logging.html)
|
||||
#[violation]
|
||||
pub struct TryExceptContinue;
|
||||
|
||||
|
||||
@@ -6,6 +6,36 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_bandit::helpers::is_untyped_exception;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the `try`-`except`-`pass` pattern.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `try`-`except`-`pass` pattern suppresses all exceptions. Suppressing
|
||||
/// exceptions may hide errors that could otherwise reveal unexpected behavior,
|
||||
/// security vulnerabilities, or malicious activity. Instead, consider logging
|
||||
/// the exception.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// try:
|
||||
/// ...
|
||||
/// except Exception:
|
||||
/// pass
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import logging
|
||||
///
|
||||
/// try:
|
||||
/// ...
|
||||
/// except Exception as exc:
|
||||
/// logging.exception("Exception occurred")
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Common Weakness Enumeration: CWE-703](https://cwe.mitre.org/data/definitions/703.html)
|
||||
/// - [Python documentation: `logging`](https://docs.python.org/3/library/logging.html)
|
||||
#[violation]
|
||||
pub struct TryExceptPass;
|
||||
|
||||
|
||||
@@ -59,12 +59,7 @@ impl From<Options> for Settings {
|
||||
.hardcoded_tmp_directory
|
||||
.unwrap_or_else(default_tmp_dirs)
|
||||
.into_iter()
|
||||
.chain(
|
||||
options
|
||||
.hardcoded_tmp_directory_extend
|
||||
.unwrap_or_default()
|
||||
.into_iter(),
|
||||
)
|
||||
.chain(options.hardcoded_tmp_directory_extend.unwrap_or_default())
|
||||
.collect(),
|
||||
check_typed_exception: options.check_typed_exception.unwrap_or(false),
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ S102.py:3:5: S102 Use of `exec` detected
|
||||
1 | def fn():
|
||||
2 | # Error
|
||||
3 | exec('x = 2')
|
||||
| ^^^^^^^^^^^^^ S102
|
||||
| ^^^^ S102
|
||||
4 |
|
||||
5 | exec('y = 3')
|
||||
|
|
||||
@@ -16,7 +16,7 @@ S102.py:5:1: S102 Use of `exec` detected
|
||||
3 | exec('x = 2')
|
||||
4 |
|
||||
5 | exec('y = 3')
|
||||
| ^^^^^^^^^^^^^ S102
|
||||
| ^^^^ S102
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S307.py:3:7: S307 Use of possibly insecure function; consider using `ast.literal_eval`
|
||||
|
|
||||
1 | import os
|
||||
2 |
|
||||
3 | print(eval("1+1")) # S307
|
||||
| ^^^^^^^^^^^ S307
|
||||
4 | print(eval("os.getcwd()")) # S307
|
||||
|
|
||||
|
||||
S307.py:4:7: S307 Use of possibly insecure function; consider using `ast.literal_eval`
|
||||
|
|
||||
3 | print(eval("1+1")) # S307
|
||||
4 | print(eval("os.getcwd()")) # S307
|
||||
| ^^^^^^^^^^^^^^^^^^^ S307
|
||||
|
|
||||
|
||||
|
||||
@@ -63,59 +63,64 @@ pub(crate) fn blind_except(
|
||||
let Expr::Name(ast::ExprName { id, .. }) = &type_ else {
|
||||
return;
|
||||
};
|
||||
for exception in ["BaseException", "Exception"] {
|
||||
if id == exception && checker.semantic().is_builtin(exception) {
|
||||
// If the exception is re-raised, don't flag an error.
|
||||
if body.iter().any(|stmt| {
|
||||
if let Stmt::Raise(ast::StmtRaise { exc, .. }) = stmt {
|
||||
if let Some(exc) = exc {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = exc.as_ref() {
|
||||
name.map_or(false, |name| id == name)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
true
|
||||
}
|
||||
|
||||
if !matches!(id.as_str(), "BaseException" | "Exception") {
|
||||
return;
|
||||
}
|
||||
|
||||
if !checker.semantic().is_builtin(id) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the exception is re-raised, don't flag an error.
|
||||
if body.iter().any(|stmt| {
|
||||
if let Stmt::Raise(ast::StmtRaise { exc, .. }) = stmt {
|
||||
if let Some(exc) = exc {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = exc.as_ref() {
|
||||
name.map_or(false, |name| id == name)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}) {
|
||||
continue;
|
||||
} else {
|
||||
true
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the exception is logged, don't flag an error.
|
||||
if body.iter().any(|stmt| {
|
||||
if let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt {
|
||||
if let Expr::Call(ast::ExprCall { func, keywords, .. }) = value.as_ref() {
|
||||
if logging::is_logger_candidate(func, checker.semantic()) {
|
||||
if let Some(attribute) = func.as_attribute_expr() {
|
||||
let attr = attribute.attr.as_str();
|
||||
if attr == "exception" {
|
||||
// If the exception is logged, don't flag an error.
|
||||
if body.iter().any(|stmt| {
|
||||
if let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt {
|
||||
if let Expr::Call(ast::ExprCall { func, keywords, .. }) = value.as_ref() {
|
||||
if logging::is_logger_candidate(func, checker.semantic()) {
|
||||
if let Some(attribute) = func.as_attribute_expr() {
|
||||
let attr = attribute.attr.as_str();
|
||||
if attr == "exception" {
|
||||
return true;
|
||||
}
|
||||
if attr == "error" {
|
||||
if let Some(keyword) = find_keyword(keywords, "exc_info") {
|
||||
if is_const_true(&keyword.value) {
|
||||
return true;
|
||||
}
|
||||
if attr == "error" {
|
||||
if let Some(keyword) = find_keyword(keywords, "exc_info") {
|
||||
if is_const_true(&keyword.value) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}) {
|
||||
continue;
|
||||
}
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
BlindExcept {
|
||||
name: id.to_string(),
|
||||
},
|
||||
type_.range(),
|
||||
));
|
||||
}
|
||||
false
|
||||
}) {
|
||||
return;
|
||||
}
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
BlindExcept {
|
||||
name: id.to_string(),
|
||||
},
|
||||
type_.range(),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -19,7 +19,6 @@ mod tests {
|
||||
#[test_case(Rule::AssertRaisesException, Path::new("B017.py"))]
|
||||
#[test_case(Rule::AssignmentToOsEnviron, Path::new("B003.py"))]
|
||||
#[test_case(Rule::CachedInstanceMethod, Path::new("B019.py"))]
|
||||
#[test_case(Rule::RaiseLiteral, Path::new("B016.py"))]
|
||||
#[test_case(Rule::DuplicateHandlerException, Path::new("B014.py"))]
|
||||
#[test_case(Rule::DuplicateTryBlockException, Path::new("B025.py"))]
|
||||
#[test_case(Rule::DuplicateValue, Path::new("B033.py"))]
|
||||
@@ -35,7 +34,9 @@ mod tests {
|
||||
#[test_case(Rule::LoopVariableOverridesIterator, Path::new("B020.py"))]
|
||||
#[test_case(Rule::MutableArgumentDefault, Path::new("B006_B008.py"))]
|
||||
#[test_case(Rule::NoExplicitStacklevel, Path::new("B028.py"))]
|
||||
#[test_case(Rule::RaiseLiteral, Path::new("B016.py"))]
|
||||
#[test_case(Rule::RaiseWithoutFromInsideExcept, Path::new("B904.py"))]
|
||||
#[test_case(Rule::ReSubPositionalArgs, Path::new("B034.py"))]
|
||||
#[test_case(Rule::RedundantTupleInExceptionHandler, Path::new("B013.py"))]
|
||||
#[test_case(Rule::ReuseOfGroupbyGenerator, Path::new("B031.py"))]
|
||||
#[test_case(Rule::SetAttrWithConstant, Path::new("B009_B010.py"))]
|
||||
|
||||
@@ -17,6 +17,7 @@ pub(crate) use mutable_argument_default::*;
|
||||
pub(crate) use no_explicit_stacklevel::*;
|
||||
pub(crate) use raise_literal::*;
|
||||
pub(crate) use raise_without_from_inside_except::*;
|
||||
pub(crate) use re_sub_positional_args::*;
|
||||
pub(crate) use redundant_tuple_in_exception_handler::*;
|
||||
pub(crate) use reuse_of_groupby_generator::*;
|
||||
pub(crate) use setattr_with_constant::*;
|
||||
@@ -50,6 +51,7 @@ mod mutable_argument_default;
|
||||
mod no_explicit_stacklevel;
|
||||
mod raise_literal;
|
||||
mod raise_without_from_inside_except;
|
||||
mod re_sub_positional_args;
|
||||
mod redundant_tuple_in_exception_handler;
|
||||
mod reuse_of_groupby_generator;
|
||||
mod setattr_with_constant;
|
||||
|
||||
@@ -0,0 +1,112 @@
|
||||
use std::fmt;
|
||||
|
||||
use rustpython_parser::ast::{self, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for calls to `re.sub`, `re.subn`, and `re.split` that pass `count`,
|
||||
/// `maxsplit`, or `flags` as positional arguments.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Passing `count`, `maxsplit`, or `flags` as positional arguments to
|
||||
/// `re.sub`, re.subn`, or `re.split` can lead to confusion, as most methods in
|
||||
/// the `re` module accepts `flags` as the third positional argument, while
|
||||
/// `re.sub`, `re.subn`, and `re.split` have different signatures.
|
||||
///
|
||||
/// Instead, pass `count`, `maxsplit`, and `flags` as keyword arguments.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import re
|
||||
///
|
||||
/// re.split("pattern", "replacement", 1)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// import re
|
||||
///
|
||||
/// re.split("pattern", "replacement", maxsplit=1)
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `re.sub`](https://docs.python.org/3/library/re.html#re.sub)
|
||||
/// - [Python documentation: `re.subn`](https://docs.python.org/3/library/re.html#re.subn)
|
||||
/// - [Python documentation: `re.split`](https://docs.python.org/3/library/re.html#re.split)
|
||||
#[violation]
|
||||
pub struct ReSubPositionalArgs {
|
||||
method: Method,
|
||||
}
|
||||
|
||||
impl Violation for ReSubPositionalArgs {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let ReSubPositionalArgs { method } = self;
|
||||
let param_name = method.param_name();
|
||||
format!(
|
||||
"`{method}` should pass `{param_name}` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// B034
|
||||
pub(crate) fn re_sub_positional_args(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
let Some(method) = checker
|
||||
.semantic()
|
||||
.resolve_call_path(&call.func)
|
||||
.and_then(|call_path| match call_path.as_slice() {
|
||||
["re", "sub"] => Some(Method::Sub),
|
||||
["re", "subn"] => Some(Method::Subn),
|
||||
["re", "split"] => Some(Method::Split),
|
||||
_ => None,
|
||||
})
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
if call.args.len() > method.num_args() {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
ReSubPositionalArgs { method },
|
||||
call.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
enum Method {
|
||||
Sub,
|
||||
Subn,
|
||||
Split,
|
||||
}
|
||||
|
||||
impl fmt::Display for Method {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Self::Sub => fmt.write_str("re.sub"),
|
||||
Self::Subn => fmt.write_str("re.subn"),
|
||||
Self::Split => fmt.write_str("re.split"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Method {
|
||||
const fn param_name(self) -> &'static str {
|
||||
match self {
|
||||
Self::Sub => "count",
|
||||
Self::Subn => "count",
|
||||
Self::Split => "maxsplit",
|
||||
}
|
||||
}
|
||||
|
||||
const fn num_args(self) -> usize {
|
||||
match self {
|
||||
Self::Sub => 3,
|
||||
Self::Subn => 3,
|
||||
Self::Split => 2,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -351,7 +351,7 @@ pub(crate) fn reuse_of_groupby_generator(
|
||||
return;
|
||||
}
|
||||
let mut finder = GroupNameFinder::new(group_name);
|
||||
for stmt in body.iter() {
|
||||
for stmt in body {
|
||||
finder.visit_stmt(stmt);
|
||||
}
|
||||
for expr in finder.exprs {
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
|
||||
---
|
||||
B034.py:5:1: B034 `re.sub` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
4 | # B034
|
||||
5 | re.sub("a", "b", "aaa", re.IGNORECASE)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
6 | re.sub("a", "b", "aaa", 5)
|
||||
7 | re.sub("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
|
|
||||
|
||||
B034.py:6:1: B034 `re.sub` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
4 | # B034
|
||||
5 | re.sub("a", "b", "aaa", re.IGNORECASE)
|
||||
6 | re.sub("a", "b", "aaa", 5)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
7 | re.sub("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
8 | re.subn("a", "b", "aaa", re.IGNORECASE)
|
||||
|
|
||||
|
||||
B034.py:7:1: B034 `re.sub` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
5 | re.sub("a", "b", "aaa", re.IGNORECASE)
|
||||
6 | re.sub("a", "b", "aaa", 5)
|
||||
7 | re.sub("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
8 | re.subn("a", "b", "aaa", re.IGNORECASE)
|
||||
9 | re.subn("a", "b", "aaa", 5)
|
||||
|
|
||||
|
||||
B034.py:8:1: B034 `re.subn` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
6 | re.sub("a", "b", "aaa", 5)
|
||||
7 | re.sub("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
8 | re.subn("a", "b", "aaa", re.IGNORECASE)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
9 | re.subn("a", "b", "aaa", 5)
|
||||
10 | re.subn("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
|
|
||||
|
||||
B034.py:9:1: B034 `re.subn` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
7 | re.sub("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
8 | re.subn("a", "b", "aaa", re.IGNORECASE)
|
||||
9 | re.subn("a", "b", "aaa", 5)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
10 | re.subn("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
11 | re.split(" ", "a a a a", re.I)
|
||||
|
|
||||
|
||||
B034.py:10:1: B034 `re.subn` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
8 | re.subn("a", "b", "aaa", re.IGNORECASE)
|
||||
9 | re.subn("a", "b", "aaa", 5)
|
||||
10 | re.subn("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
11 | re.split(" ", "a a a a", re.I)
|
||||
12 | re.split(" ", "a a a a", 2)
|
||||
|
|
||||
|
||||
B034.py:11:1: B034 `re.split` should pass `maxsplit` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
9 | re.subn("a", "b", "aaa", 5)
|
||||
10 | re.subn("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
11 | re.split(" ", "a a a a", re.I)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
12 | re.split(" ", "a a a a", 2)
|
||||
13 | re.split(" ", "a a a a", 2, re.I)
|
||||
|
|
||||
|
||||
B034.py:12:1: B034 `re.split` should pass `maxsplit` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
10 | re.subn("a", "b", "aaa", 5, re.IGNORECASE)
|
||||
11 | re.split(" ", "a a a a", re.I)
|
||||
12 | re.split(" ", "a a a a", 2)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
13 | re.split(" ", "a a a a", 2, re.I)
|
||||
14 | sub("a", "b", "aaa", re.IGNORECASE)
|
||||
|
|
||||
|
||||
B034.py:13:1: B034 `re.split` should pass `maxsplit` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
11 | re.split(" ", "a a a a", re.I)
|
||||
12 | re.split(" ", "a a a a", 2)
|
||||
13 | re.split(" ", "a a a a", 2, re.I)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
14 | sub("a", "b", "aaa", re.IGNORECASE)
|
||||
|
|
||||
|
||||
B034.py:14:1: B034 `re.sub` should pass `count` and `flags` as keyword arguments to avoid confusion due to unintuitive argument positions
|
||||
|
|
||||
12 | re.split(" ", "a a a a", 2)
|
||||
13 | re.split(" ", "a a a a", 2, re.I)
|
||||
14 | sub("a", "b", "aaa", re.IGNORECASE)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B034
|
||||
15 |
|
||||
16 | # OK
|
||||
|
|
||||
|
||||
|
||||
@@ -222,12 +222,11 @@ impl AlwaysAutofixableViolation for ProhibitedTrailingComma {
|
||||
|
||||
/// COM812, COM818, COM819
|
||||
pub(crate) fn trailing_commas(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
tokens: &[LexResult],
|
||||
locator: &Locator,
|
||||
settings: &Settings,
|
||||
) -> Vec<Diagnostic> {
|
||||
let mut diagnostics = vec![];
|
||||
|
||||
) {
|
||||
let tokens = tokens
|
||||
.iter()
|
||||
.flatten()
|
||||
@@ -387,6 +386,4 @@ pub(crate) fn trailing_commas(
|
||||
stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
diagnostics
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ use ruff_diagnostics::{Edit, Fix};
|
||||
use ruff_python_ast::source_code::{Locator, Stylist};
|
||||
|
||||
use crate::autofix::codemods::CodegenStylist;
|
||||
use crate::rules::flake8_comprehensions::rules::ObjectType;
|
||||
use crate::{
|
||||
checkers::ast::Checker,
|
||||
cst::matchers::{
|
||||
@@ -511,7 +512,7 @@ fn pad_expression(content: String, range: TextRange, checker: &Checker) -> Strin
|
||||
// If the expression is immediately preceded by an opening brace, then
|
||||
// we need to add a space before the expression.
|
||||
let prefix = checker.locator.up_to(range.start());
|
||||
let left_pad = matches!(prefix.chars().rev().next(), Some('{'));
|
||||
let left_pad = matches!(prefix.chars().next_back(), Some('{'));
|
||||
|
||||
// If the expression is immediately preceded by an opening brace, then
|
||||
// we need to add a space before the expression.
|
||||
@@ -888,7 +889,7 @@ pub(crate) fn fix_unnecessary_map(
|
||||
stylist: &Stylist,
|
||||
expr: &rustpython_parser::ast::Expr,
|
||||
parent: Option<&rustpython_parser::ast::Expr>,
|
||||
kind: &str,
|
||||
object_type: ObjectType,
|
||||
) -> Result<Edit> {
|
||||
let module_text = locator.slice(expr.range());
|
||||
let mut tree = match_expression(module_text)?;
|
||||
@@ -948,8 +949,8 @@ pub(crate) fn fix_unnecessary_map(
|
||||
whitespace_after_in: ParenthesizableWhitespace::SimpleWhitespace(SimpleWhitespace(" ")),
|
||||
});
|
||||
|
||||
match kind {
|
||||
"generator" => {
|
||||
match object_type {
|
||||
ObjectType::Generator => {
|
||||
tree = Expression::GeneratorExp(Box::new(GeneratorExp {
|
||||
elt: func_body.body.clone(),
|
||||
for_in: compfor,
|
||||
@@ -957,7 +958,7 @@ pub(crate) fn fix_unnecessary_map(
|
||||
rpar: vec![RightParen::default()],
|
||||
}));
|
||||
}
|
||||
"list" => {
|
||||
ObjectType::List => {
|
||||
tree = Expression::ListComp(Box::new(ListComp {
|
||||
elt: func_body.body.clone(),
|
||||
for_in: compfor,
|
||||
@@ -967,7 +968,7 @@ pub(crate) fn fix_unnecessary_map(
|
||||
rpar: vec![],
|
||||
}));
|
||||
}
|
||||
"set" => {
|
||||
ObjectType::Set => {
|
||||
tree = Expression::SetComp(Box::new(SetComp {
|
||||
elt: func_body.body.clone(),
|
||||
for_in: compfor,
|
||||
@@ -977,7 +978,7 @@ pub(crate) fn fix_unnecessary_map(
|
||||
rbrace: RightCurlyBrace::default(),
|
||||
}));
|
||||
}
|
||||
"dict" => {
|
||||
ObjectType::Dict => {
|
||||
let (key, value) = if let Expression::Tuple(tuple) = func_body.body.as_ref() {
|
||||
if tuple.elements.len() != 2 {
|
||||
bail!("Expected two elements")
|
||||
@@ -1009,17 +1010,14 @@ pub(crate) fn fix_unnecessary_map(
|
||||
),
|
||||
}));
|
||||
}
|
||||
_ => {
|
||||
bail!("Expected generator, list, set or dict");
|
||||
}
|
||||
}
|
||||
|
||||
let mut content = tree.codegen_stylist(stylist);
|
||||
|
||||
// If the expression is embedded in an f-string, surround it with spaces to avoid
|
||||
// syntax errors.
|
||||
if kind == "set" || kind == "dict" {
|
||||
if let Some(rustpython_parser::ast::Expr::FormattedValue(_)) = parent {
|
||||
if matches!(object_type, ObjectType::Set | ObjectType::Dict) {
|
||||
if parent.map_or(false, rustpython_parser::ast::Expr::is_formatted_value_expr) {
|
||||
content = format!(" {content} ");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
use ruff_text_size::TextRange;
|
||||
use rustpython_parser::ast::{self, Expr, Ranged};
|
||||
use std::fmt;
|
||||
|
||||
use rustpython_parser::ast::{self, Arguments, Expr, ExprContext, Ranged, Stmt};
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_diagnostics::{AutofixKind, Violation};
|
||||
use ruff_diagnostics::{Diagnostic, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::includes_arg_name;
|
||||
use ruff_python_ast::visitor;
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::AsRule;
|
||||
@@ -40,7 +44,7 @@ use super::helpers;
|
||||
/// `{v: v ** 2 for v in values}`.
|
||||
#[violation]
|
||||
pub struct UnnecessaryMap {
|
||||
obj_type: String,
|
||||
object_type: ObjectType,
|
||||
}
|
||||
|
||||
impl Violation for UnnecessaryMap {
|
||||
@@ -48,21 +52,13 @@ impl Violation for UnnecessaryMap {
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let UnnecessaryMap { obj_type } = self;
|
||||
if obj_type == "generator" {
|
||||
format!("Unnecessary `map` usage (rewrite using a generator expression)")
|
||||
} else {
|
||||
format!("Unnecessary `map` usage (rewrite using a `{obj_type}` comprehension)")
|
||||
}
|
||||
let UnnecessaryMap { object_type } = self;
|
||||
format!("Unnecessary `map` usage (rewrite using a {object_type})")
|
||||
}
|
||||
|
||||
fn autofix_title(&self) -> Option<String> {
|
||||
let UnnecessaryMap { obj_type } = self;
|
||||
Some(if obj_type == "generator" {
|
||||
format!("Replace `map` using a generator expression")
|
||||
} else {
|
||||
format!("Replace `map` using a `{obj_type}` comprehension")
|
||||
})
|
||||
let UnnecessaryMap { object_type } = self;
|
||||
Some(format!("Replace `map` with a {object_type}"))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,116 +70,198 @@ pub(crate) fn unnecessary_map(
|
||||
func: &Expr,
|
||||
args: &[Expr],
|
||||
) {
|
||||
fn create_diagnostic(kind: &str, location: TextRange) -> Diagnostic {
|
||||
Diagnostic::new(
|
||||
UnnecessaryMap {
|
||||
obj_type: kind.to_string(),
|
||||
},
|
||||
location,
|
||||
)
|
||||
}
|
||||
|
||||
let Some(id) = helpers::expr_name(func) else {
|
||||
return;
|
||||
};
|
||||
match id {
|
||||
"map" => {
|
||||
if !checker.semantic().is_builtin(id) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Exclude the parent if already matched by other arms
|
||||
if let Some(Expr::Call(ast::ExprCall { func: f, .. })) = parent {
|
||||
if let Some(id_parent) = helpers::expr_name(f) {
|
||||
if id_parent == "dict" || id_parent == "set" || id_parent == "list" {
|
||||
let object_type = match id {
|
||||
"map" => ObjectType::Generator,
|
||||
"list" => ObjectType::List,
|
||||
"set" => ObjectType::Set,
|
||||
"dict" => ObjectType::Dict,
|
||||
_ => return,
|
||||
};
|
||||
|
||||
if !checker.semantic().is_builtin(id) {
|
||||
return;
|
||||
}
|
||||
|
||||
match object_type {
|
||||
ObjectType::Generator => {
|
||||
// Exclude the parent if already matched by other arms.
|
||||
if let Some(Expr::Call(ast::ExprCall { func, .. })) = parent {
|
||||
if let Some(name) = helpers::expr_name(func) {
|
||||
if matches!(name, "list" | "set" | "dict") {
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if args.len() == 2 && matches!(&args[0], Expr::Lambda(_)) {
|
||||
let mut diagnostic = create_diagnostic("generator", expr.range());
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
#[allow(deprecated)]
|
||||
diagnostic.try_set_fix_from_edit(|| {
|
||||
fixes::fix_unnecessary_map(
|
||||
checker.locator,
|
||||
checker.stylist,
|
||||
expr,
|
||||
parent,
|
||||
"generator",
|
||||
)
|
||||
});
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
// Only flag, e.g., `map(lambda x: x + 1, iterable)`.
|
||||
let [Expr::Lambda(ast::ExprLambda { args, body, .. }), _] = args else {
|
||||
return;
|
||||
};
|
||||
|
||||
if late_binding(args, body) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
"list" | "set" => {
|
||||
if !checker.semantic().is_builtin(id) {
|
||||
ObjectType::List | ObjectType::Set => {
|
||||
// Only flag, e.g., `list(map(lambda x: x + 1, iterable))`.
|
||||
let [Expr::Call(ast::ExprCall { func, args, .. })] = args else {
|
||||
return;
|
||||
};
|
||||
|
||||
if args.len() != 2 {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(Expr::Call(ast::ExprCall { func, args, .. })) = args.first() {
|
||||
if args.len() != 2 {
|
||||
return;
|
||||
}
|
||||
let Some(argument) =
|
||||
helpers::first_argument_with_matching_function("map", func, args)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
if let Expr::Lambda(_) = argument {
|
||||
let mut diagnostic = create_diagnostic(id, expr.range());
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
#[allow(deprecated)]
|
||||
diagnostic.try_set_fix_from_edit(|| {
|
||||
fixes::fix_unnecessary_map(
|
||||
checker.locator,
|
||||
checker.stylist,
|
||||
expr,
|
||||
parent,
|
||||
id,
|
||||
)
|
||||
});
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
let Some(argument) = helpers::first_argument_with_matching_function("map", func, args)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Expr::Lambda(ast::ExprLambda { args, body, .. }) = argument else {
|
||||
return;
|
||||
};
|
||||
|
||||
if late_binding(args, body) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
"dict" => {
|
||||
if !checker.semantic().is_builtin(id) {
|
||||
ObjectType::Dict => {
|
||||
// Only flag, e.g., `dict(map(lambda v: (v, v ** 2), values))`.
|
||||
let [Expr::Call(ast::ExprCall { func, args, .. })] = args else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(argument) = helpers::first_argument_with_matching_function("map", func, args)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Expr::Lambda(ast::ExprLambda { args, body, .. }) = argument else {
|
||||
return;
|
||||
};
|
||||
|
||||
let (Expr::Tuple(ast::ExprTuple { elts, .. }) | Expr::List(ast::ExprList { elts, .. })) =
|
||||
body.as_ref()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
if elts.len() != 2 {
|
||||
return;
|
||||
}
|
||||
|
||||
if args.len() == 1 {
|
||||
if let Expr::Call(ast::ExprCall { func, args, .. }) = &args[0] {
|
||||
let Some(argument) =
|
||||
helpers::first_argument_with_matching_function("map", func, args)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
if let Expr::Lambda(ast::ExprLambda { body, .. }) = argument {
|
||||
if matches!(body.as_ref(), Expr::Tuple(ast::ExprTuple { elts, .. }) | Expr::List(ast::ExprList { elts, .. } ) if elts.len() == 2)
|
||||
{
|
||||
let mut diagnostic = create_diagnostic(id, expr.range());
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
#[allow(deprecated)]
|
||||
diagnostic.try_set_fix_from_edit(|| {
|
||||
fixes::fix_unnecessary_map(
|
||||
checker.locator,
|
||||
checker.stylist,
|
||||
expr,
|
||||
parent,
|
||||
id,
|
||||
)
|
||||
});
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
if late_binding(args, body) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryMap { object_type }, expr.range());
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
diagnostic.try_set_fix(|| {
|
||||
fixes::fix_unnecessary_map(checker.locator, checker.stylist, expr, parent, object_type)
|
||||
.map(Fix::suggested)
|
||||
});
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub(crate) enum ObjectType {
|
||||
Generator,
|
||||
List,
|
||||
Set,
|
||||
Dict,
|
||||
}
|
||||
|
||||
impl fmt::Display for ObjectType {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
ObjectType::Generator => fmt.write_str("generator expression"),
|
||||
ObjectType::List => fmt.write_str("`list` comprehension"),
|
||||
ObjectType::Set => fmt.write_str("`set` comprehension"),
|
||||
ObjectType::Dict => fmt.write_str("`dict` comprehension"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the lambda defined by the given arguments and body contains any names that
|
||||
/// are late-bound within nested lambdas.
|
||||
///
|
||||
/// For example, given:
|
||||
///
|
||||
/// ```python
|
||||
/// map(lambda x: lambda: x, range(4)) # (0, 1, 2, 3)
|
||||
/// ```
|
||||
///
|
||||
/// The `x` in the inner lambda is "late-bound". Specifically, rewriting the above as:
|
||||
///
|
||||
/// ```python
|
||||
/// (lambda: x for x in range(4)) # (3, 3, 3, 3)
|
||||
/// ```
|
||||
///
|
||||
/// Would yield an incorrect result, as the `x` in the inner lambda would be bound to the last
|
||||
/// value of `x` in the comprehension.
|
||||
fn late_binding(args: &Arguments, body: &Expr) -> bool {
|
||||
let mut visitor = LateBindingVisitor::new(args);
|
||||
visitor.visit_expr(body);
|
||||
visitor.late_bound
|
||||
}
|
||||
|
||||
/// AST visitor that detects "late-bound" names: names that are loaded inside
/// a nested lambda but defined by the outermost lambda's parameters (and not
/// shadowed by any nested lambda's own parameters).
#[derive(Debug)]
struct LateBindingVisitor<'a> {
    /// The arguments to the current lambda.
    args: &'a Arguments,
    /// The arguments to any lambdas within the current lambda body. Maintained
    /// as a stack: pushed on entering a nested lambda, popped on exit.
    lambdas: Vec<&'a Arguments>,
    /// Whether any names within the current lambda body are late-bound within nested lambdas.
    late_bound: bool,
}
|
||||
|
||||
impl<'a> LateBindingVisitor<'a> {
|
||||
fn new(args: &'a Arguments) -> Self {
|
||||
Self {
|
||||
args,
|
||||
lambdas: Vec::new(),
|
||||
late_bound: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for LateBindingVisitor<'a> {
|
||||
fn visit_stmt(&mut self, _stmt: &'a Stmt) {}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
match expr {
|
||||
Expr::Lambda(ast::ExprLambda { args, .. }) => {
|
||||
self.lambdas.push(args);
|
||||
visitor::walk_expr(self, expr);
|
||||
self.lambdas.pop();
|
||||
}
|
||||
Expr::Name(ast::ExprName {
|
||||
id,
|
||||
ctx: ExprContext::Load,
|
||||
..
|
||||
}) => {
|
||||
// If we're within a nested lambda...
|
||||
if !self.lambdas.is_empty() {
|
||||
// If the name is defined in the current lambda...
|
||||
if includes_arg_name(id, self.args) {
|
||||
// And isn't overridden by any nested lambdas...
|
||||
if !self.lambdas.iter().any(|args| includes_arg_name(id, args)) {
|
||||
// Then it's late-bound.
|
||||
self.late_bound = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => visitor::walk_expr(self, expr),
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
fn visit_body(&mut self, _body: &'a [Stmt]) {}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user