Compare commits

...

27 Commits

Author SHA1 Message Date
Charlie Marsh
d645a19e0a Bump version to 0.0.232 2023-01-24 09:49:07 -05:00
Charlie Marsh
30ae0d3723 Add Dagger and Great Expectations (#2130) 2023-01-24 09:48:00 -05:00
Charlie Marsh
3fb9e76012 Remove unnecessary manual Generator invocations (#2129) 2023-01-24 09:38:12 -05:00
Eric Roberts
0f283ae98c Move pycodestyle rules into individual files (#2123) 2023-01-24 09:27:26 -05:00
Martin Fischer
269926cec4 refactor: Move redirects out of RuleCodePrefix
This commit removes rule redirects such as ("U" -> "UP") from the
RuleCodePrefix enum because they complicated the generation of that enum
(which we want to change to be prefix-agnostic in the future).

To preserve backwards compatibility redirects are now resolved
before the strum-generated RuleCodePrefix::from_str is invoked.

This change also brings two other advantages:

* Redirects are now only defined once
  (previously they had to be defined twice:
  once in ruff_macros/src/rule_code_prefix.rs
  and a second time in src/registry.rs).

* The deprecated redirects will no longer be suggested in IDE
  autocompletion within pyproject.toml since they are now no
  longer part of the ruff.schema.json.
2023-01-24 09:26:19 -05:00
Martin Fischer
28018442f6 refactor: Move ALL from RuleCodePrefix to RuleSelector 2023-01-24 09:26:19 -05:00
Martin Fischer
abc9810e2b refactor: Turn RuleSelector into a newtype around RuleCodePrefix
Yet another refactor to let us implement the many-to-many mapping
between codes and rules in a prefix-agnostic way.

We want to break up the RuleCodePrefix[1] enum into smaller enums.
To facilitate that this commit introduces a new wrapping type around
RuleCodePrefix so that we can start breaking it apart.

[1]: Actually `RuleCodePrefix` is the previous name of the autogenerated
enum ... I renamed it in b19258a243 to
RuleSelector since `ALL` isn't a prefix. This commit now renames it back
but only because the new `RuleSelector` wrapper type, introduced in this
commit, will let us move the `ALL` variant from `RuleCodePrefix` to
`RuleSelector` in the next commit.
2023-01-24 09:26:19 -05:00
Charlie Marsh
a20482961b Add tryceratops to flake8-to-ruff 2023-01-24 08:41:18 -05:00
Charlie Marsh
d97c07818e Update flake8-to-ruff to include latest plugins (#2127)
Closes #2124 (along with a release).
2023-01-24 08:39:58 -05:00
Ville Skyttä
7e92485f43 feat: autofix multi-line-summary-*-line (#2093) 2023-01-24 08:17:13 -05:00
Charlie Marsh
930c3be69d Ignore generators in flake8-return rules (#2126)
We could do a better job of handling them, but they cause too many false-positives right now.

Closes #2119.
2023-01-24 08:15:26 -05:00
Aarni Koskela
24d0a980c5 flake8-annotations: deduplicate code between functions and methods (#2125) 2023-01-24 08:03:33 -05:00
Edgar R. M
f5f0ed280a Implement EXE001 and EXE002 from flake8-executable (#2118) 2023-01-24 08:02:47 -05:00
Martin Fischer
ca58c72fc9 refactor: Convention::codes to rules_to_be_ignored 2023-01-24 07:37:34 -05:00
Martin Fischer
c40f14620a refactor: Get rid of registry::CATEGORIES 2023-01-24 07:37:34 -05:00
Martin Fischer
04300ce258 refactor: Rename SuffixLength enum to Specificity 2023-01-24 07:37:34 -05:00
Martin Fischer
ead5f948d3 refactor: Move Colorize imports where they're used 2023-01-24 07:37:34 -05:00
Martin Fischer
e93e9fae82 refactor: Make flake8_to_ruff tests even more DRY 2023-01-24 07:37:34 -05:00
Martin Fischer
f5ddec0fb3 refactor: Move resolve_select to converter module
The function is only used there and is not plugin-specific
since it also specifies the default rule selectors (F, E, W).
2023-01-24 07:37:34 -05:00
Martin Fischer
3de2a57416 refactor: Use ..Options::default() for tests 2023-01-24 07:37:34 -05:00
Hugo
b29b4084ff Add apk instructions to README (#2121) 2023-01-24 07:29:03 -05:00
Aarni Koskela
c61ca4a953 Add Home Assistant to Readme (#2120) 2023-01-24 07:27:45 -05:00
Denis Gavrilyuk
58d5ac08a8 feat: implement TRY301 (#2113) 2023-01-24 07:25:26 -05:00
Charlie Marsh
cc63a4be6a Allow flagging of multiline implicit string concatenations (#2117)
At present, `ISC001` and `ISC002` flag concatenations like the following:

```py
"a" "b"  # ISC001
"a" \
  "b"  # ISC002
```

However, multiline concatenations are allowed.

This PR adds a setting:

```toml
[tool.ruff.flake8-implicit-str-concat]
allow-multiline = false
```

Which extends `ISC002` to _also_ flag multiline concatenations, like:

```py
(
  "a"  # ISC002
  "b"
)
```

Note that this is backwards compatible, as `allow-multiline` defaults to `true`.
2023-01-24 00:01:01 -05:00
Charlie Marsh
549a5d44bc Upgrade to toml v0.6.0 (#2116)
Closes #1894.
2023-01-23 19:22:42 -05:00
Denis Gavrilyuk
d65ce6308b feat: implement TRY200 (#2087)
#2056
2023-01-23 14:12:42 -05:00
Charlie Marsh
b988a268e4 Escape curly braces when converting .format() strings (#2112)
Closes #2111.
2023-01-23 14:11:24 -05:00
116 changed files with 2647 additions and 2120 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.231
rev: v0.0.232
hooks:
- id: ruff

53
Cargo.lock generated
View File

@@ -719,7 +719,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -733,7 +733,7 @@ dependencies = [
"serde_json",
"strum",
"strum_macros",
"toml_edit",
"toml 0.6.0",
]
[[package]]
@@ -1633,7 +1633,7 @@ checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9"
dependencies = [
"once_cell",
"thiserror",
"toml",
"toml 0.5.11",
]
[[package]]
@@ -1828,7 +1828,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"anyhow",
"bitflags",
@@ -1875,14 +1875,14 @@ dependencies = [
"textwrap",
"thiserror",
"titlecase",
"toml_edit",
"toml 0.6.0",
"wasm-bindgen",
"wasm-bindgen-test",
]
[[package]]
name = "ruff_cli"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1919,7 +1919,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1940,7 +1940,7 @@ dependencies = [
[[package]]
name = "ruff_macros"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"once_cell",
"proc-macro2",
@@ -2200,6 +2200,15 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_spanned"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c68e921cef53841b8925c2abadd27c9b891d9613bdc43d6b823062866df38e8"
dependencies = [
"serde",
]
[[package]]
name = "shellexpand"
version = "3.0.0"
@@ -2480,32 +2489,44 @@ dependencies = [
[[package]]
name = "toml"
version = "0.5.10"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f"
checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
dependencies = [
"serde",
]
[[package]]
name = "toml_datetime"
version = "0.5.0"
name = "toml"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808b51e57d0ef8f71115d8f3a01e7d3750d01c79cac4b3eda910f4389fdf92fd"
checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.17.1"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a34cc558345efd7e88b9eda9626df2138b80bb46a7606f695e751c892bc7dac6"
checksum = "729bfd096e40da9c001f778f5cdecbd2957929a24e10e5883d9392220a751581"
dependencies = [
"indexmap",
"itertools",
"nom8",
"serde",
"serde_spanned",
"toml_datetime",
]

View File

@@ -8,7 +8,7 @@ default-members = [".", "ruff_cli"]
[package]
name = "ruff"
version = "0.0.231"
version = "0.0.232"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"
@@ -46,7 +46,7 @@ num-traits = "0.2.15"
once_cell = { version = "1.16.0" }
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
regex = { version = "1.6.0" }
ruff_macros = { version = "0.0.231", path = "ruff_macros" }
ruff_macros = { version = "0.0.232", path = "ruff_macros" }
rustc-hash = { version = "1.1.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "4f38cb68e4a97aeea9eb19673803a0bd5f655383" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "4f38cb68e4a97aeea9eb19673803a0bd5f655383" }
@@ -61,7 +61,7 @@ strum_macros = { version = "0.24.3" }
textwrap = { version = "0.16.0" }
thiserror = { version = "1.0" }
titlecase = { version = "2.2.1" }
toml_edit = { version = "0.17.1", features = ["easy"] }
toml = { version = "0.6.0", features= ["parse"] }
# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
# For (future) wasm-pack support

View File

@@ -51,19 +51,22 @@ Ruff is extremely actively developed and used in major open-source projects like
- [Apache Airflow](https://github.com/apache/airflow)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Zulip](https://github.com/zulip/zulip)
- [Dagster](https://github.com/dagster-io/dagster)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Dagster](https://github.com/dagster-io/dagster)
- [Dagger](https://github.com/dagger/dagger)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Hatch](https://github.com/pypa/hatch)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Synapse (Matrix)](https://github.com/matrix-org/synapse)
- [Saleor](https://github.com/saleor/saleor)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [Polars](https://github.com/pola-rs/polars)
- [Ibis](https://github.com/ibis-project/ibis)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- [Synapse (Matrix)](https://github.com/matrix-org/synapse)
- [SnowCLI (Snowflake)](https://github.com/Snowflake-Labs/snowcli)
- [cibuildwheel](https://github.com/pypa/cibuildwheel)
- [Saleor](https://github.com/saleor/saleor)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Home Assistant](https://github.com/home-assistant/core)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- [cibuildwheel (PyPA)](https://github.com/pypa/cibuildwheel)
Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
@@ -180,6 +183,12 @@ For **Arch Linux** users, Ruff is also available as [`ruff`](https://archlinux.o
pacman -S ruff
```
For **Alpine** users, Ruff is also available as [`ruff`](https://pkgs.alpinelinux.org/package/edge/testing/x86_64/ruff) on the testing repositories:
```shell
apk add ruff
```
[![Packaging status](https://repology.org/badge/vertical-allrepos/ruff-python-linter.svg?exclude_unsupported=1)](https://repology.org/project/ruff-python-linter/versions)
### Usage
@@ -203,7 +212,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.231'
rev: 'v0.0.232'
hooks:
- id: ruff
```
@@ -668,8 +677,8 @@ For more, see [pydocstyle](https://pypi.org/project/pydocstyle/) on PyPI.
| D209 | new-line-after-last-paragraph | Multi-line docstring closing quotes should be on a separate line | 🛠 |
| D210 | no-surrounding-whitespace | No whitespaces allowed surrounding docstring text | 🛠 |
| D211 | no-blank-line-before-class | No blank lines allowed before class docstring | 🛠 |
| D212 | multi-line-summary-first-line | Multi-line docstring summary should start at the first line | |
| D213 | multi-line-summary-second-line | Multi-line docstring summary should start at the second line | |
| D212 | multi-line-summary-first-line | Multi-line docstring summary should start at the first line | 🛠 |
| D213 | multi-line-summary-second-line | Multi-line docstring summary should start at the second line | 🛠 |
| D214 | section-not-over-indented | Section is over-indented ("{name}") | 🛠 |
| D215 | section-underline-not-over-indented | Section underline is over-indented ("{name}") | 🛠 |
| D300 | uses-triple-quotes | Use """triple double quotes""" | |
@@ -1181,6 +1190,8 @@ For more, see [flake8-executable](https://pypi.org/project/flake8-executable/) o
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| EXE001 | shebang-not-executable | Shebang is present but file is not executable | |
| EXE002 | shebang-missing-executable-file | The file is executable but no shebang is present | |
| EXE003 | shebang-python | Shebang should contain "python" | |
| EXE004 | shebang-whitespace | Avoid whitespace before shebang | 🛠 |
| EXE005 | shebang-newline | Shebang should be at the beginning of the file | |
@@ -1200,8 +1211,10 @@ For more, see [tryceratops](https://pypi.org/project/tryceratops/1.1.0/) on PyPI
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| TRY004 | prefer-type-error | Prefer `TypeError` exception for invalid type | 🛠 |
| TRY200 | reraise-no-cause | Use `raise from` to specify exception cause | |
| TRY201 | verbose-raise | Use `raise` without specifying exception name | |
| TRY300 | try-consider-else | Consider `else` block | |
| TRY301 | raise-within-try | Abstract `raise` to an inner function | |
### flake8-use-pathlib (PTH)
@@ -2683,6 +2696,28 @@ max-string-length = 20
---
### `flake8-implicit-str-concat`
#### [`allow-multiline`](#allow-multiline)
Whether to allow implicit string concatenations for multiline strings.
By default, implicit concatenations of multiline strings are
allowed (but continuation lines, delimited with a backslash, are
prohibited).
**Default value**: `true`
**Type**: `bool`
**Example usage**:
```toml
[tool.ruff.flake8-implicit-str-concat]
allow-multiline = false
```
---
### `flake8-import-conventions`
#### [`aliases`](#aliases)

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.231"
version = "0.0.232"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.231"
version = "0.0.232"
edition = "2021"
[dependencies]
@@ -16,7 +16,7 @@ serde = { version = "1.0.147", features = ["derive"] }
serde_json = { version = "1.0.87" }
strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" }
toml_edit = { version = "0.17.1", features = ["easy"] }
toml = { version = "0.6.0", features = ["parse"] }
[dev-dependencies]

View File

@@ -60,7 +60,7 @@ fn main() -> Result<()> {
// Create Ruff's pyproject.toml section.
let pyproject = flake8_to_ruff::convert(&config, &external_config, cli.plugin)?;
println!("{}", toml_edit::easy::to_string_pretty(&pyproject)?);
println!("{}", toml::to_string_pretty(&pyproject)?);
Ok(())
}

View File

@@ -7,7 +7,7 @@ build-backend = "maturin"
[project]
name = "ruff"
version = "0.0.231"
version = "0.0.232"
description = "An extremely fast Python linter, written in Rust."
authors = [
{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" },

View File

@@ -0,0 +1,4 @@
#!/usr/bin/python
if __name__ == '__main__':
print('I should be executable.')

View File

@@ -0,0 +1,2 @@
if __name__ == '__main__':
print('I should be executable.')

View File

@@ -0,0 +1,4 @@
#!/usr/bin/python
if __name__ == '__main__':
print('I should be executable.')

View File

@@ -0,0 +1,2 @@
if __name__ == '__main__':
print('I should be executable.')

View File

@@ -0,0 +1,2 @@
if __name__ == '__main__':
print('I should be executable.')

View File

@@ -0,0 +1,4 @@
#!/usr/bin/python
if __name__ == '__main__':
print('I should be executable.')

0
resources/test/fixtures/flake8_executable/EXE003.py vendored Normal file → Executable file
View File

0
resources/test/fixtures/flake8_executable/EXE004_1.py vendored Normal file → Executable file
View File

0
resources/test/fixtures/flake8_executable/EXE004_3.py vendored Normal file → Executable file
View File

0
resources/test/fixtures/flake8_executable/EXE005_1.py vendored Normal file → Executable file
View File

0
resources/test/fixtures/flake8_executable/EXE005_2.py vendored Normal file → Executable file
View File

0
resources/test/fixtures/flake8_executable/EXE005_3.py vendored Normal file → Executable file
View File

View File

@@ -114,3 +114,12 @@ def bar3(x, y, z):
else:
return z
return None
def prompts(self, foo):
if not foo:
return []
for x in foo:
yield x
yield x + 1

View File

@@ -44,6 +44,8 @@ print("foo {} ".format(x))
"{}".format(a)
'({}={{0!e}})'.format(a)
###
# Non-errors
###

View File

@@ -0,0 +1,27 @@
class MyException(Exception):
pass
class MainFunctionFailed(Exception):
pass
def process():
raise MyException
def bad():
try:
process()
except MyException:
raise MainFunctionFailed()
if True:
raise MainFunctionFailed()
def good():
try:
process()
except MyException as ex:
raise MainFunctionFailed() from ex

View File

@@ -0,0 +1,29 @@
class MyException(Exception):
pass
def bad():
try:
a = process()
if not a:
raise MyException(a)
raise MyException(a)
try:
b = process()
if not b:
raise MyException(b)
except Exception:
logger.exception("something failed")
except Exception:
logger.exception("something failed")
def good():
try:
a = process() # This throws the exception now
except MyException:
logger.exception("a failed")
except Exception:
logger.exception("something failed")

View File

@@ -175,6 +175,17 @@
}
]
},
"flake8-implicit-str-concat": {
"description": "Options for the `flake8-implicit-str-concat` plugin.",
"anyOf": [
{
"$ref": "#/definitions/Flake8ImplicitStrConcatOptions"
},
{
"type": "null"
}
]
},
"flake8-import-conventions": {
"description": "Options for the `flake8-import-conventions` plugin.",
"anyOf": [
@@ -626,6 +637,19 @@
},
"additionalProperties": false
},
"Flake8ImplicitStrConcatOptions": {
"type": "object",
"properties": {
"allow-multiline": {
"description": "Whether to allow implicit string concatenations for multiline strings. By default, implicit concatenations of multiline strings are allowed (but continuation lines, delimited with a backslash, are prohibited).",
"type": [
"boolean",
"null"
]
}
},
"additionalProperties": false
},
"Flake8ImportConventionsOptions": {
"type": "object",
"properties": {
@@ -1164,13 +1188,13 @@
"RuleSelector": {
"type": "string",
"enum": [
"ALL",
"A",
"A0",
"A00",
"A001",
"A002",
"A003",
"ALL",
"ANN",
"ANN0",
"ANN00",
@@ -1381,6 +1405,8 @@
"EXE",
"EXE0",
"EXE00",
"EXE001",
"EXE002",
"EXE003",
"EXE004",
"EXE005",
@@ -1458,15 +1484,6 @@
"I00",
"I001",
"I002",
"I2",
"I25",
"I252",
"IC",
"IC0",
"IC001",
"IC002",
"IC003",
"IC004",
"ICN",
"ICN0",
"ICN00",
@@ -1481,9 +1498,6 @@
"ISC001",
"ISC002",
"ISC003",
"M",
"M0",
"M001",
"N",
"N8",
"N80",
@@ -1521,23 +1535,6 @@
"PD9",
"PD90",
"PD901",
"PDV",
"PDV0",
"PDV002",
"PDV003",
"PDV004",
"PDV007",
"PDV008",
"PDV009",
"PDV01",
"PDV010",
"PDV011",
"PDV012",
"PDV013",
"PDV015",
"PDV9",
"PDV90",
"PDV901",
"PGH",
"PGH0",
"PGH00",
@@ -1672,17 +1669,6 @@
"Q001",
"Q002",
"Q003",
"R",
"R5",
"R50",
"R501",
"R502",
"R503",
"R504",
"R505",
"R506",
"R507",
"R508",
"RET",
"RET5",
"RET50",
@@ -1789,35 +1775,16 @@
"TRY004",
"TRY2",
"TRY20",
"TRY200",
"TRY201",
"TRY3",
"TRY30",
"TRY300",
"TRY301",
"TYP",
"TYP0",
"TYP00",
"TYP005",
"U",
"U0",
"U00",
"U001",
"U003",
"U004",
"U005",
"U006",
"U007",
"U008",
"U009",
"U01",
"U010",
"U011",
"U012",
"U013",
"U014",
"U015",
"U016",
"U017",
"U019",
"UP",
"UP0",
"UP00",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.0.231"
version = "0.0.232"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"

View File

@@ -3,11 +3,12 @@ use std::path::PathBuf;
use clap::{command, Parser};
use regex::Regex;
use ruff::logging::LogLevel;
use ruff::registry::{Rule, RuleSelector};
use ruff::registry::Rule;
use ruff::resolver::ConfigProcessor;
use ruff::settings::types::{
FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion, SerializationFormat,
};
use ruff::RuleSelector;
use rustc_hash::FxHashMap;
#[derive(Debug, Parser)]

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.231"
version = "0.0.232"
edition = "2021"
[dependencies]

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_macros"
version = "0.0.231"
version = "0.0.232"
edition = "2021"
[lib]

View File

@@ -53,7 +53,7 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
let rulecodeprefix = super::rule_code_prefix::expand(
&Ident::new("Rule", Span::call_site()),
&Ident::new("RuleSelector", Span::call_site()),
&Ident::new("RuleCodePrefix", Span::call_site()),
mapping.entries.iter().map(|(code, ..)| code),
|code| code_to_name[code],
);

View File

@@ -1,89 +1,9 @@
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::collections::{BTreeMap, BTreeSet};
use once_cell::sync::Lazy;
use proc_macro2::Span;
use quote::quote;
use syn::Ident;
const ALL: &str = "ALL";
/// A hash map from deprecated `RuleSelector` to latest
/// `RuleSelector`.
pub static PREFIX_REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
HashMap::from_iter([
// TODO(charlie): Remove by 2023-01-01.
("U001", "UP001"),
("U003", "UP003"),
("U004", "UP004"),
("U005", "UP005"),
("U006", "UP006"),
("U007", "UP007"),
("U008", "UP008"),
("U009", "UP009"),
("U010", "UP010"),
("U011", "UP011"),
("U012", "UP012"),
("U013", "UP013"),
("U014", "UP014"),
("U015", "UP015"),
("U016", "UP016"),
("U017", "UP017"),
("U019", "UP019"),
// TODO(charlie): Remove by 2023-02-01.
("I252", "TID252"),
("M001", "RUF100"),
// TODO(charlie): Remove by 2023-02-01.
("PDV002", "PD002"),
("PDV003", "PD003"),
("PDV004", "PD004"),
("PDV007", "PD007"),
("PDV008", "PD008"),
("PDV009", "PD009"),
("PDV010", "PD010"),
("PDV011", "PD011"),
("PDV012", "PD012"),
("PDV013", "PD013"),
("PDV015", "PD015"),
("PDV901", "PD901"),
// TODO(charlie): Remove by 2023-02-01.
("R501", "RET501"),
("R502", "RET502"),
("R503", "RET503"),
("R504", "RET504"),
("R505", "RET505"),
("R506", "RET506"),
("R507", "RET507"),
("R508", "RET508"),
("IC001", "ICN001"),
("IC002", "ICN001"),
("IC003", "ICN001"),
("IC004", "ICN001"),
// TODO(charlie): Remove by 2023-01-01.
("U", "UP"),
("U0", "UP0"),
("U00", "UP00"),
("U01", "UP01"),
// TODO(charlie): Remove by 2023-02-01.
("I2", "TID2"),
("I25", "TID25"),
("M", "RUF100"),
("M0", "RUF100"),
// TODO(charlie): Remove by 2023-02-01.
("PDV", "PD"),
("PDV0", "PD0"),
("PDV01", "PD01"),
("PDV9", "PD9"),
("PDV90", "PD90"),
// TODO(charlie): Remove by 2023-02-01.
("R", "RET"),
("R5", "RET5"),
("R50", "RET50"),
// TODO(charlie): Remove by 2023-02-01.
("IC", "ICN"),
("IC0", "ICN0"),
])
});
pub fn expand<'a>(
rule_type: &Ident,
prefix_ident: &Ident,
@@ -116,20 +36,8 @@ pub fn expand<'a>(
all_codes.insert(code_str);
}
prefix_to_codes.insert(ALL.to_string(), all_codes);
prefix_to_codes.insert("PL".to_string(), pl_codes);
// Add any prefix aliases (e.g., "U" to "UP").
for (alias, rule_code) in PREFIX_REDIRECTS.iter() {
prefix_to_codes.insert(
(*alias).to_string(),
prefix_to_codes
.get(*rule_code)
.unwrap_or_else(|| panic!("Unknown RuleCode: {alias:?}"))
.clone(),
);
}
let prefix_variants = prefix_to_codes.keys().map(|prefix| {
let prefix = Ident::new(prefix, Span::call_site());
quote! {
@@ -139,26 +47,9 @@ pub fn expand<'a>(
let prefix_impl = generate_impls(rule_type, prefix_ident, &prefix_to_codes, variant_name);
let prefix_redirects = PREFIX_REDIRECTS.iter().map(|(alias, rule_code)| {
let code = Ident::new(rule_code, Span::call_site());
quote! {
(#alias, #prefix_ident::#code)
}
});
quote! {
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub enum SuffixLength {
None,
Zero,
One,
Two,
Three,
Four,
Five,
}
#[derive(
::strum_macros::EnumIter,
::strum_macros::EnumString,
::strum_macros::AsRefStr,
Debug,
@@ -167,22 +58,15 @@ pub fn expand<'a>(
PartialOrd,
Ord,
Clone,
Hash,
::serde::Serialize,
::serde::Deserialize,
::schemars::JsonSchema,
)]
pub enum #prefix_ident {
#(#prefix_variants,)*
}
#prefix_impl
/// A hash map from deprecated `RuleSelector` to latest `RuleSelector`.
pub static PREFIX_REDIRECTS: ::once_cell::sync::Lazy<::rustc_hash::FxHashMap<&'static str, #prefix_ident>> = ::once_cell::sync::Lazy::new(|| {
::rustc_hash::FxHashMap::from_iter([
#(#prefix_redirects),*
])
});
}
}
@@ -200,64 +84,37 @@ fn generate_impls<'a>(
}
});
let prefix = Ident::new(prefix_str, Span::call_site());
if let Some(target) = PREFIX_REDIRECTS.get(prefix_str.as_str()) {
quote! {
#prefix_ident::#prefix => {
crate::warn_user_once!(
"`{}` has been remapped to `{}`", #prefix_str, #target
);
vec![#(#codes),*].into_iter()
}
}
} else {
quote! {
#prefix_ident::#prefix => vec![#(#codes),*].into_iter(),
}
quote! {
#prefix_ident::#prefix => vec![#(#codes),*].into_iter(),
}
});
let specificity_match_arms = prefix_to_codes.keys().map(|prefix_str| {
let prefix = Ident::new(prefix_str, Span::call_site());
if prefix_str == ALL {
quote! {
#prefix_ident::#prefix => SuffixLength::None,
}
} else {
let mut num_numeric = prefix_str.chars().filter(|char| char.is_numeric()).count();
if prefix_str != "PL" && prefix_str.starts_with("PL") {
num_numeric += 1;
}
let suffix_len = match num_numeric {
0 => quote! { SuffixLength::Zero },
1 => quote! { SuffixLength::One },
2 => quote! { SuffixLength::Two },
3 => quote! { SuffixLength::Three },
4 => quote! { SuffixLength::Four },
5 => quote! { SuffixLength::Five },
_ => panic!("Invalid prefix: {prefix}"),
};
quote! {
#prefix_ident::#prefix => #suffix_len,
}
let mut num_numeric = prefix_str.chars().filter(|char| char.is_numeric()).count();
if prefix_str != "PL" && prefix_str.starts_with("PL") {
num_numeric += 1;
}
});
let categories = prefix_to_codes.keys().map(|prefix_str| {
if prefix_str.chars().all(char::is_alphabetic)
&& !PREFIX_REDIRECTS.contains_key(&prefix_str.as_str())
{
let prefix = Ident::new(prefix_str, Span::call_site());
quote! {
#prefix_ident::#prefix,
}
} else {
quote! {}
let suffix_len = match num_numeric {
0 => quote! { Specificity::Linter },
1 => quote! { Specificity::Code1Char },
2 => quote! { Specificity::Code2Chars },
3 => quote! { Specificity::Code3Chars },
4 => quote! { Specificity::Code4Chars },
5 => quote! { Specificity::Code5Chars },
_ => panic!("Invalid prefix: {prefix}"),
};
quote! {
#prefix_ident::#prefix => #suffix_len,
}
});
quote! {
impl #prefix_ident {
pub fn specificity(&self) -> SuffixLength {
pub(crate) fn specificity(&self) -> crate::rule_selector::Specificity {
use crate::rule_selector::Specificity;
#[allow(clippy::match_same_arms)]
match self {
#(#specificity_match_arms)*
@@ -270,15 +127,11 @@ fn generate_impls<'a>(
type IntoIter = ::std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
use colored::Colorize;
#[allow(clippy::match_same_arms)]
match self {
#(#into_iter_match_arms)*
}
}
}
pub const CATEGORIES: &[#prefix_ident] = &[#(#categories)*];
}
}

View File

@@ -84,14 +84,14 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
if field != "Pycodestyle" {
into_iter_match_arms.extend(quote! {
#ident::#field => RuleSelector::#prefix_ident.into_iter(),
#ident::#field => RuleCodePrefix::#prefix_ident.into_iter(),
});
}
}
into_iter_match_arms.extend(quote! {
#ident::Pycodestyle => {
let rules: Vec<_> = (&RuleSelector::E).into_iter().chain(&RuleSelector::W).collect();
let rules: Vec<_> = (&RuleCodePrefix::E).into_iter().chain(&RuleCodePrefix::W).collect();
rules.into_iter()
}
});

View File

@@ -1582,6 +1582,9 @@ where
if self.settings.rules.enabled(&Rule::VerboseRaise) {
tryceratops::rules::verbose_raise(self, handlers);
}
if self.settings.rules.enabled(&Rule::RaiseWithinTry) {
tryceratops::rules::raise_within_try(self, body);
}
}
StmtKind::Assign { targets, value, .. } => {
if self.settings.rules.enabled(&Rule::DoNotAssignLambda) {
@@ -3357,6 +3360,9 @@ where
body,
);
}
if self.settings.rules.enabled(&Rule::ReraiseNoCause) {
tryceratops::rules::reraise_no_cause(self, body);
}
match name {
Some(name) => {
if self.settings.rules.enabled(&Rule::AmbiguousVariableName) {

View File

@@ -1,8 +1,12 @@
//! Lint rules based on checking raw physical lines.
use std::path::Path;
use crate::registry::{Diagnostic, Rule};
use crate::rules::flake8_executable::helpers::extract_shebang;
use crate::rules::flake8_executable::rules::{shebang_newline, shebang_python, shebang_whitespace};
use crate::rules::flake8_executable::helpers::{extract_shebang, ShebangDirective};
use crate::rules::flake8_executable::rules::{
shebang_missing, shebang_newline, shebang_not_executable, shebang_python, shebang_whitespace,
};
use crate::rules::pycodestyle::rules::{
doc_line_too_long, line_too_long, mixed_spaces_and_tabs, no_newline_at_end_of_file,
};
@@ -11,6 +15,7 @@ use crate::rules::pyupgrade::rules::unnecessary_coding_comment;
use crate::settings::{flags, Settings};
pub fn check_lines(
path: &Path,
contents: &str,
commented_lines: &[usize],
doc_lines: &[usize],
@@ -18,8 +23,11 @@ pub fn check_lines(
autofix: flags::Autofix,
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let mut has_any_shebang = false;
let enforce_blanket_noqa = settings.rules.enabled(&Rule::BlanketNOQA);
let enforce_shebang_not_executable = settings.rules.enabled(&Rule::ShebangNotExecutable);
let enforce_shebang_missing = settings.rules.enabled(&Rule::ShebangMissingExecutableFile);
let enforce_shebang_whitespace = settings.rules.enabled(&Rule::ShebangWhitespace);
let enforce_shebang_newline = settings.rules.enabled(&Rule::ShebangNewline);
let enforce_shebang_python = settings.rules.enabled(&Rule::ShebangPython);
@@ -68,8 +76,23 @@ pub fn check_lines(
}
}
if enforce_shebang_whitespace || enforce_shebang_newline || enforce_shebang_python {
if enforce_shebang_missing
|| enforce_shebang_not_executable
|| enforce_shebang_whitespace
|| enforce_shebang_newline
|| enforce_shebang_python
{
let shebang = extract_shebang(line);
if enforce_shebang_not_executable {
if let Some(diagnostic) = shebang_not_executable(path, index, &shebang) {
diagnostics.push(diagnostic);
}
}
if enforce_shebang_missing {
if !has_any_shebang && matches!(shebang, ShebangDirective::Match(_, _, _, _)) {
has_any_shebang = true;
}
}
if enforce_shebang_whitespace {
if let Some(diagnostic) =
shebang_whitespace(index, &shebang, fix_shebang_whitespace)
@@ -124,12 +147,20 @@ pub fn check_lines(
}
}
if enforce_shebang_missing && !has_any_shebang {
if let Some(diagnostic) = shebang_missing(path) {
diagnostics.push(diagnostic);
}
}
diagnostics
}
#[cfg(test)]
mod tests {
use std::path::Path;
use super::check_lines;
use crate::registry::Rule;
use crate::settings::{flags, Settings};
@@ -139,6 +170,7 @@ mod tests {
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
let check_with_max_line_length = |line_length: usize| {
check_lines(
Path::new("foo.py"),
line,
&[],
&[],

View File

@@ -6,7 +6,8 @@ use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::fix::Fix;
use crate::noqa::{is_file_exempt, Directive};
use crate::registry::{Diagnostic, DiagnosticKind, Rule, CODE_REDIRECTS};
use crate::registry::{Diagnostic, DiagnosticKind, Rule};
use crate::rule_redirects::get_redirect_target;
use crate::settings::{flags, Settings};
use crate::violations::UnusedCodes;
use crate::{noqa, violations};
@@ -123,7 +124,7 @@ pub fn check_noqa(
let mut valid_codes = vec![];
let mut self_ignore = false;
for code in codes {
let code = CODE_REDIRECTS.get(code).map_or(code, |r| r.code());
let code = get_redirect_target(code).unwrap_or(code);
if code == Rule::UnusedNOQA.code() {
self_ignore = true;
break;

View File

@@ -124,9 +124,12 @@ pub fn check_tokens(
// ISC001, ISC002
if enforce_implicit_string_concatenation {
diagnostics.extend(
flake8_implicit_str_concat::rules::implicit(tokens)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
flake8_implicit_str_concat::rules::implicit(
tokens,
&settings.flake8_implicit_str_concat,
)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);
}

View File

@@ -1,12 +1,12 @@
use std::collections::{BTreeSet, HashMap};
use anyhow::Result;
use colored::Colorize;
use super::external_config::ExternalConfig;
use super::plugin::Plugin;
use super::{parser, plugin};
use crate::registry::RuleSelector;
use crate::registry::RuleCodePrefix;
use crate::rule_selector::{prefix_to_selector, RuleSelector};
use crate::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
@@ -21,6 +21,12 @@ use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
use crate::warn_user;
const DEFAULT_SELECTORS: &[RuleSelector] = &[
prefix_to_selector(RuleCodePrefix::F),
prefix_to_selector(RuleCodePrefix::E),
prefix_to_selector(RuleCodePrefix::W),
];
pub fn convert(
config: &HashMap<String, HashMap<String, Option<String>>>,
external_config: &ExternalConfig,
@@ -76,7 +82,7 @@ pub fn convert(
.as_ref()
.map(|value| BTreeSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_else(|| plugin::resolve_select(&plugins));
.unwrap_or_else(|| resolve_select(&plugins));
let mut ignore = flake8
.get("ignore")
.and_then(|value| {
@@ -406,21 +412,47 @@ pub fn convert(
Ok(Pyproject::new(options))
}
/// Resolve the set of enabled `RuleSelector` values for the given
/// plugins.
fn resolve_select(plugins: &[Plugin]) -> BTreeSet<RuleSelector> {
let mut select: BTreeSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
select.extend(plugins.iter().map(Plugin::selector));
select
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use anyhow::Result;
use itertools::Itertools;
use super::super::plugin::Plugin;
use super::convert;
use crate::flake8_to_ruff::converter::DEFAULT_SELECTORS;
use crate::flake8_to_ruff::ExternalConfig;
use crate::registry::RuleSelector;
use crate::registry::RuleCodePrefix;
use crate::rule_selector::RuleSelector;
use crate::rules::pydocstyle::settings::Convention;
use crate::rules::{flake8_quotes, pydocstyle};
use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
fn default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> Options {
Options {
ignore: Some(vec![]),
select: Some(
DEFAULT_SELECTORS
.iter()
.cloned()
.chain(plugins)
.sorted()
.collect(),
),
..Options::default()
}
}
#[test]
fn it_converts_empty() -> Result<()> {
let actual = convert(
@@ -428,55 +460,7 @@ mod tests {
&ExternalConfig::default(),
None,
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: None,
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![RuleSelector::E, RuleSelector::F, RuleSelector::W]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: None,
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: None,
pylint: None,
pyupgrade: None,
});
let expected = Pyproject::new(default_options([]));
assert_eq!(actual, expected);
Ok(())
@@ -493,53 +477,8 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: Some(100),
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![RuleSelector::E, RuleSelector::F, RuleSelector::W]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: None,
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: None,
pylint: None,
pyupgrade: None,
..default_options([])
});
assert_eq!(actual, expected);
@@ -557,53 +496,8 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: Some(100),
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![RuleSelector::E, RuleSelector::F, RuleSelector::W]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: None,
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: None,
pylint: None,
pyupgrade: None,
..default_options([])
});
assert_eq!(actual, expected);
@@ -620,55 +514,7 @@ mod tests {
&ExternalConfig::default(),
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: None,
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![RuleSelector::E, RuleSelector::F, RuleSelector::W]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: None,
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: None,
pylint: None,
pyupgrade: None,
});
let expected = Pyproject::new(default_options([]));
assert_eq!(actual, expected);
Ok(())
@@ -685,58 +531,13 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: None,
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![RuleSelector::E, RuleSelector::F, RuleSelector::W]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: None,
pylint: None,
pyupgrade: None,
..default_options([])
});
assert_eq!(actual, expected);
@@ -757,60 +558,10 @@ mod tests {
Some(vec![Plugin::Flake8Docstrings]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: None,
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![
RuleSelector::D,
RuleSelector::E,
RuleSelector::F,
RuleSelector::W,
]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: None,
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: Some(pydocstyle::settings::Options {
convention: Some(Convention::Numpy),
}),
pylint: None,
pyupgrade: None,
..default_options([RuleCodePrefix::D.into()])
});
assert_eq!(actual, expected);
@@ -828,63 +579,13 @@ mod tests {
None,
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
extend: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
external: None,
fix: None,
fix_only: None,
fixable: None,
format: None,
force_exclude: None,
ignore: Some(vec![]),
ignore_init_module_imports: None,
line_length: None,
namespace_packages: None,
per_file_ignores: None,
required_version: None,
respect_gitignore: None,
select: Some(vec![
RuleSelector::E,
RuleSelector::F,
RuleSelector::Q,
RuleSelector::W,
]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
typing_modules: None,
task_tags: None,
update_check: None,
flake8_annotations: None,
flake8_bandit: None,
flake8_bugbear: None,
flake8_builtins: None,
flake8_errmsg: None,
flake8_pytest_style: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
flake8_tidy_imports: None,
flake8_import_conventions: None,
flake8_unused_arguments: None,
isort: None,
mccabe: None,
pep8_naming: None,
pycodestyle: None,
pydocstyle: None,
pylint: None,
pyupgrade: None,
..default_options([RuleCodePrefix::Q.into()])
});
assert_eq!(actual, expected);

View File

@@ -1,12 +1,11 @@
use std::str::FromStr;
use anyhow::{bail, Result};
use colored::Colorize;
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;
use crate::registry::{RuleSelector, PREFIX_REDIRECTS};
use crate::rule_selector::RuleSelector;
use crate::settings::types::PatternPrefixPair;
use crate::warn_user;
@@ -21,9 +20,7 @@ pub fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
if code.is_empty() {
continue;
}
if let Some(code) = PREFIX_REDIRECTS.get(code) {
codes.push(code.clone());
} else if let Ok(code) = RuleSelector::from_str(code) {
if let Ok(code) = RuleSelector::from_str(code) {
codes.push(code);
} else {
warn_user!("Unsupported prefix code: {code}");
@@ -89,14 +86,7 @@ impl State {
fn parse(&self) -> Vec<PatternPrefixPair> {
let mut codes: Vec<PatternPrefixPair> = vec![];
for code in &self.codes {
if let Some(code) = PREFIX_REDIRECTS.get(code.as_str()) {
for filename in &self.filenames {
codes.push(PatternPrefixPair {
pattern: filename.clone(),
prefix: code.clone(),
});
}
} else if let Ok(code) = RuleSelector::from_str(code) {
if let Ok(code) = RuleSelector::from_str(code) {
for filename in &self.filenames {
codes.push(PatternPrefixPair {
pattern: filename.clone(),
@@ -206,7 +196,8 @@ mod tests {
use anyhow::Result;
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
use crate::registry::RuleSelector;
use crate::registry::RuleCodePrefix;
use crate::rule_selector::RuleSelector;
use crate::settings::types::PatternPrefixPair;
#[test]
@@ -220,19 +211,19 @@ mod tests {
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401");
let expected = vec![RuleSelector::F401];
let expected = vec![RuleCodePrefix::F401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,");
let expected = vec![RuleSelector::F401];
let expected = vec![RuleCodePrefix::F401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,E501");
let expected = vec![RuleSelector::F401, RuleSelector::E501];
let expected = vec![RuleCodePrefix::F401.into(), RuleCodePrefix::E501.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401, E501");
let expected = vec![RuleSelector::F401, RuleSelector::E501];
let expected = vec![RuleCodePrefix::F401.into(), RuleCodePrefix::E501.into()];
assert_eq!(actual, expected);
}
@@ -285,11 +276,11 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "locust/test/*".to_string(),
prefix: RuleSelector::F841,
prefix: RuleCodePrefix::F841.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: RuleSelector::F841,
prefix: RuleCodePrefix::F841.into(),
},
];
assert_eq!(actual, expected);
@@ -305,23 +296,23 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "t/*".to_string(),
prefix: RuleSelector::D,
prefix: RuleCodePrefix::D.into(),
},
PatternPrefixPair {
pattern: "setup.py".to_string(),
prefix: RuleSelector::D,
prefix: RuleCodePrefix::D.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: RuleSelector::D,
prefix: RuleCodePrefix::D.into(),
},
PatternPrefixPair {
pattern: "docs/*".to_string(),
prefix: RuleSelector::D,
prefix: RuleCodePrefix::D.into(),
},
PatternPrefixPair {
pattern: "extra/*".to_string(),
prefix: RuleSelector::D,
prefix: RuleCodePrefix::D.into(),
},
];
assert_eq!(actual, expected);
@@ -343,47 +334,47 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "scrapy/__init__.py".to_string(),
prefix: RuleSelector::E402,
prefix: RuleCodePrefix::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
prefix: RuleSelector::F401,
prefix: RuleCodePrefix::F401.into(),
},
PatternPrefixPair {
pattern: "scrapy/http/__init__.py".to_string(),
prefix: RuleSelector::F401,
prefix: RuleCodePrefix::F401.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: RuleSelector::E402,
prefix: RuleCodePrefix::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: RuleSelector::F401,
prefix: RuleCodePrefix::F401.into(),
},
PatternPrefixPair {
pattern: "scrapy/selector/__init__.py".to_string(),
prefix: RuleSelector::F401,
prefix: RuleCodePrefix::F401.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: RuleSelector::E402,
prefix: RuleCodePrefix::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: RuleSelector::F401,
prefix: RuleCodePrefix::F401.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: RuleSelector::F403,
prefix: RuleCodePrefix::F403.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: RuleSelector::F405,
prefix: RuleCodePrefix::F405.into(),
},
PatternPrefixPair {
pattern: "tests/test_loader.py".to_string(),
prefix: RuleSelector::E741,
prefix: RuleCodePrefix::E741.into(),
},
];
assert_eq!(actual, expected);

View File

@@ -4,32 +4,44 @@ use std::str::FromStr;
use anyhow::anyhow;
use crate::registry::RuleSelector;
use crate::registry::RuleCodePrefix;
use crate::rule_selector::RuleSelector;
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
Flake82020,
Flake8Annotations,
Flake8Bandit,
Flake8BlindExcept,
Flake8BooleanTrap,
Flake8Bugbear,
Flake8Builtins,
Flake8Commas,
Flake8Comprehensions,
Flake8Datetimez,
Flake8Debugger,
Flake8Docstrings,
Flake8Eradicate,
Flake8ErrMsg,
Flake8Executable,
Flake8ImplicitStrConcat,
Flake8ImportConventions,
Flake8NoPep420,
Flake8Pie,
Flake8Print,
Flake8PytestStyle,
Flake8Quotes,
Flake8Return,
Flake8Simplify,
Flake8TidyImports,
Flake8TypeChecking,
Flake8UnusedArguments,
Flake8UsePathlib,
McCabe,
PEP8Naming,
PandasVet,
Pyupgrade,
Tryceratops,
}
impl FromStr for Plugin {
@@ -37,28 +49,39 @@ impl FromStr for Plugin {
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"flake8-2020" => Ok(Plugin::Flake82020),
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
"flake8-commas" => Ok(Plugin::Flake8Commas),
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
"flake8-executable" => Ok(Plugin::Flake8Executable),
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
"flake8-pie" => Ok(Plugin::Flake8Pie),
"flake8-print" => Ok(Plugin::Flake8Print),
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
"flake8-return" => Ok(Plugin::Flake8Return),
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
"mccabe" => Ok(Plugin::McCabe),
"pandas-vet" => Ok(Plugin::PandasVet),
"pep8-naming" => Ok(Plugin::PEP8Naming),
"pandas-vet" => Ok(Plugin::PandasVet),
"pyupgrade" => Ok(Plugin::Pyupgrade),
"tryceratops" => Ok(Plugin::Tryceratops),
_ => Err(anyhow!("Unknown plugin: {string}")),
}
}
@@ -70,60 +93,81 @@ impl fmt::Debug for Plugin {
f,
"{}",
match self {
Plugin::Flake82020 => "flake8-2020",
Plugin::Flake8Annotations => "flake8-annotations",
Plugin::Flake8Bandit => "flake8-bandit",
Plugin::Flake8BlindExcept => "flake8-blind-except",
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
Plugin::Flake8Bugbear => "flake8-bugbear",
Plugin::Flake8Builtins => "flake8-builtins",
Plugin::Flake8Commas => "flake8-commas",
Plugin::Flake8Comprehensions => "flake8-comprehensions",
Plugin::Flake8Datetimez => "flake8-datetimez",
Plugin::Flake8Debugger => "flake8-debugger",
Plugin::Flake8Docstrings => "flake8-docstrings",
Plugin::Flake8Eradicate => "flake8-eradicate",
Plugin::Flake8ErrMsg => "flake8-errmsg",
Plugin::Flake8Executable => "flake8-executable",
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
Plugin::Flake8ImportConventions => "flake8-import-conventions",
Plugin::Flake8NoPep420 => "flake8-no-pep420",
Plugin::Flake8Pie => "flake8-pie",
Plugin::Flake8Print => "flake8-print",
Plugin::Flake8PytestStyle => "flake8-pytest-style",
Plugin::Flake8Quotes => "flake8-quotes",
Plugin::Flake8Return => "flake8-return",
Plugin::Flake8Simplify => "flake8-simplify",
Plugin::Flake8TidyImports => "flake8-tidy-imports",
Plugin::Flake8TypeChecking => "flake8-type-checking",
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
Plugin::McCabe => "mccabe",
Plugin::PEP8Naming => "pep8-naming",
Plugin::PandasVet => "pandas-vet",
Plugin::Pyupgrade => "pyupgrade",
Plugin::Tryceratops => "tryceratops",
}
)
}
}
// TODO(martin): Convert into `impl From<Plugin> for Linter`
impl Plugin {
pub fn selector(&self) -> RuleSelector {
match self {
Plugin::Flake8Annotations => RuleSelector::ANN,
Plugin::Flake8Bandit => RuleSelector::S,
// TODO(charlie): Handle rename of `B` to `BLE`.
Plugin::Flake8BlindExcept => RuleSelector::BLE,
Plugin::Flake8Bugbear => RuleSelector::B,
Plugin::Flake8Builtins => RuleSelector::A,
Plugin::Flake8Comprehensions => RuleSelector::C4,
Plugin::Flake8Datetimez => RuleSelector::DTZ,
Plugin::Flake8Debugger => RuleSelector::T1,
Plugin::Flake8Docstrings => RuleSelector::D,
// TODO(charlie): Handle rename of `E` to `ERA`.
Plugin::Flake8Eradicate => RuleSelector::ERA,
Plugin::Flake8ErrMsg => RuleSelector::EM,
Plugin::Flake8ImplicitStrConcat => RuleSelector::ISC,
Plugin::Flake8Print => RuleSelector::T2,
Plugin::Flake8PytestStyle => RuleSelector::PT,
Plugin::Flake8Quotes => RuleSelector::Q,
Plugin::Flake8Return => RuleSelector::RET,
Plugin::Flake8Simplify => RuleSelector::SIM,
Plugin::Flake8TidyImports => RuleSelector::TID25,
Plugin::McCabe => RuleSelector::C9,
Plugin::PandasVet => RuleSelector::PD,
Plugin::PEP8Naming => RuleSelector::N,
Plugin::Pyupgrade => RuleSelector::UP,
Plugin::Flake82020 => RuleCodePrefix::YTT.into(),
Plugin::Flake8Annotations => RuleCodePrefix::ANN.into(),
Plugin::Flake8Bandit => RuleCodePrefix::S.into(),
Plugin::Flake8BlindExcept => RuleCodePrefix::BLE.into(),
Plugin::Flake8BooleanTrap => RuleCodePrefix::FBT.into(),
Plugin::Flake8Bugbear => RuleCodePrefix::B.into(),
Plugin::Flake8Builtins => RuleCodePrefix::A.into(),
Plugin::Flake8Commas => RuleCodePrefix::COM.into(),
Plugin::Flake8Comprehensions => RuleCodePrefix::C4.into(),
Plugin::Flake8Datetimez => RuleCodePrefix::DTZ.into(),
Plugin::Flake8Debugger => RuleCodePrefix::T1.into(),
Plugin::Flake8Docstrings => RuleCodePrefix::D.into(),
Plugin::Flake8Eradicate => RuleCodePrefix::ERA.into(),
Plugin::Flake8ErrMsg => RuleCodePrefix::EM.into(),
Plugin::Flake8Executable => RuleCodePrefix::EXE.into(),
Plugin::Flake8ImplicitStrConcat => RuleCodePrefix::ISC.into(),
Plugin::Flake8ImportConventions => RuleCodePrefix::ICN.into(),
Plugin::Flake8NoPep420 => RuleCodePrefix::INP.into(),
Plugin::Flake8Pie => RuleCodePrefix::PIE.into(),
Plugin::Flake8Print => RuleCodePrefix::T2.into(),
Plugin::Flake8PytestStyle => RuleCodePrefix::PT.into(),
Plugin::Flake8Quotes => RuleCodePrefix::Q.into(),
Plugin::Flake8Return => RuleCodePrefix::RET.into(),
Plugin::Flake8Simplify => RuleCodePrefix::SIM.into(),
Plugin::Flake8TidyImports => RuleCodePrefix::TID.into(),
Plugin::Flake8TypeChecking => RuleCodePrefix::TYP.into(),
Plugin::Flake8UnusedArguments => RuleCodePrefix::ARG.into(),
Plugin::Flake8UsePathlib => RuleCodePrefix::PTH.into(),
Plugin::McCabe => RuleCodePrefix::C9.into(),
Plugin::PEP8Naming => RuleCodePrefix::N.into(),
Plugin::PandasVet => RuleCodePrefix::PD.into(),
Plugin::Pyupgrade => RuleCodePrefix::UP.into(),
Plugin::Tryceratops => RuleCodePrefix::TRY.into(),
}
}
}
@@ -250,26 +294,40 @@ pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> V
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub fn infer_plugins_from_codes(selectors: &BTreeSet<RuleSelector>) -> Vec<Plugin> {
// Ignore cases in which we've knowingly changed rule prefixes.
[
Plugin::Flake82020,
Plugin::Flake8Annotations,
Plugin::Flake8Bandit,
Plugin::Flake8BlindExcept,
// Plugin::Flake8BlindExcept,
Plugin::Flake8BooleanTrap,
Plugin::Flake8Bugbear,
Plugin::Flake8Builtins,
// Plugin::Flake8Commas,
Plugin::Flake8Comprehensions,
Plugin::Flake8Datetimez,
Plugin::Flake8Debugger,
Plugin::Flake8Docstrings,
Plugin::Flake8Eradicate,
// Plugin::Flake8Eradicate,
Plugin::Flake8ErrMsg,
Plugin::Flake8Executable,
Plugin::Flake8ImplicitStrConcat,
// Plugin::Flake8ImportConventions,
Plugin::Flake8NoPep420,
Plugin::Flake8Pie,
Plugin::Flake8Print,
Plugin::Flake8PytestStyle,
Plugin::Flake8Quotes,
Plugin::Flake8Return,
Plugin::Flake8Simplify,
Plugin::Flake8TidyImports,
Plugin::PandasVet,
// Plugin::Flake8TidyImports,
// Plugin::Flake8TypeChecking,
Plugin::Flake8UnusedArguments,
// Plugin::Flake8UsePathlib,
Plugin::McCabe,
Plugin::PEP8Naming,
Plugin::PandasVet,
Plugin::Tryceratops,
]
.into_iter()
.filter(|plugin| {
@@ -286,14 +344,6 @@ pub fn infer_plugins_from_codes(selectors: &BTreeSet<RuleSelector>) -> Vec<Plugi
.collect()
}
/// Resolve the set of enabled `RuleSelector` values for the given
/// plugins.
pub fn resolve_select(plugins: &[Plugin]) -> BTreeSet<RuleSelector> {
let mut select = BTreeSet::from([RuleSelector::F, RuleSelector::E, RuleSelector::W]);
select.extend(plugins.iter().map(Plugin::selector));
select
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;

View File

@@ -19,6 +19,6 @@ pub struct Pyproject {
pub fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
let contents = std::fs::read_to_string(path)?;
let pyproject = toml_edit::easy::from_str::<Pyproject>(&contents)?;
let pyproject = toml::from_str::<Pyproject>(&contents)?;
Ok(pyproject)
}

View File

@@ -37,6 +37,8 @@ mod noqa;
mod python;
pub mod registry;
pub mod resolver;
mod rule_redirects;
mod rule_selector;
mod rules;
mod rustpython_helpers;
pub mod settings;
@@ -47,6 +49,7 @@ mod violations;
mod visibility;
use cfg_if::cfg_if;
pub use rule_selector::RuleSelector;
pub use violation::{AutofixKind, Availability as AutofixAvailability};
pub use violations::IOError;

View File

@@ -10,9 +10,9 @@ use crate::linter::check_path;
use crate::registry::Rule;
use crate::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_errmsg,
flake8_import_conventions, flake8_pytest_style, flake8_quotes, flake8_tidy_imports,
flake8_unused_arguments, isort, mccabe, pep8_naming, pycodestyle, pydocstyle, pylint,
pyupgrade,
flake8_implicit_str_concat, flake8_import_conventions, flake8_pytest_style, flake8_quotes,
flake8_tidy_imports, flake8_unused_arguments, isort, mccabe, pep8_naming, pycodestyle,
pydocstyle, pylint, pyupgrade,
};
use crate::rustpython_helpers::tokenize;
use crate::settings::configuration::Configuration;
@@ -142,6 +142,9 @@ pub fn defaultSettings() -> Result<JsValue, JsValue> {
flake8_pytest_style: Some(flake8_pytest_style::settings::Settings::default().into()),
flake8_quotes: Some(flake8_quotes::settings::Settings::default().into()),
flake8_tidy_imports: Some(flake8_tidy_imports::Settings::default().into()),
flake8_implicit_str_concat: Some(
flake8_implicit_str_concat::settings::Settings::default().into(),
),
flake8_import_conventions: Some(
flake8_import_conventions::settings::Settings::default().into(),
),

View File

@@ -141,6 +141,7 @@ pub fn check_path(
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Lines))
{
diagnostics.extend(check_lines(
path,
contents,
indexer.commented_lines(),
&doc_lines,

View File

@@ -4,6 +4,7 @@ use fern;
#[macro_export]
macro_rules! warn_user_once {
($($arg:tt)*) => {
use colored::Colorize;
static WARNED: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false);
if !WARNED.swap(true, std::sync::atomic::Ordering::SeqCst) {
let message = format!("{}", format_args!($($arg)*));
@@ -20,6 +21,7 @@ macro_rules! warn_user_once {
#[macro_export]
macro_rules! warn_user {
($($arg:tt)*) => {
use colored::Colorize;
let message = format!("{}", format_args!($($arg)*));
eprintln!(
"{}{} {}",

View File

@@ -8,7 +8,8 @@ use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::registry::{Diagnostic, Rule, CODE_REDIRECTS};
use crate::registry::{Diagnostic, Rule};
use crate::rule_redirects::get_redirect_target;
use crate::settings::hashable::HashableHashSet;
use crate::source_code::LineEnding;
@@ -71,13 +72,9 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
/// thereof).
pub fn includes(needle: &Rule, haystack: &[&str]) -> bool {
let needle: &str = needle.code();
haystack.iter().any(|candidate| {
if let Some(candidate) = CODE_REDIRECTS.get(candidate) {
needle == candidate.code()
} else {
&needle == candidate
}
})
haystack
.iter()
.any(|candidate| needle == get_redirect_target(candidate).unwrap_or(candidate))
}
pub fn add_noqa(

View File

@@ -1,14 +1,13 @@
//! Registry of [`Rule`] to [`DiagnosticKind`] mappings.
use once_cell::sync::Lazy;
use ruff_macros::RuleNamespace;
use rustc_hash::FxHashMap;
use rustpython_parser::ast::Location;
use serde::{Deserialize, Serialize};
use strum_macros::{AsRefStr, EnumIter};
use crate::ast::types::Range;
use crate::fix::Fix;
use crate::rule_selector::{prefix_to_selector, RuleSelector};
use crate::violation::Violation;
use crate::{rules, violations};
@@ -424,6 +423,8 @@ ruff_macros::define_rule_mapping!(
// flake8-no-pep420
INP001 => violations::ImplicitNamespacePackage,
// flake8-executable
EXE001 => rules::flake8_executable::rules::ShebangNotExecutable,
EXE002 => rules::flake8_executable::rules::ShebangMissingExecutableFile,
EXE003 => rules::flake8_executable::rules::ShebangPython,
EXE004 => rules::flake8_executable::rules::ShebangWhitespace,
EXE005 => rules::flake8_executable::rules::ShebangNewline,
@@ -431,8 +432,10 @@ ruff_macros::define_rule_mapping!(
TYP005 => rules::flake8_type_checking::rules::EmptyTypeCheckingBlock,
// tryceratops
TRY004 => rules::tryceratops::rules::PreferTypeError,
TRY200 => rules::tryceratops::rules::ReraiseNoCause,
TRY201 => rules::tryceratops::rules::VerboseRaise,
TRY300 => rules::tryceratops::rules::TryConsiderElse,
TRY301 => rules::tryceratops::rules::RaiseWithinTry,
// flake8-use-pathlib
PTH100 => rules::flake8_use_pathlib::violations::PathlibAbspath,
PTH101 => rules::flake8_use_pathlib::violations::PathlibChmod,
@@ -603,19 +606,25 @@ pub trait RuleNamespace: Sized {
/// The prefix, name and selector for an upstream linter category.
pub struct LinterCategory(pub &'static str, pub &'static str, pub RuleSelector);
// TODO(martin): Move these constant definitions back to Linter::categories impl
// once RuleSelector is an enum with a Linter variant
const PYCODESTYLE_CATEGORIES: &[LinterCategory] = &[
LinterCategory("E", "Error", prefix_to_selector(RuleCodePrefix::E)),
LinterCategory("W", "Warning", prefix_to_selector(RuleCodePrefix::W)),
];
const PYLINT_CATEGORIES: &[LinterCategory] = &[
LinterCategory("PLC", "Convention", prefix_to_selector(RuleCodePrefix::PLC)),
LinterCategory("PLE", "Error", prefix_to_selector(RuleCodePrefix::PLE)),
LinterCategory("PLR", "Refactor", prefix_to_selector(RuleCodePrefix::PLR)),
LinterCategory("PLW", "Warning", prefix_to_selector(RuleCodePrefix::PLW)),
];
impl Linter {
pub fn categories(&self) -> Option<&'static [LinterCategory]> {
match self {
Linter::Pycodestyle => Some(&[
LinterCategory("E", "Error", RuleSelector::E),
LinterCategory("W", "Warning", RuleSelector::W),
]),
Linter::Pylint => Some(&[
LinterCategory("PLC", "Convention", RuleSelector::PLC),
LinterCategory("PLE", "Error", RuleSelector::PLE),
LinterCategory("PLR", "Refactor", RuleSelector::PLR),
LinterCategory("PLW", "Warning", RuleSelector::PLW),
]),
Linter::Pycodestyle => Some(PYCODESTYLE_CATEGORIES),
Linter::Pylint => Some(PYLINT_CATEGORIES),
_ => None,
}
}
@@ -644,6 +653,8 @@ impl Rule {
| Rule::MixedSpacesAndTabs
| Rule::NoNewLineAtEndOfFile
| Rule::PEP3120UnnecessaryCodingComment
| Rule::ShebangMissingExecutableFile
| Rule::ShebangNotExecutable
| Rule::ShebangNewline
| Rule::ShebangPython
| Rule::ShebangWhitespace => &LintSource::Lines,
@@ -655,9 +666,9 @@ impl Rule {
| Rule::BadQuotesInlineString
| Rule::BadQuotesMultilineString
| Rule::CommentedOutCode
| Rule::MultiLineImplicitStringConcatenation
| Rule::ExtraneousParentheses
| Rule::InvalidEscapeSequence
| Rule::MultiLineImplicitStringConcatenation
| Rule::SingleLineImplicitStringConcatenation
| Rule::TrailingCommaMissing
| Rule::TrailingCommaOnBareTupleProhibited
@@ -709,60 +720,6 @@ pub const INCOMPATIBLE_CODES: &[(Rule, Rule, &str)] = &[(
Consider adding `D203` to `ignore`.",
)];
/// A hash map from deprecated to latest `Rule`.
pub static CODE_REDIRECTS: Lazy<FxHashMap<&'static str, Rule>> = Lazy::new(|| {
FxHashMap::from_iter([
// TODO(charlie): Remove by 2023-01-01.
("U001", Rule::UselessMetaclassType),
("U003", Rule::TypeOfPrimitive),
("U004", Rule::UselessObjectInheritance),
("U005", Rule::DeprecatedUnittestAlias),
("U006", Rule::UsePEP585Annotation),
("U007", Rule::UsePEP604Annotation),
("U008", Rule::SuperCallWithParameters),
("U009", Rule::PEP3120UnnecessaryCodingComment),
("U010", Rule::UnnecessaryFutureImport),
("U011", Rule::LRUCacheWithoutParameters),
("U012", Rule::UnnecessaryEncodeUTF8),
("U013", Rule::ConvertTypedDictFunctionalToClass),
("U014", Rule::ConvertNamedTupleFunctionalToClass),
("U015", Rule::RedundantOpenModes),
("U016", Rule::RemoveSixCompat),
("U017", Rule::DatetimeTimezoneUTC),
("U019", Rule::TypingTextStrAlias),
// TODO(charlie): Remove by 2023-02-01.
("I252", Rule::RelativeImports),
("M001", Rule::UnusedNOQA),
// TODO(charlie): Remove by 2023-02-01.
("PDV002", Rule::UseOfInplaceArgument),
("PDV003", Rule::UseOfDotIsNull),
("PDV004", Rule::UseOfDotNotNull),
("PDV007", Rule::UseOfDotIx),
("PDV008", Rule::UseOfDotAt),
("PDV009", Rule::UseOfDotIat),
("PDV010", Rule::UseOfDotPivotOrUnstack),
("PDV011", Rule::UseOfDotValues),
("PDV012", Rule::UseOfDotReadTable),
("PDV013", Rule::UseOfDotStack),
("PDV015", Rule::UseOfPdMerge),
("PDV901", Rule::DfIsABadVariableName),
// TODO(charlie): Remove by 2023-02-01.
("R501", Rule::UnnecessaryReturnNone),
("R502", Rule::ImplicitReturnValue),
("R503", Rule::ImplicitReturn),
("R504", Rule::UnnecessaryAssign),
("R505", Rule::SuperfluousElseReturn),
("R506", Rule::SuperfluousElseRaise),
("R507", Rule::SuperfluousElseContinue),
("R508", Rule::SuperfluousElseBreak),
// TODO(charlie): Remove by 2023-02-01.
("IC001", Rule::ImportAliasIsNotConventional),
("IC002", Rule::ImportAliasIsNotConventional),
("IC003", Rule::ImportAliasIsNotConventional),
("IC004", Rule::ImportAliasIsNotConventional),
])
});
#[cfg(test)]
mod tests {
use strum::IntoEnumIterator;

85
src/rule_redirects.rs Normal file
View File

@@ -0,0 +1,85 @@
use std::collections::HashMap;
use once_cell::sync::Lazy;
/// Returns the canonical code that the given (possibly deprecated) code
/// redirects to, or `None` if the code is not a known redirect.
pub(crate) fn get_redirect_target(code: &str) -> Option<&'static str> {
    let target = REDIRECTS.get(code)?;
    Some(*target)
}
/// Returns the code and the redirect target if the given code is a redirect.
/// (The same code is returned to obtain it with a static lifetime).
pub(crate) fn get_redirect(code: &str) -> Option<(&'static str, &'static str)> {
    let (key, target) = REDIRECTS.get_key_value(code)?;
    Some((*key, *target))
}
/// Map from deprecated rule codes to their current spelling.
///
/// Every *prefix* of a redirected code is included as well (e.g. `R`, `R5`,
/// `R50` alongside `R501`), so that partial selectors written against the old
/// prefix keep selecting the same rules after the rename.
static REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
    HashMap::from_iter([
        // `R` codes were renamed to `RET`.
        // TODO(charlie): Remove by 2023-02-01.
        ("R", "RET"),
        ("R5", "RET5"),
        ("R50", "RET50"),
        ("R501", "RET501"),
        ("R502", "RET502"),
        ("R503", "RET503"),
        ("R504", "RET504"),
        ("R505", "RET505"),
        ("R506", "RET506"),
        ("R507", "RET507"),
        ("R508", "RET508"),
        // `IC` codes were renamed to `ICN`. Note that `IC002`-`IC004` were all
        // folded into the single rule `ICN001`.
        ("IC", "ICN"),
        ("IC0", "ICN0"),
        ("IC00", "ICN00"),
        ("IC001", "ICN001"),
        ("IC002", "ICN001"),
        ("IC003", "ICN001"),
        ("IC004", "ICN001"),
        // `U` codes were renamed to `UP`.
        // TODO(charlie): Remove by 2023-01-01.
        ("U", "UP"),
        ("U0", "UP0"),
        ("U00", "UP00"),
        ("U001", "UP001"),
        ("U003", "UP003"),
        ("U004", "UP004"),
        ("U005", "UP005"),
        ("U006", "UP006"),
        ("U007", "UP007"),
        ("U008", "UP008"),
        ("U009", "UP009"),
        ("U01", "UP01"),
        ("U010", "UP010"),
        ("U011", "UP011"),
        ("U012", "UP012"),
        ("U013", "UP013"),
        ("U014", "UP014"),
        ("U015", "UP015"),
        ("U016", "UP016"),
        ("U017", "UP017"),
        ("U019", "UP019"),
        // `I252` was renamed to `TID252`; `M001` was folded into `RUF100`.
        // TODO(charlie): Remove by 2023-02-01.
        ("I2", "TID2"),
        ("I25", "TID25"),
        ("I252", "TID252"),
        ("M", "RUF100"),
        ("M0", "RUF100"),
        ("M001", "RUF100"),
        // `PDV` codes were renamed to `PD`.
        // TODO(charlie): Remove by 2023-02-01.
        ("PDV", "PD"),
        ("PDV0", "PD0"),
        ("PDV002", "PD002"),
        ("PDV003", "PD003"),
        ("PDV004", "PD004"),
        ("PDV007", "PD007"),
        ("PDV008", "PD008"),
        ("PDV009", "PD009"),
        ("PDV01", "PD01"),
        ("PDV010", "PD010"),
        ("PDV011", "PD011"),
        ("PDV012", "PD012"),
        ("PDV013", "PD013"),
        ("PDV015", "PD015"),
        ("PDV9", "PD9"),
        ("PDV90", "PD90"),
        ("PDV901", "PD901"),
    ])
});

183
src/rule_selector.rs Normal file
View File

@@ -0,0 +1,183 @@
use std::str::FromStr;
use schemars::_serde_json::Value;
use schemars::schema::{InstanceType, Schema, SchemaObject};
use schemars::JsonSchema;
use serde::de::{self, Visitor};
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use crate::registry::{Rule, RuleCodePrefix, RuleIter};
use crate::rule_redirects::get_redirect;
/// A user-supplied selection of rules: either the special `ALL` selector or a
/// (possibly partial) rule-code prefix.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum RuleSelector {
    /// All rules
    All,
    /// A rule-code prefix, selecting every rule whose code starts with it.
    Prefix {
        /// The parsed, canonical prefix.
        prefix: RuleCodePrefix,
        /// The deprecated code the input was resolved from via a redirect
        /// (e.g. `U` -> `UP`), if any; set by the `FromStr` implementation.
        redirected_from: Option<&'static str>,
    },
}
impl FromStr for RuleSelector {
    type Err = ParseError;

    /// Parses a selector string: the special `ALL` keyword, or a rule-code
    /// prefix (resolving deprecated spellings such as `U` -> `UP` first).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "ALL" {
            return Ok(Self::All);
        }
        // Resolve deprecated codes before parsing, remembering the original
        // spelling in `redirected_from`.
        let (code, redirected_from) = if let Some((from, target)) = get_redirect(s) {
            (target, Some(from))
        } else {
            (s, None)
        };
        let prefix =
            RuleCodePrefix::from_str(code).map_err(|_| ParseError::Unknown(code.to_string()))?;
        Ok(Self::Prefix {
            prefix,
            redirected_from,
        })
    }
}
/// Error returned when a string cannot be parsed into a `RuleSelector`.
#[derive(Debug, thiserror::Error)]
pub enum ParseError {
    #[error("Unknown rule selector `{0}`")]
    // TODO(martin): tell the user how to discover rule codes via the CLI once such a command is
    // implemented (but that should of course be done only in ruff_cli and not here)
    Unknown(String),
}
impl Serialize for RuleSelector {
    /// Serializes `All` as the literal string `"ALL"`, and a prefix selector
    /// by delegating to `RuleCodePrefix`'s serialization. Note that a selector
    /// parsed from a deprecated code serializes as its canonical target, not
    /// as the spelling the user originally wrote.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            RuleSelector::All => serializer.serialize_str("ALL"),
            RuleSelector::Prefix { prefix, .. } => prefix.serialize(serializer),
        }
    }
}
impl<'de> Deserialize<'de> for RuleSelector {
    /// Deserializes from a string via the `FromStr` implementation.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // We are not simply doing:
        // let s: &str = Deserialize::deserialize(deserializer)?;
        // FromStr::from_str(s).map_err(de::Error::custom)
        // here because the toml crate apparently doesn't support that
        // (as of toml v0.6.0 running `cargo test` failed with the above two lines)
        deserializer.deserialize_str(SelectorVisitor)
    }
}
/// Serde visitor that parses a string into a `RuleSelector` via its `FromStr`
/// implementation (needed for compatibility with the toml crate; see the
/// `Deserialize` impl).
struct SelectorVisitor;

impl Visitor<'_> for SelectorVisitor {
    type Value = RuleSelector;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str(
            "expected a string code identifying a linter or specific rule, or a partial rule code \
             or ALL to refer to all rules",
        )
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        // Parse failures surface as serde custom errors carrying ParseError's
        // message.
        FromStr::from_str(v).map_err(de::Error::custom)
    }
}
impl From<RuleCodePrefix> for RuleSelector {
    /// Wraps a prefix in a selector with no redirect recorded.
    fn from(prefix: RuleCodePrefix) -> Self {
        Self::Prefix {
            prefix,
            redirected_from: None,
        }
    }
}
impl IntoIterator for &RuleSelector {
    type IntoIter = RuleSelectorIter;
    type Item = Rule;

    /// Iterates over every rule matched by this selector: all registered
    /// rules for `All`, or the rules under the code prefix otherwise.
    fn into_iter(self) -> Self::IntoIter {
        match self {
            RuleSelector::All => RuleSelectorIter::All(Rule::iter()),
            RuleSelector::Prefix { prefix, .. } => RuleSelectorIter::Prefix(prefix.into_iter()),
        }
    }
}
/// Iterator over the rules matched by a `RuleSelector`; produced by its
/// `IntoIterator` implementation.
pub enum RuleSelectorIter {
    All(RuleIter),
    Prefix(std::vec::IntoIter<Rule>),
}

impl Iterator for RuleSelectorIter {
    type Item = Rule;

    fn next(&mut self) -> Option<Self::Item> {
        // Both variants simply forward to the wrapped iterator.
        match self {
            RuleSelectorIter::All(iter) => iter.next(),
            RuleSelectorIter::Prefix(iter) => iter.next(),
        }
    }
}
/// A const alternative to the `impl From<RuleCodePrefix> for RuleSelector`,
/// provided to let us keep the fields of `RuleSelector` private.
///
/// Note that Rust doesn't yet support `impl const From<RuleCodePrefix> for
/// RuleSelector` (see <https://github.com/rust-lang/rust/issues/67792>).
// TODO(martin): Remove once RuleSelector is an enum with Linter & Rule variants
pub(crate) const fn prefix_to_selector(prefix: RuleCodePrefix) -> RuleSelector {
    RuleSelector::Prefix {
        prefix,
        redirected_from: None,
    }
}
impl JsonSchema for RuleSelector {
    fn schema_name() -> String {
        "RuleSelector".to_string()
    }

    /// Describes the selector as a string enum: `"ALL"` plus every known rule
    /// code prefix. Deprecated (redirected) codes are not part of the schema,
    /// so they are no longer suggested in editor autocompletion.
    fn json_schema(_gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
        Schema::Object(SchemaObject {
            instance_type: Some(InstanceType::String.into()),
            enum_values: Some(
                std::iter::once("ALL".to_string())
                    .chain(RuleCodePrefix::iter().map(|s| s.as_ref().to_string()))
                    .map(Value::String)
                    .collect(),
            ),
            ..SchemaObject::default()
        })
    }
}
impl RuleSelector {
    /// Returns how narrowly this selector targets rules (see `Specificity`):
    /// `ALL` is the broadest; otherwise the prefix decides.
    pub(crate) fn specificity(&self) -> Specificity {
        match self {
            RuleSelector::All => Specificity::All,
            RuleSelector::Prefix { prefix, .. } => prefix.specificity(),
        }
    }
}
/// How narrowly a selector targets rules, from the broadest (`All`) down to a
/// full five-character rule code. The derived `Ord` follows declaration
/// order, so variants listed later compare as more specific.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub(crate) enum Specificity {
    All,
    Linter,
    Code1Char,
    Code2Chars,
    Code3Chars,
    Code4Chars,
    Code5Chars,
}

View File

@@ -74,147 +74,14 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
DefinitionKind::Package => {}
DefinitionKind::Class(_) => {}
DefinitionKind::NestedClass(_) => {}
DefinitionKind::Function(stmt) | DefinitionKind::NestedFunction(stmt) => {
DefinitionKind::Function(stmt)
| DefinitionKind::NestedFunction(stmt)
| DefinitionKind::Method(stmt) => {
let is_method = matches!(definition.kind, DefinitionKind::Method(_));
let (name, args, returns, body) = match_function_def(stmt);
let mut has_any_typed_arg = false;
// ANN001, ANN401
for arg in args
.args
.iter()
.chain(args.posonlyargs.iter())
.chain(args.kwonlyargs.iter())
{
if let Some(expr) = &arg.node.annotation {
if checker
.settings
.rules
.enabled(&Rule::DynamicallyTypedExpression)
{
check_dynamically_typed(checker, expr, || arg.node.arg.to_string());
};
} else {
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker
.settings
.rules
.enabled(&Rule::MissingTypeFunctionArgument)
{
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeFunctionArgument(arg.node.arg.to_string()),
Range::from_located(arg),
));
}
}
}
}
// ANN002, ANN401
if let Some(arg) = &args.vararg {
if let Some(expr) = &arg.node.annotation {
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker
.settings
.rules
.enabled(&Rule::DynamicallyTypedExpression)
{
let name = &arg.node.arg;
check_dynamically_typed(checker, expr, || format!("*{name}"));
}
}
} else {
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.rules.enabled(&Rule::MissingTypeArgs) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeArgs(arg.node.arg.to_string()),
Range::from_located(arg),
));
}
}
}
}
// ANN003, ANN401
if let Some(arg) = &args.kwarg {
if let Some(expr) = &arg.node.annotation {
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker
.settings
.rules
.enabled(&Rule::DynamicallyTypedExpression)
{
let name = &arg.node.arg;
check_dynamically_typed(checker, expr, || format!("**{name}"));
}
}
} else {
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.rules.enabled(&Rule::MissingTypeKwargs) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeKwargs(arg.node.arg.to_string()),
Range::from_located(arg),
));
}
}
}
}
// ANN201, ANN202, ANN401
if let Some(expr) = &returns {
if checker
.settings
.rules
.enabled(&Rule::DynamicallyTypedExpression)
{
check_dynamically_typed(checker, expr, || name.to_string());
};
} else {
// Allow omission of return annotation in `__init__` functions, if the function
// only returns `None` (explicitly or implicitly).
if checker.settings.flake8_annotations.suppress_none_returning
&& is_none_returning(body)
{
return;
}
match visibility {
Visibility::Public => {
if checker
.settings
.rules
.enabled(&Rule::MissingReturnTypePublicFunction)
{
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypePublicFunction(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
}
}
Visibility::Private => {
if checker
.settings
.rules
.enabled(&Rule::MissingReturnTypePrivateFunction)
{
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypePrivateFunction(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
}
}
}
}
}
DefinitionKind::Method(stmt) => {
let (name, args, returns, body) = match_function_def(stmt);
let mut has_any_typed_arg = false;
// ANN001
for arg in args
.args
.iter()
@@ -222,10 +89,10 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
.chain(args.kwonlyargs.iter())
.skip(
// If this is a non-static method, skip `cls` or `self`.
usize::from(!visibility::is_staticmethod(
checker,
cast::decorator_list(stmt),
)),
usize::from(
is_method
&& !visibility::is_staticmethod(checker, cast::decorator_list(stmt)),
),
)
{
// ANN401 for dynamically typed arguments
@@ -313,7 +180,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
}
// ANN101, ANN102
if !visibility::is_staticmethod(checker, cast::decorator_list(stmt)) {
if is_method && !visibility::is_staticmethod(checker, cast::decorator_list(stmt)) {
if let Some(arg) = args.args.first() {
if arg.node.annotation.is_none() {
if visibility::is_classmethod(checker, cast::decorator_list(stmt)) {
@@ -335,7 +202,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
}
}
// ANN201, ANN202
// ANN201, ANN202, ANN401
if let Some(expr) = &returns {
if checker
.settings
@@ -353,7 +220,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
return;
}
if visibility::is_classmethod(checker, cast::decorator_list(stmt)) {
if is_method && visibility::is_classmethod(checker, cast::decorator_list(stmt)) {
if checker
.settings
.rules
@@ -364,7 +231,9 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
helpers::identifier_range(stmt, checker.locator),
));
}
} else if visibility::is_staticmethod(checker, cast::decorator_list(stmt)) {
} else if is_method
&& visibility::is_staticmethod(checker, cast::decorator_list(stmt))
{
if checker
.settings
.rules
@@ -375,7 +244,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
helpers::identifier_range(stmt, checker.locator),
));
}
} else if visibility::is_init(cast::name(stmt)) {
} else if is_method && visibility::is_init(cast::name(stmt)) {
// Allow omission of return annotation in `__init__` functions, as long as at
// least one argument is typed.
if checker
@@ -401,7 +270,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
checker.diagnostics.push(diagnostic);
}
}
} else if visibility::is_magic(cast::name(stmt)) {
} else if is_method && visibility::is_magic(cast::name(stmt)) {
if checker
.settings
.rules

View File

@@ -1,10 +1,10 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location, Stmt, StmtKind};
use crate::ast::helpers::unparse_stmt;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Generator;
use crate::violations;
fn assertion_error(msg: Option<&Expr>) -> Stmt {
@@ -48,10 +48,8 @@ pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option
let mut diagnostic = Diagnostic::new(violations::DoNotAssertFalse, Range::from_located(test));
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
generator.unparse_stmt(&assertion_error(msg));
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_stmt(&assertion_error(msg), checker.stylist),
stmt.location,
stmt.end_location.unwrap(),
));

View File

@@ -3,11 +3,11 @@ use rustc_hash::{FxHashMap, FxHashSet};
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprContext, ExprKind, Location};
use crate::ast::helpers;
use crate::ast::helpers::unparse_expr;
use crate::ast::types::{CallPath, Range};
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, Rule};
use crate::source_code::Generator;
use crate::violations;
fn type_pattern(elts: Vec<&Expr>) -> Expr {
@@ -59,14 +59,12 @@ fn duplicate_handler_exceptions<'a>(
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
if unique_elts.len() == 1 {
generator.unparse_expr(unique_elts[0], 0);
} else {
generator.unparse_expr(&type_pattern(unique_elts), 0);
}
diagnostic.amend(Fix::replacement(
generator.generate(),
if unique_elts.len() == 1 {
unparse_expr(unique_elts[0], checker.stylist)
} else {
unparse_expr(&type_pattern(unique_elts), checker.stylist)
},
expr.location,
expr.end_location.unwrap(),
));

View File

@@ -1,12 +1,12 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location};
use crate::ast::helpers::unparse_expr;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::python::identifiers::is_identifier;
use crate::python::keyword::KWLIST;
use crate::registry::Diagnostic;
use crate::source_code::Generator;
use crate::violations;
fn attribute(value: &Expr, attr: &str) -> Expr {
@@ -48,10 +48,8 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
let mut diagnostic =
Diagnostic::new(violations::GetAttrWithConstant, Range::from_located(expr));
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
generator.unparse_expr(&attribute(obj, value), 0);
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_expr(&attribute(obj, value), checker.stylist),
expr.location,
expr.end_location.unwrap(),
));

View File

@@ -5,7 +5,6 @@ use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Generator;
use crate::violations;
/// B013
@@ -25,10 +24,8 @@ pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[E
Range::from_located(type_),
);
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
generator.unparse_expr(elt, 0);
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_expr(elt, checker.stylist),
type_.location,
type_.end_location.unwrap(),
));

View File

@@ -1,12 +1,13 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location, Stmt, StmtKind};
use crate::ast::helpers::unparse_stmt;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::python::identifiers::is_identifier;
use crate::python::keyword::KWLIST;
use crate::registry::Diagnostic;
use crate::source_code::{Generator, Stylist};
use crate::source_code::Stylist;
use crate::violations;
fn assignment(obj: &Expr, name: &str, value: &Expr, stylist: &Stylist) -> String {
@@ -27,9 +28,7 @@ fn assignment(obj: &Expr, name: &str, value: &Expr, stylist: &Stylist) -> String
type_comment: None,
},
);
let mut generator: Generator = stylist.into();
generator.unparse_stmt(&stmt);
generator.generate()
unparse_stmt(&stmt, stylist)
}
/// B010

View File

@@ -13,6 +13,12 @@ mod tests {
use crate::registry::Rule;
use crate::settings;
#[test_case(Path::new("EXE001_1.py"); "EXE001_1")]
#[test_case(Path::new("EXE001_2.py"); "EXE001_2")]
#[test_case(Path::new("EXE001_3.py"); "EXE001_3")]
#[test_case(Path::new("EXE002_1.py"); "EXE002_1")]
#[test_case(Path::new("EXE002_2.py"); "EXE002_2")]
#[test_case(Path::new("EXE002_3.py"); "EXE002_3")]
#[test_case(Path::new("EXE003.py"); "EXE003")]
#[test_case(Path::new("EXE004_1.py"); "EXE004_1")]
#[test_case(Path::new("EXE004_2.py"); "EXE004_2")]
@@ -27,6 +33,8 @@ mod tests {
.join(path)
.as_path(),
&settings::Settings::for_rules(vec![
Rule::ShebangNotExecutable,
Rule::ShebangMissingExecutableFile,
Rule::ShebangWhitespace,
Rule::ShebangNewline,
Rule::ShebangPython,

View File

@@ -1,7 +1,11 @@
pub use shebang_missing::{shebang_missing, ShebangMissingExecutableFile};
pub use shebang_newline::{shebang_newline, ShebangNewline};
pub use shebang_not_executable::{shebang_not_executable, ShebangNotExecutable};
pub use shebang_python::{shebang_python, ShebangPython};
pub use shebang_whitespace::{shebang_whitespace, ShebangWhitespace};
mod shebang_missing;
mod shebang_newline;
mod shebang_not_executable;
mod shebang_python;
mod shebang_whitespace;

View File

@@ -0,0 +1,42 @@
#[cfg(not(target_family = "wasm"))]
use std::os::unix::prelude::MetadataExt;
use std::path::Path;
use ruff_macros::derive_message_formats;
#[cfg(not(target_family = "wasm"))]
use crate::ast::types::Range;
use crate::define_violation;
use crate::registry::Diagnostic;
use crate::violation::Violation;
define_violation!(
    pub struct ShebangMissingExecutableFile;
);
impl Violation for ShebangMissingExecutableFile {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("The file is executable but no shebang is present")
    }
}

/// EXE002: flag a file that carries an executable bit but lacks a shebang.
#[cfg(not(target_family = "wasm"))]
pub fn shebang_missing(filepath: &Path) -> Option<Diagnostic> {
    // Unreadable metadata is treated the same as "not executable": no
    // diagnostic.
    let metadata = filepath.metadata().ok()?;
    // Check if file is executable by anyone (owner, group, or other).
    if metadata.mode() & 0o111 == 0 {
        return None;
    }
    Some(Diagnostic::new(ShebangMissingExecutableFile, Range::default()))
}

/// EXE002: no-op on WASM targets, which have no file permission bits.
#[cfg(target_family = "wasm")]
pub fn shebang_missing(_filepath: &Path) -> Option<Diagnostic> {
    None
}

View File

@@ -0,0 +1,63 @@
#[cfg(not(target_family = "wasm"))]
use std::os::unix::prelude::MetadataExt;
use std::path::Path;
use ruff_macros::derive_message_formats;
#[cfg(not(target_family = "wasm"))]
use rustpython_ast::Location;
#[cfg(not(target_family = "wasm"))]
use crate::ast::types::Range;
use crate::define_violation;
use crate::registry::Diagnostic;
use crate::rules::flake8_executable::helpers::ShebangDirective;
use crate::violation::Violation;
define_violation!(
    pub struct ShebangNotExecutable;
);
impl Violation for ShebangNotExecutable {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Shebang is present but file is not executable")
    }
}

/// EXE001: flag a file that contains a shebang but is not executable.
#[cfg(not(target_family = "wasm"))]
pub fn shebang_not_executable(
    filepath: &Path,
    lineno: usize,
    shebang: &ShebangDirective,
) -> Option<Diagnostic> {
    // Only lines that actually parsed as a shebang are of interest.
    if let ShebangDirective::Match(_, start, end, _) = shebang {
        // Unreadable metadata yields no diagnostic.
        let metadata = filepath.metadata().ok()?;
        // Executable by anyone (owner, group, or other)? Then the shebang is
        // legitimate.
        if metadata.mode() & 0o111 != 0 {
            return None;
        }
        Some(Diagnostic::new(
            ShebangNotExecutable,
            Range::new(
                Location::new(lineno + 1, *start),
                Location::new(lineno + 1, *end),
            ),
        ))
    } else {
        None
    }
}

/// EXE001: no-op on WASM targets, which have no file permission bits.
#[cfg(target_family = "wasm")]
pub fn shebang_not_executable(
    _filepath: &Path,
    _lineno: usize,
    _shebang: &ShebangDirective,
) -> Option<Diagnostic> {
    None
}

View File

@@ -0,0 +1,15 @@
---
source: src/rules/flake8_executable/mod.rs
expression: diagnostics
---
- kind:
ShebangNotExecutable: ~
location:
row: 1
column: 2
end_location:
row: 1
column: 17
fix: ~
parent: ~

View File

@@ -0,0 +1,6 @@
---
source: src/rules/flake8_executable/mod.rs
expression: diagnostics
---
[]

View File

@@ -0,0 +1,6 @@
---
source: src/rules/flake8_executable/mod.rs
expression: diagnostics
---
[]

View File

@@ -0,0 +1,15 @@
---
source: src/rules/flake8_executable/mod.rs
expression: diagnostics
---
- kind:
ShebangMissingExecutableFile: ~
location:
row: 1
column: 0
end_location:
row: 1
column: 0
fix: ~
parent: ~

View File

@@ -0,0 +1,6 @@
---
source: src/rules/flake8_executable/mod.rs
expression: diagnostics
---
[]

View File

@@ -0,0 +1,6 @@
---
source: src/rules/flake8_executable/mod.rs
expression: diagnostics
---
[]

View File

@@ -1,5 +1,6 @@
//! Rules from [flake8-implicit-str-concat](https://pypi.org/project/flake8-implicit-str-concat/).
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]
mod tests {
@@ -26,4 +27,24 @@ mod tests {
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
#[test_case(Rule::SingleLineImplicitStringConcatenation, Path::new("ISC.py"); "ISC001")]
#[test_case(Rule::MultiLineImplicitStringConcatenation, Path::new("ISC.py"); "ISC002")]
#[test_case(Rule::ExplicitStringConcatenation, Path::new("ISC.py"); "ISC003")]
fn multiline(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("multiline_{}_{}", rule_code.code(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_implicit_str_concat")
.join(path)
.as_path(),
&settings::Settings {
flake8_implicit_str_concat: super::settings::Settings {
allow_multiline: false,
},
..settings::Settings::for_rule(rule_code)
},
)?;
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -4,13 +4,20 @@ use rustpython_parser::lexer::{LexResult, Tok};
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::rules::flake8_implicit_str_concat::settings::Settings;
use crate::violations;
/// ISC001, ISC002
pub fn implicit(tokens: &[LexResult]) -> Vec<Diagnostic> {
pub fn implicit(tokens: &[LexResult], settings: &Settings) -> Vec<Diagnostic> {
let mut diagnostics = vec![];
for ((a_start, a_tok, a_end), (b_start, b_tok, b_end)) in
tokens.iter().flatten().tuple_windows()
for ((a_start, a_tok, a_end), (b_start, b_tok, b_end)) in tokens
.iter()
.flatten()
.filter(|(_, tok, _)| {
!matches!(tok, Tok::Comment(..))
&& (settings.allow_multiline || !matches!(tok, Tok::NonLogicalNewline))
})
.tuple_windows()
{
if matches!(a_tok, Tok::String { .. }) && matches!(b_tok, Tok::String { .. }) {
if a_end.row() == b_start.row() {
@@ -22,8 +29,6 @@ pub fn implicit(tokens: &[LexResult]) -> Vec<Diagnostic> {
},
));
} else {
// Not on the same line, and no NonLogicalNewline between a and b =>
// concatantion over a continuation line.
diagnostics.push(Diagnostic::new(
violations::MultiLineImplicitStringConcatenation,
Range {

View File

@@ -0,0 +1,57 @@
//! Settings for the `flake8-implicit-str-concat` plugin.
use ruff_macros::ConfigurationOptions;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
/// User-facing configuration options for the `flake8-implicit-str-concat`
/// plugin, as written in the configuration file (all fields optional).
#[derive(
    Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, JsonSchema,
)]
#[serde(
    deny_unknown_fields,
    rename_all = "kebab-case",
    rename = "Flake8ImplicitStrConcatOptions"
)]
pub struct Options {
    #[option(
        default = r#"true"#,
        value_type = "bool",
        example = r#"
            allow-multiline = false
        "#
    )]
    /// Whether to allow implicit string concatenations for multiline strings.
    /// By default, implicit concatenations of multiline strings are
    /// allowed (but continuation lines, delimited with a backslash, are
    /// prohibited).
    pub allow_multiline: Option<bool>,
}
/// Resolved settings for the `flake8-implicit-str-concat` plugin (see
/// `Options` for the user-facing configuration).
#[derive(Debug, Hash)]
pub struct Settings {
    /// Whether implicit concatenation of multiline strings is permitted;
    /// defaults to `true`.
    pub allow_multiline: bool,
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            allow_multiline: true,
        }
    }
}

impl From<Options> for Settings {
    fn from(options: Options) -> Self {
        Self {
            // An unset option falls back to the default of `true`.
            allow_multiline: options.allow_multiline.unwrap_or(true),
        }
    }
}

impl From<Settings> for Options {
    fn from(settings: Settings) -> Self {
        Self {
            allow_multiline: Some(settings.allow_multiline),
        }
    }
}

View File

@@ -0,0 +1,25 @@
---
source: src/rules/flake8_implicit_str_concat/mod.rs
expression: diagnostics
---
- kind:
SingleLineImplicitStringConcatenation: ~
location:
row: 1
column: 4
end_location:
row: 1
column: 11
fix: ~
parent: ~
- kind:
SingleLineImplicitStringConcatenation: ~
location:
row: 1
column: 8
end_location:
row: 1
column: 15
fix: ~
parent: ~

View File

@@ -0,0 +1,45 @@
---
source: src/rules/flake8_implicit_str_concat/mod.rs
expression: diagnostics
---
- kind:
MultiLineImplicitStringConcatenation: ~
location:
row: 5
column: 4
end_location:
row: 6
column: 9
fix: ~
parent: ~
- kind:
MultiLineImplicitStringConcatenation: ~
location:
row: 24
column: 2
end_location:
row: 25
column: 7
fix: ~
parent: ~
- kind:
MultiLineImplicitStringConcatenation: ~
location:
row: 29
column: 2
end_location:
row: 30
column: 7
fix: ~
parent: ~
- kind:
MultiLineImplicitStringConcatenation: ~
location:
row: 34
column: 2
end_location:
row: 35
column: 8
fix: ~
parent: ~

View File

@@ -0,0 +1,45 @@
---
source: src/rules/flake8_implicit_str_concat/mod.rs
expression: diagnostics
---
- kind:
ExplicitStringConcatenation: ~
location:
row: 3
column: 4
end_location:
row: 3
column: 17
fix: ~
parent: ~
- kind:
ExplicitStringConcatenation: ~
location:
row: 9
column: 2
end_location:
row: 10
column: 7
fix: ~
parent: ~
- kind:
ExplicitStringConcatenation: ~
location:
row: 14
column: 2
end_location:
row: 15
column: 7
fix: ~
parent: ~
- kind:
ExplicitStringConcatenation: ~
location:
row: 19
column: 2
end_location:
row: 20
column: 8
fix: ~
parent: ~

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Constant, Expr, ExprContext, ExprKind};
use super::super::types;
use super::helpers::{is_pytest_parametrize, split_names};
use crate::ast::helpers::create_expr;
use crate::ast::helpers::{create_expr, unparse_expr};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
@@ -31,8 +31,7 @@ fn elts_to_csv(elts: &[Expr], checker: &Checker) -> Option<String> {
return None;
}
let mut generator: Generator = checker.stylist.into();
generator.unparse_expr(
Some(unparse_expr(
&create_expr(ExprKind::Constant {
value: Constant::Str(elts.iter().fold(String::new(), |mut acc, elt| {
if let ExprKind::Constant {
@@ -49,9 +48,8 @@ fn elts_to_csv(elts: &[Expr], checker: &Checker) -> Option<String> {
})),
kind: None,
}),
0,
);
Some(generator.generate())
checker.stylist,
))
}
/// PT006
@@ -102,24 +100,22 @@ fn check_names(checker: &mut Checker, expr: &Expr) {
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
generator.unparse_expr(
&create_expr(ExprKind::List {
elts: names
.iter()
.map(|&name| {
create_expr(ExprKind::Constant {
value: Constant::Str(name.to_string()),
kind: None,
})
})
.collect(),
ctx: ExprContext::Load,
}),
0,
);
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_expr(
&create_expr(ExprKind::List {
elts: names
.iter()
.map(|&name| {
create_expr(ExprKind::Constant {
value: Constant::Str(name.to_string()),
kind: None,
})
})
.collect(),
ctx: ExprContext::Load,
}),
checker.stylist,
),
expr.location,
expr.end_location.unwrap(),
));
@@ -144,16 +140,14 @@ fn check_names(checker: &mut Checker, expr: &Expr) {
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
generator.unparse_expr(
&create_expr(ExprKind::List {
elts: elts.clone(),
ctx: ExprContext::Load,
}),
0,
);
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_expr(
&create_expr(ExprKind::List {
elts: elts.clone(),
ctx: ExprContext::Load,
}),
checker.stylist,
),
expr.location,
expr.end_location.unwrap(),
));
@@ -285,10 +279,8 @@ fn handle_single_name(checker: &mut Checker, expr: &Expr, value: &Expr) {
);
if checker.patch(diagnostic.kind.rule()) {
let mut generator: Generator = checker.stylist.into();
generator.unparse_expr(&create_expr(value.node.clone()), 0);
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_expr(&create_expr(value.node.clone()), checker.stylist),
expr.location,
expr.end_location.unwrap(),
));

View File

@@ -325,6 +325,11 @@ pub fn function(checker: &mut Checker, body: &[Stmt]) {
visitor.stack
};
// Avoid false positives for generators.
if !stack.yields.is_empty() {
return;
}
if checker.settings.rules.enabled(&Rule::SuperfluousElseReturn)
|| checker.settings.rules.enabled(&Rule::SuperfluousElseRaise)
|| checker

View File

@@ -7,6 +7,7 @@ use crate::ast::visitor::Visitor;
#[derive(Default)]
pub struct Stack<'a> {
pub returns: Vec<(&'a Stmt, Option<&'a Expr>)>,
pub yields: Vec<&'a Expr>,
pub ifs: Vec<&'a Stmt>,
pub elifs: Vec<&'a Stmt>,
pub refs: FxHashMap<&'a str, Vec<Location>>,
@@ -116,7 +117,6 @@ impl<'a> Visitor<'a> for ReturnVisitor<'a> {
.push((stmt.location, stmt.end_location.unwrap()));
visitor::walk_stmt(self, stmt);
}
_ => {
visitor::walk_stmt(self, stmt);
}
@@ -143,8 +143,8 @@ impl<'a> Visitor<'a> for ReturnVisitor<'a> {
.or_insert_with(Vec::new)
.push(expr.location);
}
ExprKind::JoinedStr { .. } => {
visitor::walk_expr(self, expr);
ExprKind::YieldFrom { .. } | ExprKind::Yield { .. } => {
self.stack.yields.push(expr);
}
_ => visitor::walk_expr(self, expr),
}

View File

@@ -2,12 +2,12 @@ use rustpython_ast::{
Comprehension, Constant, Expr, ExprContext, ExprKind, Stmt, StmtKind, Unaryop,
};
use crate::ast::helpers::{create_expr, create_stmt};
use crate::ast::helpers::{create_expr, create_stmt, unparse_stmt};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, Rule};
use crate::source_code::{Generator, Stylist};
use crate::source_code::Stylist;
use crate::violations;
struct Loop<'a> {
@@ -147,26 +147,27 @@ fn return_values_for_siblings<'a>(stmt: &'a Stmt, sibling: &'a Stmt) -> Option<L
/// Generate a return statement for an `any` or `all` builtin comprehension.
fn return_stmt(id: &str, test: &Expr, target: &Expr, iter: &Expr, stylist: &Stylist) -> String {
let mut generator: Generator = stylist.into();
generator.unparse_stmt(&create_stmt(StmtKind::Return {
value: Some(Box::new(create_expr(ExprKind::Call {
func: Box::new(create_expr(ExprKind::Name {
id: id.to_string(),
ctx: ExprContext::Load,
})),
args: vec![create_expr(ExprKind::GeneratorExp {
elt: Box::new(test.clone()),
generators: vec![Comprehension {
target: target.clone(),
iter: iter.clone(),
ifs: vec![],
is_async: 0,
}],
})],
keywords: vec![],
}))),
}));
generator.generate()
unparse_stmt(
&create_stmt(StmtKind::Return {
value: Some(Box::new(create_expr(ExprKind::Call {
func: Box::new(create_expr(ExprKind::Name {
id: id.to_string(),
ctx: ExprContext::Load,
})),
args: vec![create_expr(ExprKind::GeneratorExp {
elt: Box::new(test.clone()),
generators: vec![Comprehension {
target: target.clone(),
iter: iter.clone(),
ifs: vec![],
is_async: 0,
}],
})],
keywords: vec![],
}))),
}),
stylist,
)
}
/// SIM110, SIM111

View File

@@ -12,14 +12,14 @@ mod tests {
use textwrap::dedent;
use crate::linter::check_path;
use crate::registry::{Rule, RuleSelector};
use crate::registry::{Rule, RuleCodePrefix};
use crate::settings::flags;
use crate::source_code::{Indexer, Locator, Stylist};
use crate::{directives, rustpython_helpers, settings};
fn rule_code(contents: &str, expected: &[Rule]) -> Result<()> {
let contents = dedent(contents);
let settings = settings::Settings::for_rules(&RuleSelector::PD);
let settings = settings::Settings::for_rules(&RuleCodePrefix::PD);
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);

View File

@@ -0,0 +1,3 @@
/// Returns `true` if `name` is one of the single-character identifiers
/// ("l", "I", "O") that pycodestyle (E741-E743) flags as ambiguous because
/// they are easily confused with the digits `1` and `0`.
pub fn is_ambiguous_name(name: &str) -> bool {
    matches!(name, "l" | "I" | "O")
}

View File

@@ -2,6 +2,8 @@
pub(crate) mod rules;
pub mod settings;
pub mod helpers;
#[cfg(test)]
mod tests {
use std::path::Path;

View File

@@ -1,657 +0,0 @@
use itertools::izip;
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;
use rustpython_ast::{Arguments, Constant, Excepthandler, Location, Stmt, StmtKind, Unaryop};
use rustpython_parser::ast::{Cmpop, Expr, ExprKind};
use crate::ast::helpers;
use crate::ast::helpers::{
create_expr, except_range, match_leading_content, match_trailing_content, unparse_expr,
};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::settings::Settings;
use crate::source_code::{Generator, Locator, Stylist};
use crate::violations;
static URL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://\S+$").unwrap());
/// Whether `line` (whose pre-computed character count is `line_length`)
/// should be reported as exceeding `limit`, with carve-outs for task-tag
/// comments and comments that end in a URL.
fn is_overlong(
    line: &str,
    line_length: usize,
    limit: usize,
    ignore_overlong_task_comments: bool,
    task_tags: &[String],
) -> bool {
    if line_length <= limit {
        return false;
    }
    let mut chunks = line.split_whitespace();
    let (Some(first), Some(second)) = (chunks.next(), chunks.next()) else {
        // Single word / no printable chars - no way to make the line shorter
        return false;
    };
    if first == "#" {
        if ignore_overlong_task_comments {
            // Task tags may be written with a trailing colon (e.g. `# TODO:`).
            let second = second.trim_end_matches(':');
            if task_tags.iter().any(|tag| tag == second) {
                return false;
            }
        }
        // Do not enforce the line length for commented lines that end with a URL
        // or contain only a single word.
        if chunks.last().map_or(true, |c| URL_REGEX.is_match(c)) {
            return false;
        }
    }
    true
}
/// E501
///
/// Reports a line whose character count exceeds `settings.line_length`,
/// subject to the carve-outs implemented by `is_overlong`.
pub fn line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
    // Count characters, not bytes, so multi-byte characters count once.
    let line_length = line.chars().count();
    let limit = settings.line_length;
    if is_overlong(
        line,
        line_length,
        limit,
        settings.pycodestyle.ignore_overlong_task_comments,
        &settings.task_tags,
    ) {
        Some(Diagnostic::new(
            violations::LineTooLong(line_length, limit),
            Range::new(
                // `lineno` appears to be 0-based here (note the `+ 1`).
                Location::new(lineno + 1, limit),
                Location::new(lineno + 1, line_length),
            ),
        ))
    } else {
        None
    }
}
/// E101
///
/// Reports a line whose leading whitespace contains both spaces and tabs;
/// the diagnostic range covers the indentation only.
pub fn mixed_spaces_and_tabs(lineno: usize, line: &str) -> Option<Diagnostic> {
    let indent = leading_space(line);
    if indent.contains(' ') && indent.contains('\t') {
        Some(Diagnostic::new(
            violations::MixedSpacesAndTabs,
            Range::new(
                Location::new(lineno + 1, 0),
                Location::new(lineno + 1, indent.chars().count()),
            ),
        ))
    } else {
        None
    }
}
/// W505
///
/// Like E501, but checks against `pycodestyle.max_doc_length`; the rule is
/// disabled entirely when that setting is unset.
pub fn doc_line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
    let Some(limit) = settings.pycodestyle.max_doc_length else {
        return None;
    };
    let line_length = line.chars().count();
    if is_overlong(
        line,
        line_length,
        limit,
        settings.pycodestyle.ignore_overlong_task_comments,
        &settings.task_tags,
    ) {
        Some(Diagnostic::new(
            violations::DocLineTooLong(line_length, limit),
            Range::new(
                Location::new(lineno + 1, limit),
                Location::new(lineno + 1, line_length),
            ),
        ))
    } else {
        None
    }
}
/// Unparse a `Compare` expression built from the given operands and
/// operators, formatted according to `stylist`. Used to generate replacement
/// source for comparison fixes.
fn compare(left: &Expr, ops: &[Cmpop], comparators: &[Expr], stylist: &Stylist) -> String {
    unparse_expr(
        &create_expr(ExprKind::Compare {
            left: Box::new(left.clone()),
            ops: ops.to_vec(),
            comparators: comparators.to_vec(),
        }),
        stylist,
    )
}
/// E711, E712
///
/// Flags comparisons against `None` (E711) and `True`/`False` (E712) that
/// use `==`/`!=` instead of `is`/`is not`. When patching is enabled, the
/// entire comparison expression is rewritten in one replacement fix.
pub fn literal_comparisons(
    checker: &mut Checker,
    expr: &Expr,
    left: &Expr,
    ops: &[Cmpop],
    comparators: &[Expr],
    check_none_comparisons: bool,
    check_true_false_comparisons: bool,
) {
    // Mapping from (bad operator index) to (replacement operator). As we iterate
    // through the list of operators, we apply "dummy" fixes for each error,
    // then replace the entire expression at the end with one "real" fix, to
    // avoid conflicts.
    let mut bad_ops: FxHashMap<usize, Cmpop> = FxHashMap::default();
    let mut diagnostics: Vec<Diagnostic> = vec![];
    let op = ops.first().unwrap();
    // Check `left`.
    let mut comparator = left;
    let next = &comparators[0];
    if check_none_comparisons
        && matches!(
            comparator.node,
            ExprKind::Constant {
                value: Constant::None,
                kind: None
            }
        )
    {
        if matches!(op, Cmpop::Eq) {
            let diagnostic = Diagnostic::new(
                violations::NoneComparison(op.into()),
                Range::from_located(comparator),
            );
            // Skip planning a fix when the other operand is a constant
            // non-singleton (helper defined elsewhere in the crate).
            if checker.patch(diagnostic.kind.rule()) && !helpers::is_constant_non_singleton(next) {
                bad_ops.insert(0, Cmpop::Is);
            }
            diagnostics.push(diagnostic);
        }
        if matches!(op, Cmpop::NotEq) {
            let diagnostic = Diagnostic::new(
                violations::NoneComparison(op.into()),
                Range::from_located(comparator),
            );
            if checker.patch(diagnostic.kind.rule()) && !helpers::is_constant_non_singleton(next) {
                bad_ops.insert(0, Cmpop::IsNot);
            }
            diagnostics.push(diagnostic);
        }
    }
    if check_true_false_comparisons {
        if let ExprKind::Constant {
            value: Constant::Bool(value),
            kind: None,
        } = comparator.node
        {
            if matches!(op, Cmpop::Eq) {
                let diagnostic = Diagnostic::new(
                    violations::TrueFalseComparison(value, op.into()),
                    Range::from_located(comparator),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(next)
                {
                    bad_ops.insert(0, Cmpop::Is);
                }
                diagnostics.push(diagnostic);
            }
            if matches!(op, Cmpop::NotEq) {
                let diagnostic = Diagnostic::new(
                    violations::TrueFalseComparison(value, op.into()),
                    Range::from_located(comparator),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(next)
                {
                    bad_ops.insert(0, Cmpop::IsNot);
                }
                diagnostics.push(diagnostic);
            }
        }
    }
    // Check each comparator in order.
    for (idx, (op, next)) in izip!(ops, comparators).enumerate() {
        if check_none_comparisons
            && matches!(
                next.node,
                ExprKind::Constant {
                    value: Constant::None,
                    kind: None
                }
            )
        {
            if matches!(op, Cmpop::Eq) {
                let diagnostic = Diagnostic::new(
                    violations::NoneComparison(op.into()),
                    Range::from_located(next),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(comparator)
                {
                    bad_ops.insert(idx, Cmpop::Is);
                }
                diagnostics.push(diagnostic);
            }
            if matches!(op, Cmpop::NotEq) {
                let diagnostic = Diagnostic::new(
                    violations::NoneComparison(op.into()),
                    Range::from_located(next),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(comparator)
                {
                    bad_ops.insert(idx, Cmpop::IsNot);
                }
                diagnostics.push(diagnostic);
            }
        }
        if check_true_false_comparisons {
            if let ExprKind::Constant {
                value: Constant::Bool(value),
                kind: None,
            } = next.node
            {
                if matches!(op, Cmpop::Eq) {
                    let diagnostic = Diagnostic::new(
                        violations::TrueFalseComparison(value, op.into()),
                        Range::from_located(next),
                    );
                    if checker.patch(diagnostic.kind.rule())
                        && !helpers::is_constant_non_singleton(comparator)
                    {
                        bad_ops.insert(idx, Cmpop::Is);
                    }
                    diagnostics.push(diagnostic);
                }
                if matches!(op, Cmpop::NotEq) {
                    let diagnostic = Diagnostic::new(
                        violations::TrueFalseComparison(value, op.into()),
                        Range::from_located(next),
                    );
                    if checker.patch(diagnostic.kind.rule())
                        && !helpers::is_constant_non_singleton(comparator)
                    {
                        bad_ops.insert(idx, Cmpop::IsNot);
                    }
                    diagnostics.push(diagnostic);
                }
            }
        }
        comparator = next;
    }
    // TODO(charlie): Respect `noqa` directives. If one of the operators has a
    // `noqa`, but another doesn't, both will be removed here.
    if !bad_ops.is_empty() {
        // Replace the entire comparison expression.
        let ops = ops
            .iter()
            .enumerate()
            .map(|(idx, op)| bad_ops.get(&idx).unwrap_or(op))
            .cloned()
            .collect::<Vec<_>>();
        let content = compare(left, &ops, comparators, checker.stylist);
        for diagnostic in &mut diagnostics {
            diagnostic.amend(Fix::replacement(
                content.to_string(),
                expr.location,
                expr.end_location.unwrap(),
            ));
        }
    }
    checker.diagnostics.extend(diagnostics);
}
/// E713, E714
///
/// Flags `not x in y` (E713) and `not x is y` (E714), rewriting them to
/// `x not in y` / `x is not y` when the comparison has a single operator.
pub fn not_tests(
    checker: &mut Checker,
    expr: &Expr,
    op: &Unaryop,
    operand: &Expr,
    check_not_in: bool,
    check_not_is: bool,
) {
    if matches!(op, Unaryop::Not) {
        if let ExprKind::Compare {
            left,
            ops,
            comparators,
            ..
        } = &operand.node
        {
            // Only fix single-operator comparisons; chained comparisons can't
            // be rewritten by flipping a single operator.
            let should_fix = ops.len() == 1;
            for op in ops.iter() {
                match op {
                    Cmpop::In => {
                        if check_not_in {
                            let mut diagnostic = Diagnostic::new(
                                violations::NotInTest,
                                Range::from_located(operand),
                            );
                            if checker.patch(diagnostic.kind.rule()) && should_fix {
                                diagnostic.amend(Fix::replacement(
                                    compare(left, &[Cmpop::NotIn], comparators, checker.stylist),
                                    expr.location,
                                    expr.end_location.unwrap(),
                                ));
                            }
                            checker.diagnostics.push(diagnostic);
                        }
                    }
                    Cmpop::Is => {
                        if check_not_is {
                            let mut diagnostic = Diagnostic::new(
                                violations::NotIsTest,
                                Range::from_located(operand),
                            );
                            if checker.patch(diagnostic.kind.rule()) && should_fix {
                                diagnostic.amend(Fix::replacement(
                                    compare(left, &[Cmpop::IsNot], comparators, checker.stylist),
                                    expr.location,
                                    expr.end_location.unwrap(),
                                ));
                            }
                            checker.diagnostics.push(diagnostic);
                        }
                    }
                    _ => {}
                }
            }
        }
    }
}
/// E721
///
/// Flags equality/identity comparisons against `type(...)` calls and
/// `types.*` attributes.
pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) -> Vec<Diagnostic> {
    let mut diagnostics: Vec<Diagnostic> = vec![];
    for (op, right) in izip!(ops, comparators) {
        if !matches!(op, Cmpop::Is | Cmpop::IsNot | Cmpop::Eq | Cmpop::NotEq) {
            continue;
        }
        match &right.node {
            ExprKind::Call { func, args, .. } => {
                if let ExprKind::Name { id, .. } = &func.node {
                    // Ex) type(False)
                    if id == "type" {
                        if let Some(arg) = args.first() {
                            // Allow comparison for types which are not obvious.
                            if !matches!(
                                arg.node,
                                ExprKind::Name { .. }
                                    | ExprKind::Constant {
                                        value: Constant::None,
                                        kind: None
                                    }
                            ) {
                                diagnostics
                                    .push(Diagnostic::new(violations::TypeComparison, location));
                            }
                        }
                    }
                }
            }
            ExprKind::Attribute { value, .. } => {
                if let ExprKind::Name { id, .. } = &value.node {
                    // Ex) types.IntType
                    if id == "types" {
                        diagnostics.push(Diagnostic::new(violations::TypeComparison, location));
                    }
                }
            }
            _ => {}
        }
    }
    diagnostics
}
/// E722
///
/// Flags `except:` handlers with no exception type, unless the handler body
/// re-raises via a bare `raise`.
pub fn do_not_use_bare_except(
    type_: Option<&Expr>,
    body: &[Stmt],
    handler: &Excepthandler,
    locator: &Locator,
) -> Option<Diagnostic> {
    if type_.is_none()
        && !body
            .iter()
            .any(|stmt| matches!(stmt.node, StmtKind::Raise { exc: None, .. }))
    {
        Some(Diagnostic::new(
            violations::DoNotUseBareExcept,
            except_range(handler, locator),
        ))
    } else {
        None
    }
}
/// Generate source for `def {name}({args}): return {body}` — the named
/// function that replaces a lambda assignment in the E731 fix.
fn function(name: &str, args: &Arguments, body: &Expr, stylist: &Stylist) -> String {
    // Wrap the lambda body in an explicit `return`.
    let body = Stmt::new(
        Location::default(),
        Location::default(),
        StmtKind::Return {
            value: Some(Box::new(body.clone())),
        },
    );
    let func = Stmt::new(
        Location::default(),
        Location::default(),
        StmtKind::FunctionDef {
            name: name.to_string(),
            args: Box::new(args.clone()),
            body: vec![body],
            decorator_list: vec![],
            returns: None,
            type_comment: None,
        },
    );
    let mut generator: Generator = stylist.into();
    generator.unparse_stmt(&func);
    generator.generate()
}
/// E731
///
/// Flags `name = lambda ...:` assignments. When patching is enabled and the
/// statement sits alone on its line(s), rewrites it to a `def` of the same
/// name, preserving the statement's original indentation.
pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr, stmt: &Stmt) {
    if let ExprKind::Name { id, .. } = &target.node {
        if let ExprKind::Lambda { args, body } = &value.node {
            let mut diagnostic = Diagnostic::new(
                violations::DoNotAssignLambda(id.to_string()),
                Range::from_located(stmt),
            );
            if checker.patch(diagnostic.kind.rule()) {
                // Only fix when nothing else shares the statement's lines.
                if !match_leading_content(stmt, checker.locator)
                    && !match_trailing_content(stmt, checker.locator)
                {
                    let first_line = checker.locator.slice_source_code_range(&Range::new(
                        Location::new(stmt.location.row(), 0),
                        Location::new(stmt.location.row() + 1, 0),
                    ));
                    let indentation = &leading_space(first_line);
                    // Re-indent every generated line after the first to match
                    // the original statement's indentation.
                    let mut indented = String::new();
                    for (idx, line) in function(id, args, body, checker.stylist)
                        .lines()
                        .enumerate()
                    {
                        if idx == 0 {
                            indented.push_str(line);
                        } else {
                            indented.push('\n');
                            indented.push_str(indentation);
                            indented.push_str(line);
                        }
                    }
                    diagnostic.amend(Fix::replacement(
                        indented,
                        stmt.location,
                        stmt.end_location.unwrap(),
                    ));
                }
            }
            checker.diagnostics.push(diagnostic);
        }
    }
}
/// Whether `name` is one of the identifiers ("l", "I", "O") that E741-E743
/// consider ambiguous.
fn is_ambiguous_name(name: &str) -> bool {
    ["l", "I", "O"].contains(&name)
}
/// E741
///
/// Flags a variable binding whose name is ambiguous per `is_ambiguous_name`.
pub fn ambiguous_variable_name(name: &str, range: Range) -> Option<Diagnostic> {
    if is_ambiguous_name(name) {
        Some(Diagnostic::new(
            violations::AmbiguousVariableName(name.to_string()),
            range,
        ))
    } else {
        None
    }
}
/// E742
///
/// Flags a class whose name is ambiguous per `is_ambiguous_name`. `locate`
/// is only invoked when a diagnostic is actually produced.
pub fn ambiguous_class_name<F>(name: &str, locate: F) -> Option<Diagnostic>
where
    F: FnOnce() -> Range,
{
    if is_ambiguous_name(name) {
        Some(Diagnostic::new(
            violations::AmbiguousClassName(name.to_string()),
            locate(),
        ))
    } else {
        None
    }
}
/// E743
///
/// Flags a function whose name is ambiguous per `is_ambiguous_name`.
/// `locate` is only invoked when a diagnostic is actually produced.
pub fn ambiguous_function_name<F>(name: &str, locate: F) -> Option<Diagnostic>
where
    F: FnOnce() -> Range,
{
    if is_ambiguous_name(name) {
        Some(Diagnostic::new(
            violations::AmbiguousFunctionName(name.to_string()),
            locate(),
        ))
    } else {
        None
    }
}
/// W292
///
/// Reports a missing trailing newline; when `autofix` is set, attaches an
/// insertion fix at the end of the file.
pub fn no_newline_at_end_of_file(contents: &str, autofix: bool) -> Option<Diagnostic> {
    if !contents.ends_with('\n') {
        // Note: if `lines.last()` is `None`, then `contents` is empty (and so we don't
        // want to raise W292 anyway).
        if let Some(line) = contents.lines().last() {
            // Both locations are at the end of the file (and thus the same).
            let location = Location::new(contents.lines().count(), line.len());
            let mut diagnostic = Diagnostic::new(
                violations::NoNewLineAtEndOfFile,
                Range::new(location, location),
            );
            if autofix {
                diagnostic.amend(Fix::insertion("\n".to_string(), location));
            }
            return Some(diagnostic);
        }
    }
    None
}
// See: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
// Characters that may legally follow a backslash in a (non-raw) Python string.
const VALID_ESCAPE_SEQUENCES: &[char; 23] = &[
    '\n', '\\', '\'', '"', 'a', 'b', 'f', 'n', 'r', 't', 'v', '0', '1', '2', '3', '4', '5', '6',
    '7', 'x', // Escape sequences only recognized in string literals
    'N', 'u', 'U',
];
/// Return the quotation markers used for a String token.
///
/// Triple quotes are tested before single quotes so that `'''`/`"""` are not
/// mistaken for a lone `'`/`"`. Panics if the token ends with none of them.
fn extract_quote(text: &str) -> &str {
    for quote in ["'''", "\"\"\"", "'", "\""] {
        if text.ends_with(quote) {
            return quote;
        }
    }
    panic!("Unable to find quotation mark for String token")
}
/// W605
///
/// Scans a string token for backslashes that do not begin a valid Python
/// escape sequence. When `autofix` is set, each offending backslash is
/// doubled (turning it into a literal backslash).
pub fn invalid_escape_sequence(
    locator: &Locator,
    start: Location,
    end: Location,
    autofix: bool,
) -> Vec<Diagnostic> {
    let mut diagnostics = vec![];
    let text = locator.slice_source_code_range(&Range::new(start, end));
    // Determine whether the string is single- or triple-quoted.
    let quote = extract_quote(text);
    let quote_pos = text.find(quote).unwrap();
    let prefix = text[..quote_pos].to_lowercase();
    let body = &text[(quote_pos + quote.len())..(text.len() - quote.len())];
    // Raw strings (an `r` in the prefix) can't contain invalid escapes.
    if !prefix.contains('r') {
        for (row_offset, line) in body.lines().enumerate() {
            let chars: Vec<char> = line.chars().collect();
            for col_offset in 0..chars.len() {
                if chars[col_offset] != '\\' {
                    continue;
                }
                // If the previous character was also a backslash, skip.
                if col_offset > 0 && chars[col_offset - 1] == '\\' {
                    continue;
                }
                // If we're at the end of the line, skip.
                if col_offset == chars.len() - 1 {
                    continue;
                }
                // If the next character is a valid escape sequence, skip.
                let next_char = chars[col_offset + 1];
                if VALID_ESCAPE_SEQUENCES.contains(&next_char) {
                    continue;
                }
                // Compute the location of the escape sequence by offsetting the location of the
                // string token by the characters we've seen thus far.
                let col = if row_offset == 0 {
                    start.column() + prefix.len() + quote.len() + col_offset
                } else {
                    col_offset
                };
                let location = Location::new(start.row() + row_offset, col);
                let end_location = Location::new(location.row(), location.column() + 2);
                let mut diagnostic = Diagnostic::new(
                    violations::InvalidEscapeSequence(next_char),
                    Range::new(location, end_location),
                );
                if autofix {
                    diagnostic.amend(Fix::insertion(r"\".to_string(), location));
                }
                diagnostics.push(diagnostic);
            }
        }
    }
    diagnostics
}

View File

@@ -0,0 +1,19 @@
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::rules::pycodestyle::helpers::is_ambiguous_name;
use crate::violations;
/// E742
///
/// Flags a class whose name is ambiguous per `is_ambiguous_name`. `locate`
/// is only invoked when a diagnostic is actually produced.
pub fn ambiguous_class_name<F>(name: &str, locate: F) -> Option<Diagnostic>
where
    F: FnOnce() -> Range,
{
    is_ambiguous_name(name).then(|| {
        Diagnostic::new(violations::AmbiguousClassName(name.to_string()), locate())
    })
}

View File

@@ -0,0 +1,19 @@
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::rules::pycodestyle::helpers::is_ambiguous_name;
use crate::violations;
/// E743
///
/// Flags a function whose name is ambiguous per `is_ambiguous_name`.
/// `locate` is only invoked when a diagnostic is actually produced.
pub fn ambiguous_function_name<F>(name: &str, locate: F) -> Option<Diagnostic>
where
    F: FnOnce() -> Range,
{
    is_ambiguous_name(name).then(|| {
        Diagnostic::new(violations::AmbiguousFunctionName(name.to_string()), locate())
    })
}

View File

@@ -0,0 +1,16 @@
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::rules::pycodestyle::helpers::is_ambiguous_name;
use crate::violations;
/// E741
///
/// Flags a variable binding whose name is ambiguous per `is_ambiguous_name`.
pub fn ambiguous_variable_name(name: &str, range: Range) -> Option<Diagnostic> {
    if !is_ambiguous_name(name) {
        return None;
    }
    Some(Diagnostic::new(
        violations::AmbiguousVariableName(name.to_string()),
        range,
    ))
}

View File

@@ -0,0 +1,76 @@
use rustpython_ast::{Arguments, Location, Stmt, StmtKind};
use rustpython_parser::ast::{Expr, ExprKind};
use crate::ast::helpers::{match_leading_content, match_trailing_content, unparse_stmt};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Stylist;
use crate::violations;
/// E731
///
/// Flags `name = lambda ...:` assignments. When patching is enabled and the
/// statement sits alone on its line(s), rewrites it to a `def` of the same
/// name, preserving the statement's original indentation.
pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr, stmt: &Stmt) {
    if let ExprKind::Name { id, .. } = &target.node {
        if let ExprKind::Lambda { args, body } = &value.node {
            let mut diagnostic = Diagnostic::new(
                violations::DoNotAssignLambda(id.to_string()),
                Range::from_located(stmt),
            );
            if checker.patch(diagnostic.kind.rule()) {
                // Only fix when nothing else shares the statement's lines.
                if !match_leading_content(stmt, checker.locator)
                    && !match_trailing_content(stmt, checker.locator)
                {
                    let first_line = checker.locator.slice_source_code_range(&Range::new(
                        Location::new(stmt.location.row(), 0),
                        Location::new(stmt.location.row() + 1, 0),
                    ));
                    let indentation = &leading_space(first_line);
                    // Re-indent every generated line after the first to match
                    // the original statement's indentation.
                    let mut indented = String::new();
                    for (idx, line) in function(id, args, body, checker.stylist)
                        .lines()
                        .enumerate()
                    {
                        if idx == 0 {
                            indented.push_str(line);
                        } else {
                            indented.push('\n');
                            indented.push_str(indentation);
                            indented.push_str(line);
                        }
                    }
                    diagnostic.amend(Fix::replacement(
                        indented,
                        stmt.location,
                        stmt.end_location.unwrap(),
                    ));
                }
            }
            checker.diagnostics.push(diagnostic);
        }
    }
}
/// Generate source for `def {name}({args}): return {body}` — the named
/// function that replaces a lambda assignment in the E731 fix.
fn function(name: &str, args: &Arguments, body: &Expr, stylist: &Stylist) -> String {
    // Wrap the lambda body in an explicit `return`.
    let body = Stmt::new(
        Location::default(),
        Location::default(),
        StmtKind::Return {
            value: Some(Box::new(body.clone())),
        },
    );
    let func = Stmt::new(
        Location::default(),
        Location::default(),
        StmtKind::FunctionDef {
            name: name.to_string(),
            args: Box::new(args.clone()),
            body: vec![body],
            decorator_list: vec![],
            returns: None,
            type_comment: None,
        },
    );
    unparse_stmt(&func, stylist)
}

View File

@@ -0,0 +1,28 @@
use rustpython_ast::{Excepthandler, Stmt, StmtKind};
use rustpython_parser::ast::Expr;
use crate::ast::helpers::except_range;
use crate::registry::Diagnostic;
use crate::source_code::Locator;
use crate::violations;
/// E722
///
/// Flags `except:` handlers with no exception type, unless the handler body
/// re-raises via a bare `raise`.
pub fn do_not_use_bare_except(
    type_: Option<&Expr>,
    body: &[Stmt],
    handler: &Excepthandler,
    locator: &Locator,
) -> Option<Diagnostic> {
    // Only a handler without an exception type is a "bare" except.
    if type_.is_some() {
        return None;
    }
    // A bare `raise` in the body re-raises the active exception; allow it.
    let reraises = body
        .iter()
        .any(|stmt| matches!(stmt.node, StmtKind::Raise { exc: None, .. }));
    if reraises {
        return None;
    }
    Some(Diagnostic::new(
        violations::DoNotUseBareExcept,
        except_range(handler, locator),
    ))
}

View File

@@ -0,0 +1,33 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::rules::pycodestyle::rules::is_overlong;
use crate::settings::Settings;
use crate::violations;
/// W505
///
/// Like E501, but checks against `pycodestyle.max_doc_length`; the rule is
/// disabled entirely when that setting is unset.
pub fn doc_line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
    // Rule is a no-op unless a doc-length limit has been configured.
    let limit = settings.pycodestyle.max_doc_length?;
    // Count characters, not bytes, so multi-byte characters count once.
    let line_length = line.chars().count();
    if !is_overlong(
        line,
        line_length,
        limit,
        settings.pycodestyle.ignore_overlong_task_comments,
        &settings.task_tags,
    ) {
        return None;
    }
    Some(Diagnostic::new(
        violations::DocLineTooLong(line_length, limit),
        Range::new(
            Location::new(lineno + 1, limit),
            Location::new(lineno + 1, line_length),
        ),
    ))
}

View File

@@ -0,0 +1,90 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Locator;
use crate::violations;
// See: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
// Characters that may legally follow a backslash in a (non-raw) Python string.
const VALID_ESCAPE_SEQUENCES: &[char; 23] = &[
    '\n', '\\', '\'', '"', 'a', 'b', 'f', 'n', 'r', 't', 'v', '0', '1', '2', '3', '4', '5', '6',
    '7', 'x', // Escape sequences only recognized in string literals
    'N', 'u', 'U',
];
/// Return the quotation markers used for a String token.
///
/// Triple quotes are tested before single quotes so that `'''`/`"""` are not
/// mistaken for a lone `'`/`"`. Panics if the token ends with none of them.
fn extract_quote(text: &str) -> &str {
    ["'''", "\"\"\"", "'", "\""]
        .into_iter()
        .find(|&quote| text.ends_with(quote))
        .unwrap_or_else(|| panic!("Unable to find quotation mark for String token"))
}
/// W605
///
/// Scans a string token for backslashes that do not begin a valid Python
/// escape sequence. When `autofix` is set, each offending backslash is
/// doubled (turning it into a literal backslash).
pub fn invalid_escape_sequence(
    locator: &Locator,
    start: Location,
    end: Location,
    autofix: bool,
) -> Vec<Diagnostic> {
    let mut diagnostics = vec![];
    let text = locator.slice_source_code_range(&Range::new(start, end));
    // Determine whether the string is single- or triple-quoted.
    let quote = extract_quote(text);
    let quote_pos = text.find(quote).unwrap();
    let prefix = text[..quote_pos].to_lowercase();
    let body = &text[(quote_pos + quote.len())..(text.len() - quote.len())];
    // Raw strings (an `r` in the prefix) can't contain invalid escapes.
    if !prefix.contains('r') {
        for (row_offset, line) in body.lines().enumerate() {
            let chars: Vec<char> = line.chars().collect();
            for col_offset in 0..chars.len() {
                if chars[col_offset] != '\\' {
                    continue;
                }
                // If the previous character was also a backslash, skip.
                if col_offset > 0 && chars[col_offset - 1] == '\\' {
                    continue;
                }
                // If we're at the end of the line, skip.
                if col_offset == chars.len() - 1 {
                    continue;
                }
                // If the next character is a valid escape sequence, skip.
                let next_char = chars[col_offset + 1];
                if VALID_ESCAPE_SEQUENCES.contains(&next_char) {
                    continue;
                }
                // Compute the location of the escape sequence by offsetting the location of the
                // string token by the characters we've seen thus far.
                let col = if row_offset == 0 {
                    start.column() + prefix.len() + quote.len() + col_offset
                } else {
                    col_offset
                };
                let location = Location::new(start.row() + row_offset, col);
                let end_location = Location::new(location.row(), location.column() + 2);
                let mut diagnostic = Diagnostic::new(
                    violations::InvalidEscapeSequence(next_char),
                    Range::new(location, end_location),
                );
                if autofix {
                    diagnostic.amend(Fix::insertion(r"\".to_string(), location));
                }
                diagnostics.push(diagnostic);
            }
        }
    }
    diagnostics
}

View File

@@ -0,0 +1,39 @@
use once_cell::sync::Lazy;
use regex::Regex;
static URL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://\S+$").unwrap());
/// Whether `line` (whose pre-computed character count is `line_length`)
/// should be reported as exceeding `limit`, with carve-outs for task-tag
/// comments and comments that end in a URL.
pub fn is_overlong(
    line: &str,
    line_length: usize,
    limit: usize,
    ignore_overlong_task_comments: bool,
    task_tags: &[String],
) -> bool {
    if line_length <= limit {
        return false;
    }

    let mut chunks = line.split_whitespace();
    let (first, second) = match (chunks.next(), chunks.next()) {
        (Some(first), Some(second)) => (first, second),
        // Single word / no printable chars - no way to make the line shorter.
        _ => return false,
    };

    if first == "#" {
        // Comments whose first word is a configured task tag (with an
        // optional trailing colon, e.g. `# TODO:`) may be exempted.
        if ignore_overlong_task_comments
            && task_tags
                .iter()
                .any(|tag| tag == second.trim_end_matches(':'))
        {
            return false;
        }
        // Do not enforce the line length for commented lines that end with a
        // URL or contain only a single word.
        if chunks.last().map_or(true, |c| URL_REGEX.is_match(c)) {
            return false;
        }
    }
    true
}

View File

@@ -0,0 +1,30 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::rules::pycodestyle::rules::is_overlong;
use crate::settings::Settings;
use crate::violations;
/// E501
///
/// Reports a line whose character count exceeds `settings.line_length`,
/// subject to the carve-outs implemented by `is_overlong`.
pub fn line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
    // Count characters, not bytes, so multi-byte characters count once.
    let line_length = line.chars().count();
    let limit = settings.line_length;
    if is_overlong(
        line,
        line_length,
        limit,
        settings.pycodestyle.ignore_overlong_task_comments,
        &settings.task_tags,
    ) {
        Some(Diagnostic::new(
            violations::LineTooLong(line_length, limit),
            Range::new(
                // `lineno` appears to be 0-based here (note the `+ 1`).
                Location::new(lineno + 1, limit),
                Location::new(lineno + 1, line_length),
            ),
        ))
    } else {
        None
    }
}

View File

@@ -0,0 +1,206 @@
use itertools::izip;
use rustc_hash::FxHashMap;
use rustpython_ast::Constant;
use rustpython_parser::ast::{Cmpop, Expr, ExprKind};
use crate::ast::helpers;
use crate::ast::helpers::{create_expr, unparse_expr};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Stylist;
use crate::violations;
/// E711, E712
///
/// Flags comparisons against `None` (E711) and `True`/`False` (E712) that
/// use `==`/`!=` instead of `is`/`is not`. When patching is enabled, the
/// entire comparison expression is rewritten in one replacement fix.
pub fn literal_comparisons(
    checker: &mut Checker,
    expr: &Expr,
    left: &Expr,
    ops: &[Cmpop],
    comparators: &[Expr],
    check_none_comparisons: bool,
    check_true_false_comparisons: bool,
) {
    // Mapping from (bad operator index) to (replacement operator). As we iterate
    // through the list of operators, we apply "dummy" fixes for each error,
    // then replace the entire expression at the end with one "real" fix, to
    // avoid conflicts.
    let mut bad_ops: FxHashMap<usize, Cmpop> = FxHashMap::default();
    let mut diagnostics: Vec<Diagnostic> = vec![];
    let op = ops.first().unwrap();
    // Check `left`.
    let mut comparator = left;
    let next = &comparators[0];
    if check_none_comparisons
        && matches!(
            comparator.node,
            ExprKind::Constant {
                value: Constant::None,
                kind: None
            }
        )
    {
        if matches!(op, Cmpop::Eq) {
            let diagnostic = Diagnostic::new(
                violations::NoneComparison(op.into()),
                Range::from_located(comparator),
            );
            // Skip planning a fix when the other operand is a constant
            // non-singleton (helper defined elsewhere in the crate).
            if checker.patch(diagnostic.kind.rule()) && !helpers::is_constant_non_singleton(next) {
                bad_ops.insert(0, Cmpop::Is);
            }
            diagnostics.push(diagnostic);
        }
        if matches!(op, Cmpop::NotEq) {
            let diagnostic = Diagnostic::new(
                violations::NoneComparison(op.into()),
                Range::from_located(comparator),
            );
            if checker.patch(diagnostic.kind.rule()) && !helpers::is_constant_non_singleton(next) {
                bad_ops.insert(0, Cmpop::IsNot);
            }
            diagnostics.push(diagnostic);
        }
    }
    if check_true_false_comparisons {
        if let ExprKind::Constant {
            value: Constant::Bool(value),
            kind: None,
        } = comparator.node
        {
            if matches!(op, Cmpop::Eq) {
                let diagnostic = Diagnostic::new(
                    violations::TrueFalseComparison(value, op.into()),
                    Range::from_located(comparator),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(next)
                {
                    bad_ops.insert(0, Cmpop::Is);
                }
                diagnostics.push(diagnostic);
            }
            if matches!(op, Cmpop::NotEq) {
                let diagnostic = Diagnostic::new(
                    violations::TrueFalseComparison(value, op.into()),
                    Range::from_located(comparator),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(next)
                {
                    bad_ops.insert(0, Cmpop::IsNot);
                }
                diagnostics.push(diagnostic);
            }
        }
    }
    // Check each comparator in order.
    for (idx, (op, next)) in izip!(ops, comparators).enumerate() {
        if check_none_comparisons
            && matches!(
                next.node,
                ExprKind::Constant {
                    value: Constant::None,
                    kind: None
                }
            )
        {
            if matches!(op, Cmpop::Eq) {
                let diagnostic = Diagnostic::new(
                    violations::NoneComparison(op.into()),
                    Range::from_located(next),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(comparator)
                {
                    bad_ops.insert(idx, Cmpop::Is);
                }
                diagnostics.push(diagnostic);
            }
            if matches!(op, Cmpop::NotEq) {
                let diagnostic = Diagnostic::new(
                    violations::NoneComparison(op.into()),
                    Range::from_located(next),
                );
                if checker.patch(diagnostic.kind.rule())
                    && !helpers::is_constant_non_singleton(comparator)
                {
                    bad_ops.insert(idx, Cmpop::IsNot);
                }
                diagnostics.push(diagnostic);
            }
        }
        if check_true_false_comparisons {
            if let ExprKind::Constant {
                value: Constant::Bool(value),
                kind: None,
            } = next.node
            {
                if matches!(op, Cmpop::Eq) {
                    let diagnostic = Diagnostic::new(
                        violations::TrueFalseComparison(value, op.into()),
                        Range::from_located(next),
                    );
                    if checker.patch(diagnostic.kind.rule())
                        && !helpers::is_constant_non_singleton(comparator)
                    {
                        bad_ops.insert(idx, Cmpop::Is);
                    }
                    diagnostics.push(diagnostic);
                }
                if matches!(op, Cmpop::NotEq) {
                    let diagnostic = Diagnostic::new(
                        violations::TrueFalseComparison(value, op.into()),
                        Range::from_located(next),
                    );
                    if checker.patch(diagnostic.kind.rule())
                        && !helpers::is_constant_non_singleton(comparator)
                    {
                        bad_ops.insert(idx, Cmpop::IsNot);
                    }
                    diagnostics.push(diagnostic);
                }
            }
        }
        comparator = next;
    }
    // TODO(charlie): Respect `noqa` directives. If one of the operators has a
    // `noqa`, but another doesn't, both will be removed here.
    if !bad_ops.is_empty() {
        // Replace the entire comparison expression.
        let ops = ops
            .iter()
            .enumerate()
            .map(|(idx, op)| bad_ops.get(&idx).unwrap_or(op))
            .cloned()
            .collect::<Vec<_>>();
        let content = compare(left, &ops, comparators, checker.stylist);
        for diagnostic in &mut diagnostics {
            diagnostic.amend(Fix::replacement(
                content.to_string(),
                expr.location,
                expr.end_location.unwrap(),
            ));
        }
    }
    checker.diagnostics.extend(diagnostics);
}
/// Unparse a `Compare` expression built from the given operands and
/// operators, formatted according to `stylist`. Used to generate replacement
/// source for comparison fixes.
pub fn compare(left: &Expr, ops: &[Cmpop], comparators: &[Expr], stylist: &Stylist) -> String {
    let comparison = create_expr(ExprKind::Compare {
        left: Box::new(left.clone()),
        ops: ops.to_vec(),
        comparators: comparators.to_vec(),
    });
    unparse_expr(&comparison, stylist)
}

View File

@@ -0,0 +1,23 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::registry::Diagnostic;
use crate::violations;
/// E101
/// E101
///
/// Checks a single physical line for indentation that mixes spaces and tabs.
/// `lineno` is the zero-based line index; the reported range is one-based and
/// spans the entire leading-whitespace run.
pub fn mixed_spaces_and_tabs(lineno: usize, line: &str) -> Option<Diagnostic> {
    let indent = leading_space(line);
    // Only flag when *both* characters occur within the indentation.
    if !indent.contains(' ') || !indent.contains('\t') {
        return None;
    }
    let row = lineno + 1;
    Some(Diagnostic::new(
        violations::MixedSpacesAndTabs,
        Range::new(
            Location::new(row, 0),
            Location::new(row, indent.chars().count()),
        ),
    ))
}

View File

@@ -0,0 +1,29 @@
pub use ambiguous_class_name::ambiguous_class_name;
pub use ambiguous_function_name::ambiguous_function_name;
pub use ambiguous_variable_name::ambiguous_variable_name;
pub use do_not_assign_lambda::do_not_assign_lambda;
pub use do_not_use_bare_except::do_not_use_bare_except;
pub use doc_line_too_long::doc_line_too_long;
pub use invalid_escape_sequence::invalid_escape_sequence;
pub use is_overlong::is_overlong;
pub use line_too_long::line_too_long;
pub use literal_comparisons::literal_comparisons;
pub use mixed_spaces_and_tabs::mixed_spaces_and_tabs;
pub use no_newline_at_end_of_file::no_newline_at_end_of_file;
pub use not_tests::not_tests;
pub use type_comparison::type_comparison;
mod ambiguous_class_name;
mod ambiguous_function_name;
mod ambiguous_variable_name;
mod do_not_assign_lambda;
mod do_not_use_bare_except;
mod doc_line_too_long;
mod invalid_escape_sequence;
mod is_overlong;
mod line_too_long;
mod literal_comparisons;
mod mixed_spaces_and_tabs;
mod no_newline_at_end_of_file;
mod not_tests;
mod type_comparison;

View File

@@ -0,0 +1,27 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::violations;
/// W292
/// W292
///
/// Reports a missing trailing newline at the end of the file. When `autofix`
/// is enabled, attaches a fix inserting the newline at the end of the file.
pub fn no_newline_at_end_of_file(contents: &str, autofix: bool) -> Option<Diagnostic> {
    if contents.ends_with('\n') {
        return None;
    }
    // An empty file has no last line, and W292 should not fire on it.
    let line = contents.lines().last()?;
    // Both locations point at the very end of the file, so the range is empty.
    let location = Location::new(contents.lines().count(), line.len());
    let mut diagnostic = Diagnostic::new(
        violations::NoNewLineAtEndOfFile,
        Range::new(location, location),
    );
    if autofix {
        diagnostic.amend(Fix::insertion("\n".to_string(), location));
    }
    Some(diagnostic)
}

View File

@@ -0,0 +1,80 @@
use rustpython_ast::Unaryop;
use rustpython_parser::ast::{Cmpop, Expr, ExprKind};
use crate::ast::helpers::{create_expr, unparse_expr};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Stylist;
use crate::violations;
/// E713, E714
/// E713, E714
///
/// Flags `not x in y` and `not x is y`, offering fixes that rewrite them to
/// `x not in y` / `x is not y`. The fix is only attached when the comparison
/// has exactly one operator, so the rewrite is unambiguous.
pub fn not_tests(
    checker: &mut Checker,
    expr: &Expr,
    op: &Unaryop,
    operand: &Expr,
    check_not_in: bool,
    check_not_is: bool,
) {
    if !matches!(op, Unaryop::Not) {
        return;
    }
    let ExprKind::Compare {
        left,
        ops,
        comparators,
        ..
    } = &operand.node
    else {
        return;
    };
    // A chained comparison (`not a in b in c`) can't be safely rewritten.
    let should_fix = ops.len() == 1;
    for cmp_op in ops.iter() {
        match cmp_op {
            Cmpop::In if check_not_in => {
                let mut diagnostic =
                    Diagnostic::new(violations::NotInTest, Range::from_located(operand));
                if checker.patch(diagnostic.kind.rule()) && should_fix {
                    diagnostic.amend(Fix::replacement(
                        compare(left, &[Cmpop::NotIn], comparators, checker.stylist),
                        expr.location,
                        expr.end_location.unwrap(),
                    ));
                }
                checker.diagnostics.push(diagnostic);
            }
            Cmpop::Is if check_not_is => {
                let mut diagnostic =
                    Diagnostic::new(violations::NotIsTest, Range::from_located(operand));
                if checker.patch(diagnostic.kind.rule()) && should_fix {
                    diagnostic.amend(Fix::replacement(
                        compare(left, &[Cmpop::IsNot], comparators, checker.stylist),
                        expr.location,
                        expr.end_location.unwrap(),
                    ));
                }
                checker.diagnostics.push(diagnostic);
            }
            _ => {}
        }
    }
}
/// Unparse a `Compare` node built from the given left operand, operators, and
/// comparators, producing the replacement source text for an autofix.
pub fn compare(left: &Expr, ops: &[Cmpop], comparators: &[Expr], stylist: &Stylist) -> String {
    let compare_node = create_expr(ExprKind::Compare {
        left: Box::new(left.clone()),
        ops: ops.to_vec(),
        comparators: comparators.to_vec(),
    });
    unparse_expr(&compare_node, stylist)
}

View File

@@ -0,0 +1,52 @@
use itertools::izip;
use rustpython_ast::Constant;
use rustpython_parser::ast::{Cmpop, Expr, ExprKind};
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::violations;
/// E721
/// E721
///
/// Flags direct type comparisons such as `type(obj) == type("")` or
/// comparisons against `types.*` members, which should usually be
/// `isinstance` checks instead.
pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) -> Vec<Diagnostic> {
    let mut diagnostics: Vec<Diagnostic> = Vec::new();
    for (op, right) in izip!(ops, comparators) {
        // Only equality/identity comparisons are relevant to E721.
        if !matches!(op, Cmpop::Is | Cmpop::IsNot | Cmpop::Eq | Cmpop::NotEq) {
            continue;
        }
        match &right.node {
            // Ex) `type(False)` on the right-hand side.
            ExprKind::Call { func, args, .. } => {
                let ExprKind::Name { id, .. } = &func.node else {
                    continue;
                };
                if id != "type" {
                    continue;
                }
                let Some(arg) = args.first() else {
                    continue;
                };
                // Allow comparison for types which are not obvious: a bare
                // name or `None` argument is still flagged, anything else
                // (e.g. `type(x.y)`) is let through.
                let allowed = matches!(
                    arg.node,
                    ExprKind::Name { .. }
                        | ExprKind::Constant {
                            value: Constant::None,
                            kind: None
                        }
                );
                if !allowed {
                    diagnostics.push(Diagnostic::new(violations::TypeComparison, location));
                }
            }
            // Ex) `types.IntType`
            ExprKind::Attribute { value, .. } => {
                if let ExprKind::Name { id, .. } = &value.node {
                    if id == "types" {
                        diagnostics.push(Diagnostic::new(violations::TypeComparison, location));
                    }
                }
            }
            _ => {}
        }
    }
    diagnostics
}

View File

@@ -2,8 +2,11 @@ use crate::ast::types::Range;
use crate::ast::whitespace::LinesWithTrailingNewline;
use crate::checkers::ast::Checker;
use crate::docstrings::constants;
use crate::docstrings::definition::Docstring;
use crate::docstrings::definition::{DefinitionKind, Docstring};
use crate::fix::Fix;
use crate::message::Location;
use crate::registry::{Diagnostic, Rule};
use crate::rules::pydocstyle::helpers::leading_quote;
use crate::violations;
/// D212, D213
@@ -14,8 +17,8 @@ pub fn multi_line_summary_start(checker: &mut Checker, docstring: &Docstring) {
if LinesWithTrailingNewline::from(body).nth(1).is_none() {
return;
};
let Some(first_line) = contents
.lines()
let mut content_lines = contents.lines();
let Some(first_line) = content_lines
.next()
else
{
@@ -27,10 +30,26 @@ pub fn multi_line_summary_start(checker: &mut Checker, docstring: &Docstring) {
.rules
.enabled(&Rule::MultiLineSummaryFirstLine)
{
checker.diagnostics.push(Diagnostic::new(
let mut diagnostic = Diagnostic::new(
violations::MultiLineSummaryFirstLine,
Range::from_located(docstring.expr),
));
);
if checker.patch(diagnostic.kind.rule()) {
let location = docstring.expr.location;
let mut end_row = location.row() + 1;
// Delete until first non-whitespace char.
for line in content_lines {
if let Some(end_column) = line.find(|c: char| !c.is_whitespace()) {
let start =
Location::new(location.row(), location.column() + first_line.len());
let end = Location::new(end_row, end_column);
diagnostic.amend(Fix::deletion(start, end));
break;
}
end_row += 1;
}
}
checker.diagnostics.push(diagnostic);
}
} else {
if checker
@@ -38,10 +57,57 @@ pub fn multi_line_summary_start(checker: &mut Checker, docstring: &Docstring) {
.rules
.enabled(&Rule::MultiLineSummarySecondLine)
{
checker.diagnostics.push(Diagnostic::new(
let mut diagnostic = Diagnostic::new(
violations::MultiLineSummarySecondLine,
Range::from_located(docstring.expr),
));
);
if checker.patch(diagnostic.kind.rule()) {
let mut indentation = String::from(docstring.indentation);
let mut fixable = true;
if !indentation.chars().all(char::is_whitespace) {
fixable = false;
// If the docstring isn't on its own line, look at the parent indentation, and
// add the default indentation to get the "right" level.
if let DefinitionKind::Class(parent)
| DefinitionKind::NestedClass(parent)
| DefinitionKind::Function(parent)
| DefinitionKind::NestedFunction(parent)
| DefinitionKind::Method(parent) = &docstring.kind
{
let parent_indentation =
checker.locator.slice_source_code_range(&Range::new(
Location::new(parent.location.row(), 0),
Location::new(parent.location.row(), parent.location.column()),
));
if parent_indentation.chars().all(char::is_whitespace) {
indentation.clear();
indentation.push_str(parent_indentation);
indentation.push_str(checker.stylist.indentation());
fixable = true;
}
};
}
if fixable {
let location = docstring.expr.location;
let prefix = leading_quote(contents).unwrap();
// Use replacement instead of insert to trim possible whitespace between leading
// quote and text.
let repl = format!(
"{}{}{}",
checker.stylist.line_ending().as_str(),
indentation,
first_line.strip_prefix(prefix).unwrap().trim_start()
);
diagnostic.amend(Fix::replacement(
repl,
Location::new(location.row(), location.column() + prefix.len()),
Location::new(location.row(), location.column() + first_line.len()),
));
}
}
checker.diagnostics.push(diagnostic);
}
}
}

View File

@@ -4,7 +4,7 @@ use ruff_macros::ConfigurationOptions;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::registry::RuleSelector;
use crate::registry::Rule;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
@@ -18,55 +18,50 @@ pub enum Convention {
}
impl Convention {
pub fn codes(self) -> &'static [RuleSelector] {
pub fn rules_to_be_ignored(self) -> &'static [Rule] {
match self {
Convention::Google => &[
// All errors except D203, D204, D213, D215, D400, D401, D404, D406, D407, D408,
// D409 and D413.
RuleSelector::D203,
RuleSelector::D204,
RuleSelector::D213,
RuleSelector::D215,
RuleSelector::D400,
RuleSelector::D404,
RuleSelector::D406,
RuleSelector::D407,
RuleSelector::D408,
RuleSelector::D409,
RuleSelector::D413,
Rule::OneBlankLineBeforeClass,
Rule::OneBlankLineAfterClass,
Rule::MultiLineSummarySecondLine,
Rule::SectionUnderlineNotOverIndented,
Rule::EndsInPeriod,
Rule::NoThisPrefix,
Rule::NewLineAfterSectionName,
Rule::DashedUnderlineAfterSection,
Rule::SectionUnderlineAfterName,
Rule::SectionUnderlineMatchesSectionLength,
Rule::BlankLineAfterLastSection,
],
Convention::Numpy => &[
// All errors except D107, D203, D212, D213, D402, D413, D415, D416, and D417.
RuleSelector::D107,
RuleSelector::D203,
RuleSelector::D212,
RuleSelector::D213,
RuleSelector::D402,
RuleSelector::D413,
RuleSelector::D415,
RuleSelector::D416,
RuleSelector::D417,
Rule::PublicInit,
Rule::OneBlankLineBeforeClass,
Rule::MultiLineSummaryFirstLine,
Rule::MultiLineSummarySecondLine,
Rule::NoSignature,
Rule::BlankLineAfterLastSection,
Rule::EndsInPunctuation,
Rule::SectionNameEndsInColon,
Rule::DocumentAllArguments,
],
Convention::Pep257 => &[
// All errors except D203, D212, D213, D214, D215, D404, D405, D406, D407, D408,
// D409, D410, D411, D413, D415, D416 and D417.
RuleSelector::D203,
RuleSelector::D212,
RuleSelector::D213,
RuleSelector::D214,
RuleSelector::D215,
RuleSelector::D404,
RuleSelector::D405,
RuleSelector::D406,
RuleSelector::D407,
RuleSelector::D408,
RuleSelector::D409,
RuleSelector::D410,
RuleSelector::D411,
RuleSelector::D413,
RuleSelector::D415,
RuleSelector::D416,
RuleSelector::D417,
Rule::OneBlankLineBeforeClass,
Rule::MultiLineSummaryFirstLine,
Rule::MultiLineSummarySecondLine,
Rule::SectionNotOverIndented,
Rule::SectionUnderlineNotOverIndented,
Rule::NoThisPrefix,
Rule::CapitalizeSectionName,
Rule::NewLineAfterSectionName,
Rule::DashedUnderlineAfterSection,
Rule::SectionUnderlineAfterName,
Rule::SectionUnderlineMatchesSectionLength,
Rule::BlankLineAfterSection,
Rule::BlankLineBeforeSection,
Rule::BlankLineAfterLastSection,
Rule::EndsInPunctuation,
Rule::SectionNameEndsInColon,
Rule::DocumentAllArguments,
],
}
}

View File

@@ -10,7 +10,14 @@ expression: diagnostics
end_location:
row: 131
column: 7
fix: ~
fix:
content: ""
location:
row: 129
column: 7
end_location:
row: 130
column: 4
parent: ~
- kind:
MultiLineSummaryFirstLine: ~
@@ -20,7 +27,14 @@ expression: diagnostics
end_location:
row: 599
column: 13
fix: ~
fix:
content: ""
location:
row: 597
column: 7
end_location:
row: 599
column: 4
parent: ~
- kind:
MultiLineSummaryFirstLine: ~
@@ -30,6 +44,13 @@ expression: diagnostics
end_location:
row: 626
column: 14
fix: ~
fix:
content: ""
location:
row: 624
column: 7
end_location:
row: 626
column: 4
parent: ~

View File

@@ -10,7 +10,14 @@ expression: diagnostics
end_location:
row: 203
column: 7
fix: ~
fix:
content: "\n Summary."
location:
row: 200
column: 7
end_location:
row: 200
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -20,7 +27,14 @@ expression: diagnostics
end_location:
row: 215
column: 7
fix: ~
fix:
content: "\n Summary."
location:
row: 210
column: 7
end_location:
row: 210
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -30,7 +44,14 @@ expression: diagnostics
end_location:
row: 224
column: 7
fix: ~
fix:
content: "\n Summary."
location:
row: 220
column: 7
end_location:
row: 220
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -40,7 +61,14 @@ expression: diagnostics
end_location:
row: 234
column: 7
fix: ~
fix:
content: "\n Summary."
location:
row: 230
column: 7
end_location:
row: 230
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -50,7 +78,14 @@ expression: diagnostics
end_location:
row: 244
column: 3
fix: ~
fix:
content: "\n Summary."
location:
row: 240
column: 7
end_location:
row: 240
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -60,7 +95,14 @@ expression: diagnostics
end_location:
row: 254
column: 7
fix: ~
fix:
content: "\n Summary."
location:
row: 250
column: 7
end_location:
row: 250
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -70,7 +112,14 @@ expression: diagnostics
end_location:
row: 264
column: 11
fix: ~
fix:
content: "\n Summary."
location:
row: 260
column: 7
end_location:
row: 260
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -80,7 +129,14 @@ expression: diagnostics
end_location:
row: 274
column: 7
fix: ~
fix:
content: "\n Summary."
location:
row: 270
column: 7
end_location:
row: 270
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -90,7 +146,14 @@ expression: diagnostics
end_location:
row: 283
column: 19
fix: ~
fix:
content: "\n Summary."
location:
row: 281
column: 7
end_location:
row: 281
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -100,7 +163,14 @@ expression: diagnostics
end_location:
row: 302
column: 7
fix: ~
fix:
content: "\n Whitespace at the beginning."
location:
row: 299
column: 7
end_location:
row: 299
column: 36
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -110,7 +180,14 @@ expression: diagnostics
end_location:
row: 348
column: 7
fix: ~
fix:
content: "\n Exclude some backslashes from D301."
location:
row: 343
column: 7
end_location:
row: 343
column: 42
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -120,7 +197,14 @@ expression: diagnostics
end_location:
row: 386
column: 7
fix: ~
fix:
content: "\n First line."
location:
row: 383
column: 7
end_location:
row: 383
column: 18
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -130,7 +214,14 @@ expression: diagnostics
end_location:
row: 396
column: 7
fix: ~
fix:
content: "\n One liner."
location:
row: 392
column: 7
end_location:
row: 392
column: 17
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -140,7 +231,14 @@ expression: diagnostics
end_location:
row: 441
column: 7
fix: ~
fix:
content: "\n First Line."
location:
row: 438
column: 39
end_location:
row: 438
column: 50
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -150,7 +248,14 @@ expression: diagnostics
end_location:
row: 454
column: 7
fix: ~
fix:
content: "\n Check for a bug where the previous function caused an assertion."
location:
row: 450
column: 7
end_location:
row: 450
column: 71
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -160,7 +265,14 @@ expression: diagnostics
end_location:
row: 532
column: 7
fix: ~
fix:
content: "\n A Blah."
location:
row: 526
column: 7
end_location:
row: 526
column: 14
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -170,7 +282,14 @@ expression: diagnostics
end_location:
row: 549
column: 7
fix: ~
fix:
content: "\n Leading space."
location:
row: 546
column: 7
end_location:
row: 546
column: 21
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -180,7 +299,14 @@ expression: diagnostics
end_location:
row: 558
column: 7
fix: ~
fix:
content: "\n Leading space."
location:
row: 555
column: 7
end_location:
row: 555
column: 21
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -190,7 +316,14 @@ expression: diagnostics
end_location:
row: 571
column: 7
fix: ~
fix:
content: "\n Trailing and leading space."
location:
row: 568
column: 7
end_location:
row: 568
column: 34
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -200,7 +333,14 @@ expression: diagnostics
end_location:
row: 590
column: 21
fix: ~
fix:
content: "\n Summary."
location:
row: 588
column: 7
end_location:
row: 588
column: 15
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -210,7 +350,14 @@ expression: diagnostics
end_location:
row: 608
column: 7
fix: ~
fix:
content: "\n Wrong."
location:
row: 606
column: 8
end_location:
row: 606
column: 14
parent: ~
- kind:
MultiLineSummarySecondLine: ~
@@ -220,6 +367,13 @@ expression: diagnostics
end_location:
row: 617
column: 7
fix: ~
fix:
content: "\n Wrong.\""
location:
row: 615
column: 7
end_location:
row: 615
column: 14
parent: ~

View File

@@ -15,7 +15,7 @@ mod tests {
use textwrap::dedent;
use crate::linter::{check_path, test_path};
use crate::registry::{Rule, RuleSelector};
use crate::registry::{Rule, RuleCodePrefix};
use crate::settings::flags;
use crate::source_code::{Indexer, Locator, Stylist};
use crate::{directives, rustpython_helpers, settings};
@@ -209,7 +209,7 @@ mod tests {
/// Note that all tests marked with `#[ignore]` should be considered TODOs.
fn flakes(contents: &str, expected: &[Rule]) -> Result<()> {
let contents = dedent(contents);
let settings = settings::Settings::for_rules(&RuleSelector::F);
let settings = settings::Settings::for_rules(&RuleCodePrefix::F);
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);

View File

@@ -12,6 +12,7 @@ use crate::fix::Fix;
use crate::registry::{Diagnostic, Rule};
use crate::rules::pydocstyle::helpers::{leading_quote, trailing_quote};
use crate::rules::pyflakes::format::FormatSummary;
use crate::rules::pyupgrade::helpers::curly_escape;
use crate::violations;
/// Like [`FormatSummary`], but maps positional and keyword arguments to their
@@ -217,13 +218,7 @@ fn try_convert_to_f_string(checker: &Checker, expr: &Expr) -> Option<String> {
converted.push('}');
}
FormatPart::Literal(value) => {
if value.starts_with('{') {
converted.push('{');
}
converted.push_str(&value);
if value.ends_with('}') {
converted.push('}');
}
converted.push_str(&curly_escape(&value));
}
}
}

View File

@@ -1,11 +1,11 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Keyword, StmtKind};
use crate::ast::helpers::{create_expr, create_stmt};
use crate::ast::helpers::{create_expr, create_stmt, unparse_expr, unparse_stmt};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, Rule};
use crate::source_code::{Generator, Locator, Stylist};
use crate::source_code::{Locator, Stylist};
use crate::violations;
/// Return `true` if the call path is a reference to `${module}.${any}`.
@@ -130,10 +130,8 @@ fn replace_call_on_arg_by_arg_method_call(
fn replace_by_expr_kind(node: ExprKind, expr: &Expr, patch: bool, stylist: &Stylist) -> Diagnostic {
let mut diagnostic = Diagnostic::new(violations::RemoveSixCompat, Range::from_located(expr));
if patch {
let mut generator: Generator = stylist.into();
generator.unparse_expr(&create_expr(node), 0);
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_expr(&create_expr(node), stylist),
expr.location,
expr.end_location.unwrap(),
));
@@ -144,10 +142,8 @@ fn replace_by_expr_kind(node: ExprKind, expr: &Expr, patch: bool, stylist: &Styl
fn replace_by_stmt_kind(node: StmtKind, expr: &Expr, patch: bool, stylist: &Stylist) -> Diagnostic {
let mut diagnostic = Diagnostic::new(violations::RemoveSixCompat, Range::from_located(expr));
if patch {
let mut generator: Generator = stylist.into();
generator.unparse_stmt(&create_stmt(node));
diagnostic.amend(Fix::replacement(
generator.generate(),
unparse_stmt(&create_stmt(node), stylist),
expr.location,
expr.end_location.unwrap(),
));

Some files were not shown because too many files have changed in this diff Show More