Compare commits
34 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
118a93260a | ||
|
|
1c16255884 | ||
|
|
16c4552946 | ||
|
|
0ba3989b3d | ||
|
|
3435e15cba | ||
|
|
781bbbc286 | ||
|
|
acf0b82f19 | ||
|
|
057414ddd4 | ||
|
|
ca94e9aa26 | ||
|
|
797b5bd261 | ||
|
|
a64f62f439 | ||
|
|
058ee8e6bf | ||
|
|
39fc1f0c1b | ||
|
|
34842b4c4b | ||
|
|
dfa6fa8f83 | ||
|
|
6131c819ed | ||
|
|
79ba420faa | ||
|
|
d16ba890ae | ||
|
|
6b6851bf1f | ||
|
|
056718ce75 | ||
|
|
4521fdf021 | ||
|
|
8e479628f2 | ||
|
|
2a11c4b1f1 | ||
|
|
a8cde5a936 | ||
|
|
1822b57ed5 | ||
|
|
c679570041 | ||
|
|
edcb3a7217 | ||
|
|
6e43dc7270 | ||
|
|
570d0864f2 | ||
|
|
d22e96916c | ||
|
|
043d31dcdf | ||
|
|
1392e4cced | ||
|
|
59ee89a091 | ||
|
|
6a7c3728ee |
54
.github/workflows/ci.yaml
vendored
54
.github/workflows/ci.yaml
vendored
@@ -121,31 +121,35 @@ jobs:
|
||||
- run: cargo test --all
|
||||
- run: cargo test --package ruff --test black_compatibility_test -- --ignored
|
||||
|
||||
wasm-pack-test:
|
||||
name: "wasm-pack test"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: nightly-2022-11-01
|
||||
override: true
|
||||
- uses: actions/cache@v3
|
||||
env:
|
||||
cache-name: cache-cargo
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-build-${{ env.cache-name }}-
|
||||
${{ runner.os }}-build-
|
||||
${{ runner.os }}-
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
- run: wasm-pack test --node
|
||||
# TODO(charlie): Re-enable the `wasm-pack` tests.
|
||||
# See: https://github.com/charliermarsh/ruff/issues/1425
|
||||
# wasm-pack-test:
|
||||
# name: "wasm-pack test"
|
||||
# runs-on: ubuntu-latest
|
||||
# env:
|
||||
# WASM_BINDGEN_TEST_TIMEOUT: 60
|
||||
# steps:
|
||||
# - uses: actions/checkout@v3
|
||||
# - uses: actions-rs/toolchain@v1
|
||||
# with:
|
||||
# profile: minimal
|
||||
# toolchain: nightly-2022-11-01
|
||||
# override: true
|
||||
# - uses: actions/cache@v3
|
||||
# env:
|
||||
# cache-name: cache-cargo
|
||||
# with:
|
||||
# path: |
|
||||
# ~/.cargo/registry
|
||||
# ~/.cargo/git
|
||||
# key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
# restore-keys: |
|
||||
# ${{ runner.os }}-build-${{ env.cache-name }}-
|
||||
# ${{ runner.os }}-build-
|
||||
# ${{ runner.os }}-
|
||||
# - uses: jetli/wasm-pack-action@v0.4.0
|
||||
# - uses: jetli/wasm-bindgen-action@v0.2.0
|
||||
# - run: wasm-pack test --node
|
||||
|
||||
maturin-build:
|
||||
name: "maturin build"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
rev: v0.0.196
|
||||
rev: v0.0.200
|
||||
hooks:
|
||||
- id: ruff
|
||||
|
||||
|
||||
9
Cargo.lock
generated
9
Cargo.lock
generated
@@ -750,7 +750,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.196-dev.0"
|
||||
version = "0.0.200-dev.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.0.32",
|
||||
@@ -1878,7 +1878,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -1930,6 +1930,7 @@ dependencies = [
|
||||
"serde-wasm-bindgen",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"similar",
|
||||
"strum",
|
||||
"strum_macros",
|
||||
"test-case",
|
||||
@@ -1945,7 +1946,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_dev"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.0.32",
|
||||
@@ -1966,7 +1967,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_macros"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
||||
11
Cargo.toml
11
Cargo.toml
@@ -6,9 +6,15 @@ members = [
|
||||
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.65.0"
|
||||
documentation = "https://github.com/charliermarsh/ruff"
|
||||
homepage = "https://github.com/charliermarsh/ruff"
|
||||
repository = "https://github.com/charliermarsh/ruff"
|
||||
readme = "README.md"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
name = "ruff"
|
||||
@@ -45,7 +51,7 @@ path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix
|
||||
quick-junit = { version = "0.3.2" }
|
||||
regex = { version = "1.6.0" }
|
||||
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
|
||||
ruff_macros = { version = "0.0.196", path = "ruff_macros" }
|
||||
ruff_macros = { version = "0.0.200", path = "ruff_macros" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
|
||||
@@ -55,6 +61,7 @@ semver = { version = "1.0.16" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_json = { version = "1.0.87" }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
similar = { version = "2.2.1" }
|
||||
strum = { version = "0.24.1", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.24.3" }
|
||||
textwrap = { version = "0.16.0" }
|
||||
|
||||
73
README.md
73
README.md
@@ -167,7 +167,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
|
||||
```yaml
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.0.196'
|
||||
rev: 'v0.0.200'
|
||||
hooks:
|
||||
- id: ruff
|
||||
# Respect `exclude` and `extend-exclude` settings.
|
||||
@@ -225,8 +225,8 @@ max-complexity = 10
|
||||
```
|
||||
|
||||
As an example, the following would configure Ruff to: (1) avoid checking for line-length
|
||||
violations (`E501`); (2), always autofix, but never remove unused imports (`F401`); and (3) ignore
|
||||
import-at-top-of-file errors (`E402`) in `__init__.py` files:
|
||||
violations (`E501`); (2) never remove unused imports (`F401`); and (3) ignore import-at-top-of-file
|
||||
errors (`E402`) in `__init__.py` files:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
@@ -236,8 +236,7 @@ select = ["E", "F"]
|
||||
# Never enforce `E501` (line length violations).
|
||||
ignore = ["E501"]
|
||||
|
||||
# Always autofix, but never try to fix `F401` (unused imports).
|
||||
fix = true
|
||||
# Never try to fix `F401` (unused imports).
|
||||
unfixable = ["F401"]
|
||||
|
||||
# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
|
||||
@@ -257,6 +256,17 @@ select = ["E", "F", "Q"]
|
||||
docstring-quotes = "double"
|
||||
```
|
||||
|
||||
Ruff mirrors Flake8's error code system, in which each error code consists of a one-to-three letter
|
||||
prefix, followed by three digits (e.g., `F401`). The prefix indicates that "source" of the error
|
||||
code (e.g., `F` for Pyflakes, `E` for `pycodestyle`, `ANN` for `flake8-annotations`). The set of
|
||||
enabled errors is determined by the `select` and `ignore` options, which support both the full
|
||||
error code (e.g., `F401`) and the prefix (e.g., `F`).
|
||||
|
||||
As a special-case, Ruff also supports the `ALL` error code, which enables all error codes. Note that
|
||||
some of the `pydocstyle` error codes are conflicting (e.g., `D203` and `D211`) as they represent
|
||||
alternative docstring formats. Enabling `ALL` without further configuration may result in suboptimal
|
||||
behavior, especially for the `pydocstyle` plugin.
|
||||
|
||||
As an alternative to `pyproject.toml`, Ruff will also respect a `ruff.toml` file, which implements
|
||||
an equivalent schema (though the `[tool.ruff]` hierarchy can be omitted). For example, the above
|
||||
`pyproject.toml` described above would be represented via the following `ruff.toml`:
|
||||
@@ -313,6 +323,8 @@ Options:
|
||||
Attempt to automatically fix lint errors
|
||||
--fix-only
|
||||
Fix any fixable lint errors, but don't report on leftover violations. Implies `--fix`
|
||||
--diff
|
||||
Avoid writing any fixed files back; instead, output a diff for each changed file to stdout
|
||||
-n, --no-cache
|
||||
Disable cache reads
|
||||
--select <SELECT>
|
||||
@@ -334,13 +346,15 @@ Options:
|
||||
--per-file-ignores <PER_FILE_IGNORES>
|
||||
List of mappings from file pattern to code to exclude
|
||||
--format <FORMAT>
|
||||
Output serialization format for error messages [possible values: text, json, junit, grouped, github]
|
||||
Output serialization format for error messages [possible values: text, json, junit, grouped, github, gitlab]
|
||||
--show-source
|
||||
Show violations with source code
|
||||
--respect-gitignore
|
||||
Respect file exclusions via `.gitignore` and other standard ignore files
|
||||
--force-exclude
|
||||
Enforce exclusions, even for paths passed to Ruff directly on the command-line
|
||||
--update-check
|
||||
Enable or disable automatic update checks
|
||||
--show-files
|
||||
See the files Ruff will be run against with the current settings
|
||||
--show-settings
|
||||
@@ -661,6 +675,8 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
|
||||
| UP019 | TypingTextStrAlias | `typing.Text` is deprecated, use `str` | 🛠 |
|
||||
| UP020 | OpenAlias | Use builtin `open` | 🛠 |
|
||||
| UP021 | ReplaceUniversalNewlines | `universal_newlines` is deprecated, use `text` | 🛠 |
|
||||
| UP022 | ReplaceStdoutStderr | Sending stdout and stderr to pipe is deprecated, use `capture_output` | 🛠 |
|
||||
| UP023 | RewriteCElementTree | `cElementTree` is deprecated, use `ElementTree` | 🛠 |
|
||||
|
||||
### pep8-naming (N)
|
||||
|
||||
@@ -957,6 +973,7 @@ For more, see [pygrep-hooks](https://github.com/pre-commit/pygrep-hooks) on GitH
|
||||
| PGH001 | NoEval | No builtin `eval()` allowed | |
|
||||
| PGH002 | DeprecatedLogWarn | `warn` is deprecated in favor of `warning` | |
|
||||
| PGH003 | BlanketTypeIgnore | Use specific error codes when ignoring type issues | |
|
||||
| PGH004 | BlanketNOQA | Use specific error codes when using `noqa` | |
|
||||
|
||||
### Pylint (PLC, PLE, PLR, PLW)
|
||||
|
||||
@@ -984,6 +1001,7 @@ For more, see [Pylint](https://pypi.org/project/pylint/2.15.7/) on PyPI.
|
||||
| RUF001 | AmbiguousUnicodeCharacterString | String contains ambiguous unicode character '𝐁' (did you mean 'B'?) | 🛠 |
|
||||
| RUF002 | AmbiguousUnicodeCharacterDocstring | Docstring contains ambiguous unicode character '𝐁' (did you mean 'B'?) | 🛠 |
|
||||
| RUF003 | AmbiguousUnicodeCharacterComment | Comment contains ambiguous unicode character '𝐁' (did you mean 'B'?) | |
|
||||
| RUF004 | KeywordArgumentBeforeStarArgument | Keyword argument `...` must come after starred arguments | |
|
||||
| RUF100 | UnusedNOQA | Unused blanket `noqa` directive | 🛠 |
|
||||
|
||||
<!-- End auto-generated sections. -->
|
||||
@@ -1368,6 +1386,22 @@ src = ["src", "tests"]
|
||||
known-first-party = ["my_module1", "my_module2"]
|
||||
```
|
||||
|
||||
### Does Ruff support Jupyter Notebooks?
|
||||
|
||||
Ruff is integrated into [nbQA](https://github.com/nbQA-dev/nbQA), a tool for running linters and
|
||||
code formatters over Jupyter Notebooks.
|
||||
|
||||
After installing `ruff` and `nbqa`, you can run Ruff over a notebook like so:
|
||||
|
||||
```shell
|
||||
> nbqa ruff Untitled.ipynb
|
||||
Untitled.ipynb:cell_1:2:5: F841 Local variable `x` is assigned to but never used
|
||||
Untitled.ipynb:cell_2:1:1: E402 Module level import not at top of file
|
||||
Untitled.ipynb:cell_2:1:8: F401 `os` imported but unused
|
||||
Found 3 error(s).
|
||||
1 potentially fixable with the --fix option.
|
||||
```
|
||||
|
||||
### Does Ruff support NumPy- or Google-style docstrings?
|
||||
|
||||
Yes! To enable a specific docstring convention, start by enabling all `pydocstyle` error codes, and
|
||||
@@ -1902,8 +1936,9 @@ force-exclude = true
|
||||
|
||||
The style in which violation messages should be formatted: `"text"`
|
||||
(default), `"grouped"` (group messages by file), `"json"`
|
||||
(machine-readable), `"junit"` (machine-readable XML), or `"github"`
|
||||
(GitHub Actions annotations).
|
||||
(machine-readable), `"junit"` (machine-readable XML), `"github"`
|
||||
(GitHub Actions annotations) or `"gitlab"`
|
||||
(GitLab CI code quality report).
|
||||
|
||||
**Default value**: `"text"`
|
||||
|
||||
@@ -2165,6 +2200,24 @@ unfixable = ["F401"]
|
||||
|
||||
---
|
||||
|
||||
#### [`update-check`](#update-check)
|
||||
|
||||
Enable or disable automatic update checks (overridden by the
|
||||
`--update-check` and `--no-update-check` command-line flags).
|
||||
|
||||
**Default value**: `true`
|
||||
|
||||
**Type**: `bool`
|
||||
|
||||
**Example usage**:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
update-check = false
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `flake8-annotations`
|
||||
|
||||
#### [`allow-star-arg-any`](#allow-star-arg-any)
|
||||
@@ -2408,7 +2461,7 @@ multiline-quotes = "single"
|
||||
#### [`ban-relative-imports`](#ban-relative-imports)
|
||||
|
||||
Whether to ban all relative imports (`"all"`), or only those imports
|
||||
that extend into the parent module and beyond (`"parents"`).
|
||||
that extend into the parent module or beyond (`"parents"`).
|
||||
|
||||
**Default value**: `"parents"`
|
||||
|
||||
@@ -2693,7 +2746,7 @@ Whether to use Google-style or Numpy-style conventions when detecting
|
||||
docstring sections. By default, conventions will be inferred from
|
||||
the available sections.
|
||||
|
||||
**Default value**: `"convention"`
|
||||
**Default value**: `None`
|
||||
|
||||
**Type**: `Convention`
|
||||
|
||||
|
||||
4
flake8_to_ruff/Cargo.lock
generated
4
flake8_to_ruff/Cargo.lock
generated
@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8_to_ruff"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -1975,7 +1975,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bincode",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.196-dev.0"
|
||||
version = "0.0.200-dev.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
|
||||
@@ -4,11 +4,12 @@ use anyhow::Result;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
use ruff::flake8_quotes::settings::Quote;
|
||||
use ruff::flake8_tidy_imports::settings::Strictness;
|
||||
use ruff::pydocstyle::settings::Convention;
|
||||
use ruff::settings::options::Options;
|
||||
use ruff::settings::pyproject::Pyproject;
|
||||
use ruff::{
|
||||
flake8_annotations, flake8_bugbear, flake8_errmsg, flake8_quotes, flake8_tidy_imports, mccabe,
|
||||
pep8_naming,
|
||||
pep8_naming, pydocstyle,
|
||||
};
|
||||
|
||||
use crate::black::Black;
|
||||
@@ -91,6 +92,7 @@ pub fn convert(
|
||||
let mut flake8_tidy_imports = flake8_tidy_imports::settings::Options::default();
|
||||
let mut mccabe = mccabe::settings::Options::default();
|
||||
let mut pep8_naming = pep8_naming::settings::Options::default();
|
||||
let mut pydocstyle = pydocstyle::settings::Options::default();
|
||||
for (key, value) in flake8 {
|
||||
if let Some(value) = value {
|
||||
match key.as_str() {
|
||||
@@ -200,9 +202,12 @@ pub fn convert(
|
||||
_ => eprintln!("Unexpected '{key}' value: {value}"),
|
||||
},
|
||||
// flake8-docstrings
|
||||
"docstring-convention" => {
|
||||
// No-op (handled above).
|
||||
}
|
||||
"docstring-convention" => match value.trim() {
|
||||
"google" => pydocstyle.convention = Some(Convention::Google),
|
||||
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
|
||||
"pep257" | "all" => pydocstyle.convention = None,
|
||||
_ => eprintln!("Unexpected '{key}' value: {value}"),
|
||||
},
|
||||
// mccabe
|
||||
"max-complexity" | "max_complexity" => match value.clone().parse::<usize>() {
|
||||
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
|
||||
@@ -247,6 +252,9 @@ pub fn convert(
|
||||
if pep8_naming != pep8_naming::settings::Options::default() {
|
||||
options.pep8_naming = Some(pep8_naming);
|
||||
}
|
||||
if pydocstyle != pydocstyle::settings::Options::default() {
|
||||
options.pydocstyle = Some(pydocstyle);
|
||||
}
|
||||
|
||||
// Extract any settings from the existing `pyproject.toml`.
|
||||
if let Some(black) = black {
|
||||
@@ -271,9 +279,10 @@ mod tests {
|
||||
|
||||
use anyhow::Result;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
use ruff::flake8_quotes;
|
||||
use ruff::pydocstyle::settings::Convention;
|
||||
use ruff::settings::options::Options;
|
||||
use ruff::settings::pyproject::Pyproject;
|
||||
use ruff::{flake8_quotes, pydocstyle};
|
||||
|
||||
use crate::converter::convert;
|
||||
use crate::plugin::Plugin;
|
||||
@@ -287,6 +296,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -314,7 +324,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -345,6 +355,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -372,7 +383,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -403,6 +414,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -430,7 +442,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -461,6 +473,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -488,7 +501,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -519,6 +532,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -546,7 +560,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -585,6 +599,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -648,7 +663,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -659,7 +674,9 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pydocstyle: Some(pydocstyle::settings::Options {
|
||||
convention: Some(Convention::Numpy),
|
||||
}),
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -679,6 +696,7 @@ mod tests {
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -707,7 +725,7 @@ mod tests {
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
|
||||
@@ -8,3 +8,7 @@ In-browser playground for Ruff. Available [https://ruff.pages.dev/](https://ruff
|
||||
root directory.
|
||||
- Install TypeScript dependencies with: `npm install`.
|
||||
- Start the development server with: `npm run dev`.
|
||||
|
||||
## Implementation
|
||||
|
||||
Design based on [Tailwind Play](https://play.tailwindcss.com/). Themed with [`ayu`](https://github.com/dempfi/ayu).
|
||||
|
||||
@@ -13,17 +13,10 @@
|
||||
rel="icon"
|
||||
href="data:image/svg+xml,<svg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 100 100%22><text y=%22.9em%22 font-size=%2290%22>🛠️</text></svg>"
|
||||
/>
|
||||
<link rel="stylesheet" href="https://rsms.me/inter/inter.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<div style="display: flex; position: fixed; right: 16px; top: 16px">
|
||||
<a href="https://GitHub.com/charliermarsh/ruff"
|
||||
><img
|
||||
src="https://img.shields.io/github/stars/charliermarsh/ruff.svg?style=social&label=GitHub&maxAge=2592000&?logoWidth=100"
|
||||
alt="GitHub stars"
|
||||
style="width: 120px"
|
||||
/></a>
|
||||
</div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
915
playground/package-lock.json
generated
915
playground/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -14,6 +14,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@monaco-editor/react": "^4.4.6",
|
||||
"classnames": "^2.3.2",
|
||||
"lz-string": "^1.4.4",
|
||||
"monaco-editor": "^0.34.1",
|
||||
"react": "^18.2.0",
|
||||
@@ -25,13 +26,16 @@
|
||||
"@typescript-eslint/eslint-plugin": "^5.47.1",
|
||||
"@typescript-eslint/parser": "^5.47.1",
|
||||
"@vitejs/plugin-react-swc": "^3.0.0",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"eslint": "^8.30.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"eslint-plugin-prettier": "^4.2.1",
|
||||
"eslint-plugin-react": "^7.31.11",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"postcss": "^8.4.20",
|
||||
"prettier": "^2.8.1",
|
||||
"tailwindcss": "^3.2.4",
|
||||
"typescript": "^4.9.3",
|
||||
"vite": "^4.0.0"
|
||||
}
|
||||
|
||||
6
playground/postcss.config.cjs
Normal file
6
playground/postcss.config.cjs
Normal file
@@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
};
|
||||
@@ -1,200 +0,0 @@
|
||||
import lzstring from "lz-string";
|
||||
import Editor, { useMonaco } from "@monaco-editor/react";
|
||||
import { MarkerSeverity } from "monaco-editor/esm/vs/editor/editor.api";
|
||||
import { useEffect, useState, useCallback } from "react";
|
||||
|
||||
import init, { Check, check } from "./pkg/ruff.js";
|
||||
import { AVAILABLE_OPTIONS } from "./ruff_options";
|
||||
import { Config, getDefaultConfig, toRuffConfig } from "./config";
|
||||
import { Options } from "./Options";
|
||||
|
||||
const DEFAULT_SOURCE =
|
||||
"# Define a function that takes an integer n and returns the nth number in the Fibonacci\n" +
|
||||
"# sequence.\n" +
|
||||
"def fibonacci(n):\n" +
|
||||
" if n == 0:\n" +
|
||||
" return 0\n" +
|
||||
" elif n == 1:\n" +
|
||||
" return 1\n" +
|
||||
" else:\n" +
|
||||
" return fibonacci(n-1) + fibonacci(n-2)\n" +
|
||||
"\n" +
|
||||
"# Use a for loop to generate and print the first 10 numbers in the Fibonacci sequence.\n" +
|
||||
"for i in range(10):\n" +
|
||||
" print(fibonacci(i))\n" +
|
||||
"\n" +
|
||||
"# Output:\n" +
|
||||
"# 0\n" +
|
||||
"# 1\n" +
|
||||
"# 1\n" +
|
||||
"# 2\n" +
|
||||
"# 3\n" +
|
||||
"# 5\n" +
|
||||
"# 8\n" +
|
||||
"# 13\n" +
|
||||
"# 21\n" +
|
||||
"# 34\n";
|
||||
|
||||
function restoreConfigAndSource(): [Config, string] {
|
||||
const value = lzstring.decompressFromEncodedURIComponent(
|
||||
window.location.hash.slice(1)
|
||||
);
|
||||
let config = {};
|
||||
let source = DEFAULT_SOURCE;
|
||||
|
||||
if (value) {
|
||||
const parts = value.split("$$$");
|
||||
config = JSON.parse(parts[0]);
|
||||
source = parts[1];
|
||||
}
|
||||
|
||||
return [config, source];
|
||||
}
|
||||
|
||||
function persistConfigAndSource(config: Config, source: string) {
|
||||
window.location.hash = lzstring.compressToEncodedURIComponent(
|
||||
JSON.stringify(config) + "$$$" + source
|
||||
);
|
||||
}
|
||||
|
||||
const defaultConfig = getDefaultConfig(AVAILABLE_OPTIONS);
|
||||
|
||||
export default function App() {
|
||||
const monaco = useMonaco();
|
||||
const [initialized, setInitialized] = useState<boolean>(false);
|
||||
const [config, setConfig] = useState<Config | null>(null);
|
||||
const [source, setSource] = useState<string | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
init().then(() => setInitialized(true));
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (source == null && config == null && monaco) {
|
||||
const [config, source] = restoreConfigAndSource();
|
||||
setConfig(config);
|
||||
setSource(source);
|
||||
}
|
||||
}, [monaco, source, config]);
|
||||
|
||||
useEffect(() => {
|
||||
if (config != null && source != null) {
|
||||
persistConfigAndSource(config, source);
|
||||
}
|
||||
}, [config, source]);
|
||||
|
||||
useEffect(() => {
|
||||
const editor = monaco?.editor;
|
||||
const model = editor?.getModels()[0];
|
||||
if (!editor || !model || !initialized || source == null || config == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
let checks: Check[];
|
||||
try {
|
||||
checks = check(source, toRuffConfig(config));
|
||||
setError(null);
|
||||
} catch (e) {
|
||||
setError(String(e));
|
||||
return;
|
||||
}
|
||||
|
||||
editor.setModelMarkers(
|
||||
model,
|
||||
"owner",
|
||||
checks.map((check) => ({
|
||||
startLineNumber: check.location.row,
|
||||
startColumn: check.location.column + 1,
|
||||
endLineNumber: check.end_location.row,
|
||||
endColumn: check.end_location.column + 1,
|
||||
message: `${check.code}: ${check.message}`,
|
||||
severity: MarkerSeverity.Error,
|
||||
}))
|
||||
);
|
||||
|
||||
const codeActionProvider = monaco?.languages.registerCodeActionProvider(
|
||||
"python",
|
||||
{
|
||||
// @ts-expect-error: The type definition is wrong.
|
||||
provideCodeActions: function (model, position) {
|
||||
const actions = checks
|
||||
.filter((check) => position.startLineNumber === check.location.row)
|
||||
.filter((check) => check.fix)
|
||||
.map((check) => ({
|
||||
title: `Fix ${check.code}`,
|
||||
id: `fix-${check.code}`,
|
||||
kind: "quickfix",
|
||||
edit: check.fix
|
||||
? {
|
||||
edits: [
|
||||
{
|
||||
resource: model.uri,
|
||||
versionId: model.getVersionId(),
|
||||
edit: {
|
||||
range: {
|
||||
startLineNumber: check.fix.location.row,
|
||||
startColumn: check.fix.location.column + 1,
|
||||
endLineNumber: check.fix.end_location.row,
|
||||
endColumn: check.fix.end_location.column + 1,
|
||||
},
|
||||
text: check.fix.content,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: undefined,
|
||||
}));
|
||||
return { actions, dispose: () => {} };
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return () => {
|
||||
codeActionProvider?.dispose();
|
||||
};
|
||||
}, [config, source, monaco, initialized]);
|
||||
|
||||
const handleEditorChange = useCallback(
|
||||
(value: string | undefined) => {
|
||||
setSource(value || "");
|
||||
},
|
||||
[setSource]
|
||||
);
|
||||
|
||||
const handleOptionChange = useCallback(
|
||||
(groupName: string, fieldName: string, value: string) => {
|
||||
const group = Object.assign({}, (config || {})[groupName]);
|
||||
if (value === defaultConfig[groupName][fieldName] || value === "") {
|
||||
delete group[fieldName];
|
||||
} else {
|
||||
group[fieldName] = value;
|
||||
}
|
||||
|
||||
setConfig({
|
||||
...config,
|
||||
[groupName]: group,
|
||||
});
|
||||
},
|
||||
[config]
|
||||
);
|
||||
|
||||
return (
|
||||
<div id="app">
|
||||
<Options
|
||||
config={config}
|
||||
defaultConfig={defaultConfig}
|
||||
onChange={handleOptionChange}
|
||||
/>
|
||||
<Editor
|
||||
options={{ readOnly: false, minimap: { enabled: false } }}
|
||||
wrapperProps={{ className: "editor" }}
|
||||
defaultLanguage="python"
|
||||
value={source || ""}
|
||||
theme={"light"}
|
||||
onChange={handleEditorChange}
|
||||
/>
|
||||
{error && <div id="error">{error}</div>}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
137
playground/src/Editor/Editor.tsx
Normal file
137
playground/src/Editor/Editor.tsx
Normal file
@@ -0,0 +1,137 @@
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { persist, restore } from "./config";
|
||||
import { DEFAULT_CONFIG_SOURCE, DEFAULT_PYTHON_SOURCE } from "../constants";
|
||||
import { ErrorMessage } from "./ErrorMessage";
|
||||
import Header from "./Header";
|
||||
import init, { check, current_version, Check } from "../pkg";
|
||||
import SettingsEditor from "./SettingsEditor";
|
||||
import SourceEditor from "./SourceEditor";
|
||||
import Themes from "./Themes";
|
||||
|
||||
type Tab = "Source" | "Settings";
|
||||
|
||||
export default function Editor() {
|
||||
const [initialized, setInitialized] = useState<boolean>(false);
|
||||
const [version, setVersion] = useState<string | null>(null);
|
||||
const [tab, setTab] = useState<Tab>("Source");
|
||||
const [edit, setEdit] = useState<number>(0);
|
||||
const [configSource, setConfigSource] = useState<string | null>(null);
|
||||
const [pythonSource, setPythonSource] = useState<string | null>(null);
|
||||
const [checks, setChecks] = useState<Check[]>([]);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
init().then(() => setInitialized(true));
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!initialized || configSource == null || pythonSource == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
let config: any;
|
||||
let checks: Check[];
|
||||
|
||||
try {
|
||||
config = JSON.parse(configSource);
|
||||
} catch (e) {
|
||||
setChecks([]);
|
||||
setError((e as Error).message);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
checks = check(pythonSource, config);
|
||||
} catch (e) {
|
||||
setError(e as string);
|
||||
return;
|
||||
}
|
||||
|
||||
setError(null);
|
||||
setChecks(checks);
|
||||
}, [initialized, configSource, pythonSource]);
|
||||
|
||||
useEffect(() => {
|
||||
if (configSource == null || pythonSource == null) {
|
||||
const payload = restore();
|
||||
if (payload) {
|
||||
const [configSource, pythonSource] = payload;
|
||||
setConfigSource(configSource);
|
||||
setPythonSource(pythonSource);
|
||||
} else {
|
||||
setConfigSource(DEFAULT_CONFIG_SOURCE);
|
||||
setPythonSource(DEFAULT_PYTHON_SOURCE);
|
||||
}
|
||||
}
|
||||
}, [configSource, pythonSource]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!initialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
setVersion(current_version());
|
||||
}, [initialized]);
|
||||
|
||||
const handleShare = useCallback(() => {
|
||||
if (!initialized || configSource == null || pythonSource == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
persist(configSource, pythonSource);
|
||||
}, [initialized, configSource, pythonSource]);
|
||||
|
||||
const handlePythonSourceChange = useCallback((pythonSource: string) => {
|
||||
setEdit((edit) => edit + 1);
|
||||
setPythonSource(pythonSource);
|
||||
}, []);
|
||||
|
||||
const handleConfigSourceChange = useCallback((configSource: string) => {
|
||||
setEdit((edit) => edit + 1);
|
||||
setConfigSource(configSource);
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<main className={"h-full w-full flex flex-auto"}>
|
||||
<Header
|
||||
edit={edit}
|
||||
version={version}
|
||||
tab={tab}
|
||||
onChange={setTab}
|
||||
onShare={initialized ? handleShare : undefined}
|
||||
/>
|
||||
|
||||
<Themes />
|
||||
|
||||
<div className={"mt-12 relative flex-auto"}>
|
||||
{initialized && configSource != null && pythonSource != null ? (
|
||||
<>
|
||||
<SourceEditor
|
||||
visible={tab === "Source"}
|
||||
source={pythonSource}
|
||||
checks={checks}
|
||||
onChange={handlePythonSourceChange}
|
||||
/>
|
||||
<SettingsEditor
|
||||
visible={tab === "Settings"}
|
||||
source={configSource}
|
||||
onChange={handleConfigSourceChange}
|
||||
/>
|
||||
</>
|
||||
) : null}
|
||||
</div>
|
||||
{error && tab === "Source" ? (
|
||||
<div
|
||||
style={{
|
||||
position: "fixed",
|
||||
left: "10%",
|
||||
right: "10%",
|
||||
bottom: "10%",
|
||||
}}
|
||||
>
|
||||
<ErrorMessage>{error}</ErrorMessage>
|
||||
</div>
|
||||
) : null}
|
||||
</main>
|
||||
);
|
||||
}
|
||||
26
playground/src/Editor/ErrorMessage.tsx
Normal file
26
playground/src/Editor/ErrorMessage.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
function truncate(str: string, length: number) {
|
||||
if (str.length > length) {
|
||||
return str.slice(0, length) + "...";
|
||||
} else {
|
||||
return str;
|
||||
}
|
||||
}
|
||||
|
||||
export function ErrorMessage({ children }: { children: string }) {
|
||||
return (
|
||||
<div
|
||||
className="bg-orange-100 border-l-4 border-orange-500 text-orange-700 p-4"
|
||||
role="alert"
|
||||
>
|
||||
<p className="font-bold">Error</p>
|
||||
<p className="block sm:inline">
|
||||
{truncate(
|
||||
children.startsWith("Error: ")
|
||||
? children.slice("Error: ".length)
|
||||
: children,
|
||||
120
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
73
playground/src/Editor/Header.tsx
Normal file
73
playground/src/Editor/Header.tsx
Normal file
@@ -0,0 +1,73 @@
|
||||
import classNames from "classnames";
|
||||
import ShareButton from "./ShareButton";
|
||||
import VersionTag from "./VersionTag";
|
||||
|
||||
export type Tab = "Source" | "Settings";

// Top navigation bar: the Source/Settings tab switcher, an optional version
// badge, and the Share button.
export default function Header({
  edit,
  version,
  tab,
  onChange,
  onShare,
}: {
  // Monotonic edit counter; used as the ShareButton `key` so its transient
  // "Copied!" state resets whenever the playground sources change.
  edit: number;
  // Linter version string, or null while it is not yet known.
  version: string | null;
  // Currently-selected tab.
  tab: Tab;
  // Invoked with the tab the user clicked.
  onChange: (tab: Tab) => void;
  // Share handler; when undefined the Share button renders disabled.
  onShare?: () => void;
}) {
  return (
    <div
      className="w-full flex items-center justify-between flex-none pl-5 sm:pl-6 pr-4 lg:pr-6 absolute z-10 top-0 left-0 -mb-px antialiased border-b border-gray-200 dark:border-gray-800"
      style={{ background: "#f8f9fa" }}
    >
      <div className="flex space-x-5">
        <button
          type="button"
          className={classNames(
            "relative flex py-3 text-sm leading-6 font-semibold focus:outline-none",
            tab === "Source"
              ? "text-ayu"
              : "text-gray-700 hover:text-gray-900 focus:text-gray-900 dark:text-gray-300 dark:hover:text-white"
          )}
          onClick={() => onChange("Source")}
        >
          <span
            className={classNames(
              "absolute bottom-0 inset-x-0 bg-ayu h-0.5 rounded-full transition-opacity duration-150",
              tab === "Source" ? "opacity-100" : "opacity-0"
            )}
          />
          Source
        </button>
        <button
          type="button"
          className={classNames(
            "relative flex py-3 text-sm leading-6 font-semibold focus:outline-none",
            tab === "Settings"
              ? "text-ayu"
              : "text-gray-700 hover:text-gray-900 focus:text-gray-900 dark:text-gray-300 dark:hover:text-white"
          )}
          onClick={() => onChange("Settings")}
        >
          <span
            className={classNames(
              "absolute bottom-0 inset-x-0 bg-ayu h-0.5 rounded-full transition-opacity duration-150",
              tab === "Settings" ? "opacity-100" : "opacity-0"
            )}
          />
          Settings
        </button>
        {version ? (
          <div className={"flex items-center"}>
            <VersionTag>v{version}</VersionTag>
          </div>
        ) : null}
      </div>
      <div className={"hidden sm:flex items-center min-w-0"}>
        <ShareButton key={edit} onShare={onShare} />
      </div>
    </div>
  );
}
|
||||
54
playground/src/Editor/SettingsEditor.tsx
Normal file
54
playground/src/Editor/SettingsEditor.tsx
Normal file
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* Editor for the settings JSON.
|
||||
*/
|
||||
|
||||
import Editor, { useMonaco } from "@monaco-editor/react";
|
||||
import { useCallback, useEffect } from "react";
|
||||
import schema from "../../../ruff.schema.json";
|
||||
|
||||
export default function SettingsEditor({
|
||||
visible,
|
||||
source,
|
||||
onChange,
|
||||
}: {
|
||||
visible: boolean;
|
||||
source: string;
|
||||
onChange: (source: string) => void;
|
||||
}) {
|
||||
const monaco = useMonaco();
|
||||
|
||||
useEffect(() => {
|
||||
monaco?.languages.json.jsonDefaults.setDiagnosticsOptions({
|
||||
schemas: [
|
||||
{
|
||||
uri: "https://raw.githubusercontent.com/charliermarsh/ruff/main/ruff.schema.json",
|
||||
fileMatch: ["*"],
|
||||
schema,
|
||||
},
|
||||
],
|
||||
});
|
||||
}, [monaco]);
|
||||
|
||||
const handleChange = useCallback(
|
||||
(value: string | undefined) => {
|
||||
onChange(value ?? "");
|
||||
},
|
||||
[onChange]
|
||||
);
|
||||
return (
|
||||
<Editor
|
||||
options={{
|
||||
readOnly: false,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
roundedSelection: false,
|
||||
scrollBeyondLastLine: false,
|
||||
}}
|
||||
wrapperProps={visible ? {} : { style: { display: "none" } }}
|
||||
language={"json"}
|
||||
value={source}
|
||||
theme={"Ayu-Light"}
|
||||
onChange={handleChange}
|
||||
/>
|
||||
);
|
||||
}
|
||||
53
playground/src/Editor/ShareButton.tsx
Normal file
53
playground/src/Editor/ShareButton.tsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
export default function ShareButton({ onShare }: { onShare?: () => void }) {
|
||||
const [copied, setCopied] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (copied) {
|
||||
const timeout = setTimeout(() => setCopied(false), 2000);
|
||||
return () => clearTimeout(timeout);
|
||||
}
|
||||
}, [copied]);
|
||||
|
||||
return copied ? (
|
||||
<button
|
||||
type="button"
|
||||
className="relative flex-none rounded-md text-sm font-semibold leading-6 py-1.5 px-3 cursor-auto text-ayu shadow-copied dark:bg-ayu/10"
|
||||
>
|
||||
<span
|
||||
className="absolute inset-0 flex items-center justify-center invisible"
|
||||
aria-hidden="true"
|
||||
>
|
||||
Share
|
||||
</span>
|
||||
<span className="" aria-hidden="false">
|
||||
Copied!
|
||||
</span>
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
type="button"
|
||||
className="relative flex-none rounded-md text-sm font-semibold leading-6 py-1.5 px-3 enabled:hover:bg-ayu/80 bg-ayu text-white shadow-sm dark:shadow-highlight/20 disabled:opacity-50"
|
||||
disabled={!onShare || copied}
|
||||
onClick={
|
||||
onShare
|
||||
? () => {
|
||||
setCopied(true);
|
||||
onShare();
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
>
|
||||
<span
|
||||
className="absolute inset-0 flex items-center justify-center"
|
||||
aria-hidden="false"
|
||||
>
|
||||
Share
|
||||
</span>
|
||||
<span className="invisible" aria-hidden="true">
|
||||
Copied!
|
||||
</span>
|
||||
</button>
|
||||
);
|
||||
}
|
||||
114
playground/src/Editor/SourceEditor.tsx
Normal file
114
playground/src/Editor/SourceEditor.tsx
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Editor for the Python source code.
|
||||
*/
|
||||
|
||||
import Editor, { useMonaco } from "@monaco-editor/react";
|
||||
import { MarkerSeverity, MarkerTag } from "monaco-editor";
|
||||
import { useCallback, useEffect } from "react";
|
||||
import { Check } from "../pkg";
|
||||
|
||||
export type Mode = "JSON" | "Python";
|
||||
|
||||
export default function SourceEditor({
|
||||
visible,
|
||||
source,
|
||||
checks,
|
||||
onChange,
|
||||
}: {
|
||||
visible: boolean;
|
||||
source: string;
|
||||
checks: Check[];
|
||||
onChange: (pythonSource: string) => void;
|
||||
}) {
|
||||
const monaco = useMonaco();
|
||||
|
||||
useEffect(() => {
|
||||
const editor = monaco?.editor;
|
||||
const model = editor?.getModels()[0];
|
||||
if (!editor || !model) {
|
||||
return;
|
||||
}
|
||||
|
||||
editor.setModelMarkers(
|
||||
model,
|
||||
"owner",
|
||||
checks.map((check) => ({
|
||||
startLineNumber: check.location.row,
|
||||
startColumn: check.location.column + 1,
|
||||
endLineNumber: check.end_location.row,
|
||||
endColumn: check.end_location.column + 1,
|
||||
message: `${check.code}: ${check.message}`,
|
||||
severity: MarkerSeverity.Error,
|
||||
tags:
|
||||
check.code === "F401" || check.code === "F841"
|
||||
? [MarkerTag.Unnecessary]
|
||||
: [],
|
||||
}))
|
||||
);
|
||||
|
||||
const codeActionProvider = monaco?.languages.registerCodeActionProvider(
|
||||
"python",
|
||||
{
|
||||
// @ts-expect-error: The type definition is wrong.
|
||||
provideCodeActions: function (model, position) {
|
||||
const actions = checks
|
||||
.filter((check) => position.startLineNumber === check.location.row)
|
||||
.filter((check) => check.fix)
|
||||
.map((check) => ({
|
||||
title: `Fix ${check.code}`,
|
||||
id: `fix-${check.code}`,
|
||||
kind: "quickfix",
|
||||
edit: check.fix
|
||||
? {
|
||||
edits: [
|
||||
{
|
||||
resource: model.uri,
|
||||
versionId: model.getVersionId(),
|
||||
edit: {
|
||||
range: {
|
||||
startLineNumber: check.fix.location.row,
|
||||
startColumn: check.fix.location.column + 1,
|
||||
endLineNumber: check.fix.end_location.row,
|
||||
endColumn: check.fix.end_location.column + 1,
|
||||
},
|
||||
text: check.fix.content,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: undefined,
|
||||
}));
|
||||
return { actions, dispose: () => {} };
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return () => {
|
||||
codeActionProvider?.dispose();
|
||||
};
|
||||
}, [checks, monaco]);
|
||||
|
||||
const handleChange = useCallback(
|
||||
(value: string | undefined) => {
|
||||
onChange(value ?? "");
|
||||
},
|
||||
[onChange]
|
||||
);
|
||||
|
||||
return (
|
||||
<Editor
|
||||
options={{
|
||||
readOnly: false,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
roundedSelection: false,
|
||||
scrollBeyondLastLine: false,
|
||||
}}
|
||||
wrapperProps={visible ? {} : { style: { display: "none" } }}
|
||||
theme={"Ayu-Light"}
|
||||
language={"python"}
|
||||
value={source}
|
||||
onChange={handleChange}
|
||||
/>
|
||||
);
|
||||
}
|
||||
645
playground/src/Editor/Themes.tsx
Normal file
645
playground/src/Editor/Themes.tsx
Normal file
@@ -0,0 +1,645 @@
|
||||
import { useMonaco } from "@monaco-editor/react";
|
||||
import { useEffect } from "react";
|
||||
|
||||
// Renderless component that registers the "Ayu-Light" Monaco theme as a side
// effect. Must be mounted somewhere beneath the Monaco provider; renders
// nothing itself.
export default function Themes() {
  const monaco = useMonaco();

  useEffect(() => {
    // Generated via `monaco-vscode-textmate-theme-converter`.
    // See: https://github.com/ayu-theme/vscode-ayu/blob/91839e8a9dfa78d61e58dbcf9b52272a01fee66a/ayu-light.json.
    monaco?.editor.defineTheme("Ayu-Light", {
      inherit: false,
      base: "vs-dark",
      colors: {
        focusBorder: "#ffaa33b3",
        foreground: "#8a9199",
        "widget.shadow": "#00000026",
        "selection.background": "#035bd626",
        "icon.foreground": "#8a9199",
        errorForeground: "#e65050",
        descriptionForeground: "#8a9199",
        "textBlockQuote.background": "#f3f4f5",
        "textLink.foreground": "#ffaa33",
        "textLink.activeForeground": "#ffaa33",
        "textPreformat.foreground": "#5c6166",
        "button.background": "#ffaa33",
        "button.foreground": "#f8f9fa",
        "button.hoverBackground": "#f9a52e",
        "button.secondaryBackground": "#8a919933",
        "button.secondaryForeground": "#5c6166",
        "button.secondaryHoverBackground": "#8a919980",
        "dropdown.background": "#fcfcfc",
        "dropdown.foreground": "#8a9199",
        "dropdown.border": "#8a919945",
        "input.background": "#fcfcfc",
        "input.border": "#8a919945",
        "input.foreground": "#5c6166",
        "input.placeholderForeground": "#8a919980",
        "inputOption.activeBorder": "#f4a0284d",
        "inputOption.activeBackground": "#ffaa3333",
        "inputOption.activeForeground": "#f4a028",
        "inputValidation.errorBackground": "#fcfcfc",
        "inputValidation.errorBorder": "#e65050",
        "inputValidation.infoBackground": "#f8f9fa",
        "inputValidation.infoBorder": "#55b4d4",
        "inputValidation.warningBackground": "#f8f9fa",
        "inputValidation.warningBorder": "#f2ae49",
        "scrollbar.shadow": "#6b7d8f00",
        "scrollbarSlider.background": "#8a919966",
        "scrollbarSlider.hoverBackground": "#8a919999",
        "scrollbarSlider.activeBackground": "#8a9199b3",
        "badge.background": "#ffaa3333",
        "badge.foreground": "#f4a028",
        "progressBar.background": "#ffaa33",
        "list.activeSelectionBackground": "#56728f1f",
        "list.activeSelectionForeground": "#5c6166",
        "list.focusBackground": "#56728f1f",
        "list.focusForeground": "#5c6166",
        "list.focusOutline": "#56728f1f",
        "list.highlightForeground": "#ffaa33",
        "list.deemphasizedForeground": "#e65050",
        "list.hoverBackground": "#56728f1f",
        "list.inactiveSelectionBackground": "#6b7d8f1f",
        "list.inactiveSelectionForeground": "#8a9199",
        "list.invalidItemForeground": "#8a91994d",
        "list.errorForeground": "#e65050",
        "tree.indentGuidesStroke": "#8a919959",
        "listFilterWidget.background": "#f3f4f5",
        "listFilterWidget.outline": "#ffaa33",
        "listFilterWidget.noMatchesOutline": "#e65050",
        "list.filterMatchBackground": "#8f30efcc",
        "list.filterMatchBorder": "#9f40ffcc",
        "activityBar.background": "#f8f9fa",
        "activityBar.foreground": "#8a9199cc",
        "activityBar.inactiveForeground": "#8a919999",
        "activityBar.border": "#f8f9fa",
        "activityBar.activeBorder": "#ffaa33b3",
        "activityBarBadge.background": "#ffaa33",
        "activityBarBadge.foreground": "#f8f9fa",
        "sideBar.background": "#f8f9fa",
        "sideBar.border": "#f8f9fa",
        "sideBarTitle.foreground": "#8a9199",
        "sideBarSectionHeader.background": "#f8f9fa",
        "sideBarSectionHeader.foreground": "#8a9199",
        "sideBarSectionHeader.border": "#f8f9fa",
        "minimap.background": "#f8f9fa",
        "minimap.selectionHighlight": "#035bd626",
        "minimap.errorHighlight": "#e65050",
        "minimap.findMatchHighlight": "#9f40ff2b",
        "minimapGutter.addedBackground": "#6cbf43",
        "minimapGutter.modifiedBackground": "#478acc",
        "minimapGutter.deletedBackground": "#ff7383",
        "editorGroup.border": "#6b7d8f1f",
        "editorGroup.background": "#f3f4f5",
        "editorGroupHeader.noTabsBackground": "#f8f9fa",
        "editorGroupHeader.tabsBackground": "#f8f9fa",
        "editorGroupHeader.tabsBorder": "#f8f9fa",
        "tab.activeBackground": "#f8f9fa",
        "tab.activeForeground": "#5c6166",
        "tab.border": "#f8f9fa",
        "tab.activeBorder": "#ffaa33",
        "tab.unfocusedActiveBorder": "#8a9199",
        "tab.inactiveBackground": "#f8f9fa",
        "tab.inactiveForeground": "#8a9199",
        "tab.unfocusedActiveForeground": "#8a9199",
        "tab.unfocusedInactiveForeground": "#8a9199",
        "editor.background": "#f8f9fa",
        "editor.foreground": "#5c6166",
        "editorLineNumber.foreground": "#8a919966",
        "editorLineNumber.activeForeground": "#8a9199cc",
        "editorCursor.foreground": "#ffaa33",
        "editor.inactiveSelectionBackground": "#035bd612",
        "editor.selectionBackground": "#035bd626",
        "editor.selectionHighlightBackground": "#6cbf4326",
        "editor.selectionHighlightBorder": "#6cbf4300",
        "editor.wordHighlightBackground": "#478acc14",
        "editor.wordHighlightStrongBackground": "#6cbf4314",
        "editor.wordHighlightBorder": "#478acc80",
        "editor.wordHighlightStrongBorder": "#6cbf4380",
        "editor.findMatchBackground": "#9f40ff2b",
        "editor.findMatchBorder": "#9f40ff2b",
        "editor.findMatchHighlightBackground": "#9f40ffcc",
        "editor.findMatchHighlightBorder": "#8f30efcc",
        "editor.findRangeHighlightBackground": "#9f40ff40",
        "editor.rangeHighlightBackground": "#9f40ff33",
        "editor.lineHighlightBackground": "#8a91991a",
        "editorLink.activeForeground": "#ffaa33",
        "editorWhitespace.foreground": "#8a919966",
        "editorIndentGuide.background": "#8a91992e",
        "editorIndentGuide.activeBackground": "#8a919959",
        "editorRuler.foreground": "#8a91992e",
        "editorCodeLens.foreground": "#787b8099",
        "editorBracketMatch.background": "#8a91994d",
        "editorBracketMatch.border": "#8a91994d",
        "editor.snippetTabstopHighlightBackground": "#6cbf4333",
        "editorOverviewRuler.border": "#6b7d8f1f",
        "editorOverviewRuler.modifiedForeground": "#478acc",
        "editorOverviewRuler.addedForeground": "#6cbf43",
        "editorOverviewRuler.deletedForeground": "#ff7383",
        "editorOverviewRuler.errorForeground": "#e65050",
        "editorOverviewRuler.warningForeground": "#ffaa33",
        "editorOverviewRuler.bracketMatchForeground": "#8a9199b3",
        "editorOverviewRuler.wordHighlightForeground": "#478acc66",
        "editorOverviewRuler.wordHighlightStrongForeground": "#6cbf4366",
        "editorOverviewRuler.findMatchForeground": "#9f40ff2b",
        "editorError.foreground": "#e65050",
        "editorWarning.foreground": "#ffaa33",
        "editorGutter.modifiedBackground": "#478acccc",
        "editorGutter.addedBackground": "#6cbf43cc",
        "editorGutter.deletedBackground": "#ff7383cc",
        "diffEditor.insertedTextBackground": "#6cbf431f",
        "diffEditor.removedTextBackground": "#ff73831f",
        "diffEditor.diagonalFill": "#6b7d8f1f",
        "editorWidget.background": "#f3f4f5",
        "editorWidget.border": "#6b7d8f1f",
        "editorHoverWidget.background": "#f3f4f5",
        "editorHoverWidget.border": "#6b7d8f1f",
        "editorSuggestWidget.background": "#f3f4f5",
        "editorSuggestWidget.border": "#6b7d8f1f",
        "editorSuggestWidget.highlightForeground": "#ffaa33",
        "editorSuggestWidget.selectedBackground": "#56728f1f",
        "debugExceptionWidget.border": "#6b7d8f1f",
        "debugExceptionWidget.background": "#f3f4f5",
        "editorMarkerNavigation.background": "#f3f4f5",
        "peekView.border": "#56728f1f",
        "peekViewTitle.background": "#56728f1f",
        "peekViewTitleDescription.foreground": "#8a9199",
        "peekViewTitleLabel.foreground": "#5c6166",
        "peekViewEditor.background": "#f3f4f5",
        "peekViewEditor.matchHighlightBackground": "#9f40ffcc",
        "peekViewEditor.matchHighlightBorder": "#8f30efcc",
        "peekViewResult.background": "#f3f4f5",
        "peekViewResult.fileForeground": "#5c6166",
        "peekViewResult.lineForeground": "#8a9199",
        "peekViewResult.matchHighlightBackground": "#9f40ffcc",
        "peekViewResult.selectionBackground": "#56728f1f",
        "panel.background": "#f8f9fa",
        "panel.border": "#6b7d8f1f",
        "panelTitle.activeBorder": "#ffaa33",
        "panelTitle.activeForeground": "#5c6166",
        "panelTitle.inactiveForeground": "#8a9199",
        "statusBar.background": "#f8f9fa",
        "statusBar.foreground": "#8a9199",
        "statusBar.border": "#f8f9fa",
        "statusBar.debuggingBackground": "#ed9366",
        "statusBar.debuggingForeground": "#fcfcfc",
        "statusBar.noFolderBackground": "#f3f4f5",
        "statusBarItem.activeBackground": "#8a919933",
        "statusBarItem.hoverBackground": "#8a919933",
        "statusBarItem.prominentBackground": "#6b7d8f1f",
        "statusBarItem.prominentHoverBackground": "#00000030",
        "statusBarItem.remoteBackground": "#ffaa33",
        "statusBarItem.remoteForeground": "#fcfcfc",
        "titleBar.activeBackground": "#f8f9fa",
        "titleBar.activeForeground": "#5c6166",
        "titleBar.inactiveBackground": "#f8f9fa",
        "titleBar.inactiveForeground": "#8a9199",
        "titleBar.border": "#f8f9fa",
        "extensionButton.prominentForeground": "#fcfcfc",
        "extensionButton.prominentBackground": "#ffaa33",
        "extensionButton.prominentHoverBackground": "#f9a52e",
        "pickerGroup.border": "#6b7d8f1f",
        "pickerGroup.foreground": "#8a919980",
        "debugToolBar.background": "#f3f4f5",
        "debugIcon.breakpointForeground": "#ed9366",
        "debugIcon.breakpointDisabledForeground": "#ed936680",
        "debugConsoleInputIcon.foreground": "#ffaa33",
        "welcomePage.tileBackground": "#f8f9fa",
        "welcomePage.tileShadow": "#00000026",
        "welcomePage.progress.background": "#8a91991a",
        "welcomePage.buttonBackground": "#ffaa3366",
        "walkThrough.embeddedEditorBackground": "#f3f4f5",
        "gitDecoration.modifiedResourceForeground": "#478accb3",
        "gitDecoration.deletedResourceForeground": "#ff7383b3",
        "gitDecoration.untrackedResourceForeground": "#6cbf43b3",
        "gitDecoration.ignoredResourceForeground": "#8a919980",
        "gitDecoration.conflictingResourceForeground": "",
        "gitDecoration.submoduleResourceForeground": "#a37accb3",
        "settings.headerForeground": "#5c6166",
        "settings.modifiedItemIndicator": "#478acc",
        "keybindingLabel.background": "#8a91991a",
        "keybindingLabel.foreground": "#5c6166",
        "keybindingLabel.border": "#5c61661a",
        "keybindingLabel.bottomBorder": "#5c61661a",
        "terminal.background": "#f8f9fa",
        "terminal.foreground": "#5c6166",
        "terminal.ansiBlack": "#000000",
        "terminal.ansiRed": "#ea6c6d",
        "terminal.ansiGreen": "#6cbf43",
        "terminal.ansiYellow": "#eca944",
        "terminal.ansiBlue": "#3199e1",
        "terminal.ansiMagenta": "#9e75c7",
        "terminal.ansiCyan": "#46ba94",
        "terminal.ansiWhite": "#c7c7c7",
        "terminal.ansiBrightBlack": "#686868",
        "terminal.ansiBrightRed": "#f07171",
        "terminal.ansiBrightGreen": "#86b300",
        "terminal.ansiBrightYellow": "#f2ae49",
        "terminal.ansiBrightBlue": "#399ee6",
        "terminal.ansiBrightMagenta": "#a37acc",
        "terminal.ansiBrightCyan": "#4cbf99",
        "terminal.ansiBrightWhite": "#d1d1d1",
      },
      // TextMate-scope token colors, converted from the upstream theme.
      rules: [
        { fontStyle: "italic", foreground: "#787b8099", token: "comment" },
        { foreground: "#86b300", token: "string" },
        { foreground: "#86b300", token: "constant.other.symbol" },
        { foreground: "#4cbf99", token: "string.regexp" },
        { foreground: "#4cbf99", token: "constant.character" },
        { foreground: "#4cbf99", token: "constant.other" },
        { foreground: "#a37acc", token: "constant.numeric" },
        { foreground: "#a37acc", token: "constant.language" },
        { foreground: "#5c6166", token: "variable" },
        { foreground: "#5c6166", token: "variable.parameter.function-call" },
        { foreground: "#f07171", token: "variable.member" },
        { fontStyle: "italic", foreground: "#55b4d4", token: "variable.language" },
        { foreground: "#fa8d3e", token: "storage" },
        { foreground: "#fa8d3e", token: "keyword" },
        { foreground: "#ed9366", token: "keyword.operator" },
        { foreground: "#5c6166b3", token: "punctuation.separator" },
        { foreground: "#5c6166b3", token: "punctuation.terminator" },
        { foreground: "#5c6166", token: "punctuation.section" },
        { foreground: "#ed9366", token: "punctuation.accessor" },
        { foreground: "#fa8d3e", token: "punctuation.definition.template-expression" },
        { foreground: "#fa8d3e", token: "punctuation.section.embedded" },
        { foreground: "#5c6166", token: "meta.embedded" },
        { foreground: "#399ee6", token: "source.java storage.type" },
        { foreground: "#399ee6", token: "source.haskell storage.type" },
        { foreground: "#399ee6", token: "source.c storage.type" },
        { foreground: "#55b4d4", token: "entity.other.inherited-class" },
        { foreground: "#fa8d3e", token: "storage.type.function" },
        { foreground: "#55b4d4", token: "source.java storage.type.primitive" },
        { foreground: "#f2ae49", token: "entity.name.function" },
        { foreground: "#a37acc", token: "variable.parameter" },
        { foreground: "#a37acc", token: "meta.parameter" },
        { foreground: "#f2ae49", token: "variable.function" },
        { foreground: "#f2ae49", token: "variable.annotation" },
        { foreground: "#f2ae49", token: "meta.function-call.generic" },
        { foreground: "#f2ae49", token: "support.function.go" },
        { foreground: "#f07171", token: "support.function" },
        { foreground: "#f07171", token: "support.macro" },
        { foreground: "#86b300", token: "entity.name.import" },
        { foreground: "#86b300", token: "entity.name.package" },
        { foreground: "#399ee6", token: "entity.name" },
        { foreground: "#55b4d4", token: "entity.name.tag" },
        { foreground: "#55b4d4", token: "meta.tag.sgml" },
        { foreground: "#399ee6", token: "support.class.component" },
        { foreground: "#55b4d480", token: "punctuation.definition.tag.end" },
        { foreground: "#55b4d480", token: "punctuation.definition.tag.begin" },
        { foreground: "#55b4d480", token: "punctuation.definition.tag" },
        { foreground: "#f2ae49", token: "entity.other.attribute-name" },
        { fontStyle: "italic", foreground: "#ed9366", token: "support.constant" },
        { foreground: "#55b4d4", token: "support.type" },
        { foreground: "#55b4d4", token: "support.class" },
        { foreground: "#55b4d4", token: "source.go storage.type" },
        { foreground: "#e6ba7e", token: "meta.decorator variable.other" },
        { foreground: "#e6ba7e", token: "meta.decorator punctuation.decorator" },
        { foreground: "#e6ba7e", token: "storage.type.annotation" },
        { foreground: "#e65050", token: "invalid" },
        { foreground: "#c594c5", token: "meta.diff" },
        { foreground: "#c594c5", token: "meta.diff.header" },
        { foreground: "#f2ae49", token: "source.ruby variable.other.readwrite" },
        { foreground: "#399ee6", token: "source.css entity.name.tag" },
        { foreground: "#399ee6", token: "source.sass entity.name.tag" },
        { foreground: "#399ee6", token: "source.scss entity.name.tag" },
        { foreground: "#399ee6", token: "source.less entity.name.tag" },
        { foreground: "#399ee6", token: "source.stylus entity.name.tag" },
        { foreground: "#787b8099", token: "source.css support.type" },
        { foreground: "#787b8099", token: "source.sass support.type" },
        { foreground: "#787b8099", token: "source.scss support.type" },
        { foreground: "#787b8099", token: "source.less support.type" },
        { foreground: "#787b8099", token: "source.stylus support.type" },
        { fontStyle: "normal", foreground: "#55b4d4", token: "support.type.property-name" },
        { foreground: "#787b8099", token: "constant.numeric.line-number.find-in-files - match" },
        { foreground: "#fa8d3e", token: "constant.numeric.line-number.match" },
        { foreground: "#86b300", token: "entity.name.filename.find-in-files" },
        { foreground: "#e65050", token: "message.error" },
        { fontStyle: "bold", foreground: "#86b300", token: "markup.heading" },
        { fontStyle: "bold", foreground: "#86b300", token: "markup.heading entity.name" },
        { foreground: "#55b4d4", token: "markup.underline.link" },
        { foreground: "#55b4d4", token: "string.other.link" },
        { fontStyle: "italic", foreground: "#f07171", token: "markup.italic" },
        { fontStyle: "bold", foreground: "#f07171", token: "markup.bold" },
        { fontStyle: "bold italic", token: "markup.italic markup.bold" },
        { fontStyle: "bold italic", token: "markup.bold markup.italic" },
        { background: "#5c616605", token: "markup.raw" },
        { background: "#5c61660f", token: "markup.raw.inline" },
        { fontStyle: "bold", background: "#5c61660f", foreground: "#787b8099", token: "meta.separator" },
        { foreground: "#4cbf99", fontStyle: "italic", token: "markup.quote" },
        { foreground: "#f2ae49", token: "markup.list punctuation.definition.list.begin" },
        { foreground: "#6cbf43", token: "markup.inserted" },
        { foreground: "#478acc", token: "markup.changed" },
        { foreground: "#ff7383", token: "markup.deleted" },
        { foreground: "#e6ba7e", token: "markup.strike" },
        { background: "#5c61660f", foreground: "#55b4d4", token: "markup.table" },
        { foreground: "#ed9366", token: "text.html.markdown markup.inline.raw" },
        { background: "#787b8099", foreground: "#787b8099", token: "text.html.markdown meta.dummy.line-break" },
        { background: "#5c6166", foreground: "#787b8099", token: "punctuation.definition.markdown" },
        // Edits.
        { foreground: "#fa8d3e", token: "number" },
      ],
      encodedTokensColors: [],
    });
  }, [monaco]);

  return null;
}
|
||||
26
playground/src/Editor/VersionTag.tsx
Normal file
26
playground/src/Editor/VersionTag.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import classNames from "classnames";
|
||||
import { ReactNode } from "react";
|
||||
|
||||
export default function VersionTag({ children }: { children: ReactNode }) {
|
||||
return (
|
||||
<div
|
||||
className={classNames(
|
||||
"text-gray-500",
|
||||
"text-xs",
|
||||
"leading-5",
|
||||
"font-semibold",
|
||||
"bg-gray-400/10",
|
||||
"rounded-full",
|
||||
"py-1",
|
||||
"px-3",
|
||||
"flex",
|
||||
"items-center",
|
||||
"dark:bg-gray-800",
|
||||
"dark:text-gray-400",
|
||||
"dark:shadow-highlight/4"
|
||||
)}
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
63
playground/src/Editor/config.ts
Normal file
63
playground/src/Editor/config.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import lzstring from "lz-string";
|
||||
import { OptionGroup } from "../ruff_options";
|
||||
|
||||
export type Config = { [K: string]: any };
|
||||
|
||||
/**
|
||||
* Parse an encoded value from the options export.
|
||||
*
|
||||
* TODO(charlie): Use JSON for the default values.
|
||||
*/
|
||||
function parse(value: any): any {
|
||||
if (value == "None") {
|
||||
return null;
|
||||
}
|
||||
return JSON.parse(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The default configuration for the playground.
|
||||
*/
|
||||
export function defaultConfig(availableOptions: OptionGroup[]): Config {
|
||||
const config: Config = {};
|
||||
for (const group of availableOptions) {
|
||||
if (group.name == "globals") {
|
||||
for (const field of group.fields) {
|
||||
config[field.name] = parse(field.default);
|
||||
}
|
||||
} else {
|
||||
config[group.name] = {};
|
||||
for (const field of group.fields) {
|
||||
config[group.name][field.name] = parse(field.default);
|
||||
}
|
||||
}
|
||||
}
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist the configuration to a URL.
|
||||
*/
|
||||
export function persist(configSource: string, pythonSource: string) {
|
||||
window.location.hash = lzstring.compressToEncodedURIComponent(
|
||||
configSource + "$$$" + pythonSource
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore the configuration from a URL.
|
||||
*/
|
||||
export function restore(): [string, string] | null {
|
||||
const value = lzstring.decompressFromEncodedURIComponent(
|
||||
window.location.hash.slice(1)
|
||||
);
|
||||
|
||||
if (value) {
|
||||
const parts = value.split("$$$");
|
||||
const configSource = parts[0];
|
||||
const pythonSource = parts[1];
|
||||
return [configSource, pythonSource];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
3
playground/src/Editor/index.tsx
Normal file
3
playground/src/Editor/index.tsx
Normal file
@@ -0,0 +1,3 @@
|
||||
import Editor from "./Editor";
|
||||
|
||||
export default Editor;
|
||||
@@ -1,72 +0,0 @@
|
||||
import { Config } from "./config";
|
||||
import { AVAILABLE_OPTIONS } from "./ruff_options";
|
||||
|
||||
function OptionEntry({
|
||||
config,
|
||||
defaultConfig,
|
||||
groupName,
|
||||
fieldName,
|
||||
onChange,
|
||||
}: {
|
||||
config: Config | null;
|
||||
defaultConfig: Config;
|
||||
groupName: string;
|
||||
fieldName: string;
|
||||
onChange: (groupName: string, fieldName: string, value: string) => void;
|
||||
}) {
|
||||
const value =
|
||||
config && config[groupName] && config[groupName][fieldName]
|
||||
? config[groupName][fieldName]
|
||||
: "";
|
||||
|
||||
return (
|
||||
<span>
|
||||
<label>
|
||||
{fieldName}
|
||||
<input
|
||||
value={value}
|
||||
placeholder={defaultConfig[groupName][fieldName]}
|
||||
type="text"
|
||||
onChange={(event) => {
|
||||
onChange(groupName, fieldName, event.target.value);
|
||||
}}
|
||||
/>
|
||||
</label>
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
export function Options({
|
||||
config,
|
||||
defaultConfig,
|
||||
onChange,
|
||||
}: {
|
||||
config: Config | null;
|
||||
defaultConfig: Config;
|
||||
onChange: (groupName: string, fieldName: string, value: string) => void;
|
||||
}) {
|
||||
return (
|
||||
<div className="options">
|
||||
{AVAILABLE_OPTIONS.map((group) => (
|
||||
<details key={group.name}>
|
||||
<summary>{group.name}</summary>
|
||||
<div>
|
||||
<ul>
|
||||
{group.fields.map((field) => (
|
||||
<li key={field.name}>
|
||||
<OptionEntry
|
||||
config={config}
|
||||
defaultConfig={defaultConfig}
|
||||
groupName={group.name}
|
||||
fieldName={field.name}
|
||||
onChange={onChange}
|
||||
/>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
import { OptionGroup } from "./ruff_options";
|
||||
|
||||
export type Config = { [key: string]: { [key: string]: string } };
|
||||
|
||||
export function getDefaultConfig(availableOptions: OptionGroup[]): Config {
|
||||
const config: Config = {};
|
||||
availableOptions.forEach((group) => {
|
||||
config[group.name] = {};
|
||||
group.fields.forEach((f) => {
|
||||
config[group.name][f.name] = f.default;
|
||||
});
|
||||
});
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert the config in the application to something Ruff accepts.
|
||||
*
|
||||
* Application config is always nested one level. Ruff allows for some
|
||||
* top-level options.
|
||||
*
|
||||
* Any option value is parsed as JSON to convert it to a native JS object.
|
||||
* If that fails, e.g. while a user is typing, we let the application handle that
|
||||
* and show an error.
|
||||
*/
|
||||
export function toRuffConfig(config: Config): any {
|
||||
const convertValue = (value: string): any => {
|
||||
return value === "None" ? null : JSON.parse(value);
|
||||
};
|
||||
|
||||
const result: any = {};
|
||||
Object.keys(config).forEach((group_name) => {
|
||||
const fields = config[group_name];
|
||||
if (!fields || Object.keys(fields).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (group_name === "globals") {
|
||||
Object.keys(fields).forEach((field_name) => {
|
||||
result[field_name] = convertValue(fields[field_name]);
|
||||
});
|
||||
} else {
|
||||
result[group_name] = {};
|
||||
|
||||
Object.keys(fields).forEach((field_name) => {
|
||||
result[group_name][field_name] = convertValue(fields[field_name]);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
40
playground/src/constants.ts
Normal file
40
playground/src/constants.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { defaultConfig } from "./Editor/config";
|
||||
import { AVAILABLE_OPTIONS } from "./ruff_options";
|
||||
|
||||
export const DEFAULT_PYTHON_SOURCE =
|
||||
"import os\n" +
|
||||
"\n" +
|
||||
"# Define a function that takes an integer n and returns the nth number in the Fibonacci\n" +
|
||||
"# sequence.\n" +
|
||||
"def fibonacci(n):\n" +
|
||||
' """Compute the nth number in the Fibonacci sequence."""\n' +
|
||||
" x = 1\n" +
|
||||
" if n == 0:\n" +
|
||||
" return 0\n" +
|
||||
" elif n == 1:\n" +
|
||||
" return 1\n" +
|
||||
" else:\n" +
|
||||
" return fibonacci(n - 1) + fibonacci(n - 2)\n" +
|
||||
"\n" +
|
||||
"\n" +
|
||||
"# Use a for loop to generate and print the first 10 numbers in the Fibonacci sequence.\n" +
|
||||
"for i in range(10):\n" +
|
||||
" print(fibonacci(i))\n" +
|
||||
"\n" +
|
||||
"# Output:\n" +
|
||||
"# 0\n" +
|
||||
"# 1\n" +
|
||||
"# 1\n" +
|
||||
"# 2\n" +
|
||||
"# 3\n" +
|
||||
"# 5\n" +
|
||||
"# 8\n" +
|
||||
"# 13\n" +
|
||||
"# 21\n" +
|
||||
"# 34\n";
|
||||
|
||||
export const DEFAULT_CONFIG_SOURCE = JSON.stringify(
|
||||
defaultConfig(AVAILABLE_OPTIONS),
|
||||
null,
|
||||
2
|
||||
);
|
||||
24
playground/src/index.css
Normal file
24
playground/src/index.css
Normal file
@@ -0,0 +1,24 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body,
|
||||
html,
|
||||
#root {
|
||||
margin: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.shadow-copied {
|
||||
--tw-shadow: 0 0 0 1px #f07171, inset 0 0 0 1px #f07171;
|
||||
--tw-shadow-colored: 0 0 0 1px var(--tw-shadow-color),
|
||||
inset 0 0 0 1px var(--tw-shadow-color);
|
||||
|
||||
box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000),
|
||||
var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow);
|
||||
}
|
||||
@@ -1,10 +1,10 @@
|
||||
import React from "react";
|
||||
import ReactDOM from "react-dom/client";
|
||||
import App from "./App";
|
||||
import "./style.css";
|
||||
import Editor from "./Editor";
|
||||
import "./index.css";
|
||||
|
||||
ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render(
|
||||
<React.StrictMode>
|
||||
<App />
|
||||
<Editor />
|
||||
</React.StrictMode>
|
||||
);
|
||||
|
||||
@@ -71,6 +71,11 @@ export const AVAILABLE_OPTIONS: OptionGroup[] = [
|
||||
"default": '[]',
|
||||
"type": 'Vec<CheckCodePrefix>',
|
||||
},
|
||||
{
|
||||
"name": "update-check",
|
||||
"default": 'true',
|
||||
"type": 'bool',
|
||||
},
|
||||
]},
|
||||
{"name": "flake8-annotations", "fields": [
|
||||
{
|
||||
@@ -225,7 +230,7 @@ export const AVAILABLE_OPTIONS: OptionGroup[] = [
|
||||
{"name": "pydocstyle", "fields": [
|
||||
{
|
||||
"name": "convention",
|
||||
"default": '"convention"',
|
||||
"default": 'None',
|
||||
"type": 'Convention',
|
||||
},
|
||||
]},
|
||||
|
||||
@@ -1,60 +0,0 @@
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body,
|
||||
html,
|
||||
#root,
|
||||
#app {
|
||||
margin: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#app {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.options {
|
||||
height: 100vh;
|
||||
overflow-y: scroll;
|
||||
padding: 1em;
|
||||
min-width: 300px;
|
||||
border-right: 1px solid lightgray;
|
||||
}
|
||||
|
||||
.options ul {
|
||||
padding-left: 1em;
|
||||
list-style-type: none;
|
||||
}
|
||||
|
||||
.options li {
|
||||
margin-bottom: 0.3em;
|
||||
}
|
||||
|
||||
.options details {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.options summary {
|
||||
font-size: 1.3rem;
|
||||
}
|
||||
|
||||
.options input {
|
||||
display: block;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.editor {
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
#error {
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
min-height: 1em;
|
||||
padding: 1em;
|
||||
background: darkred;
|
||||
color: white;
|
||||
}
|
||||
17
playground/tailwind.config.cjs
Normal file
17
playground/tailwind.config.cjs
Normal file
@@ -0,0 +1,17 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
const defaultTheme = require("tailwindcss/defaultTheme");
|
||||
|
||||
module.exports = {
|
||||
content: ["./index.html", "./src/**/*.{js,ts,jsx,tsx}"],
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
ayu: "#f07171",
|
||||
},
|
||||
fontFamily: {
|
||||
sans: ["Inter var", ...defaultTheme.fontFamily.sans],
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [],
|
||||
};
|
||||
@@ -34,6 +34,7 @@ bindings = "bin"
|
||||
strip = true
|
||||
|
||||
[tool.ruff]
|
||||
update-check = true
|
||||
|
||||
[tool.ruff.isort]
|
||||
force-wrap-aliases = true
|
||||
|
||||
11
resources/test/fixtures/pygrep-hooks/PGH004_0.py
vendored
Normal file
11
resources/test/fixtures/pygrep-hooks/PGH004_0.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
x = 1 # noqa
|
||||
x = 1 # NOQA:F401,W203
|
||||
# noqa
|
||||
# NOQA
|
||||
# noqa:F401
|
||||
# noqa:F401,W203
|
||||
|
||||
x = 1
|
||||
x = 1 # noqa: F401, W203
|
||||
# noqa: F401
|
||||
# noqa: F401, W203
|
||||
42
resources/test/fixtures/pyupgrade/UP022.py
vendored
Normal file
42
resources/test/fixtures/pyupgrade/UP022.py
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
from subprocess import run
|
||||
import subprocess
|
||||
|
||||
output = run(["foo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
|
||||
output = subprocess.run(["foo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
|
||||
output = subprocess.run(stdout=subprocess.PIPE, args=["foo"], stderr=subprocess.PIPE)
|
||||
|
||||
output = subprocess.run(
|
||||
["foo"], stdout=subprocess.PIPE, check=True, stderr=subprocess.PIPE
|
||||
)
|
||||
|
||||
output = subprocess.run(
|
||||
["foo"], stderr=subprocess.PIPE, check=True, stdout=subprocess.PIPE
|
||||
)
|
||||
|
||||
output = subprocess.run(
|
||||
["foo"],
|
||||
stdout=subprocess.PIPE,
|
||||
check=True,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
encoding="utf-8",
|
||||
close_fds=True,
|
||||
)
|
||||
|
||||
if output:
|
||||
output = subprocess.run(
|
||||
["foo"],
|
||||
stdout=subprocess.PIPE,
|
||||
check=True,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
|
||||
# Examples that should NOT trigger the rule
|
||||
from foo import PIPE
|
||||
subprocess.run(["foo"], stdout=PIPE, stderr=PIPE)
|
||||
run(["foo"], stdout=None, stderr=PIPE)
|
||||
31
resources/test/fixtures/pyupgrade/UP023.py
vendored
Normal file
31
resources/test/fixtures/pyupgrade/UP023.py
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
# These two imports have something after cElementTree, so they should be fixed.
|
||||
from xml.etree.cElementTree import XML, Element, SubElement
|
||||
import xml.etree.cElementTree as ET
|
||||
|
||||
# Weird spacing should not cause issues.
|
||||
from xml.etree.cElementTree import XML
|
||||
import xml.etree.cElementTree as ET
|
||||
|
||||
# Multi line imports should also work fine.
|
||||
from xml.etree.cElementTree import (
|
||||
XML,
|
||||
Element,
|
||||
SubElement,
|
||||
)
|
||||
if True:
|
||||
import xml.etree.cElementTree as ET
|
||||
from xml.etree import cElementTree as CET
|
||||
|
||||
from xml.etree import cElementTree as ET
|
||||
|
||||
import contextlib, xml.etree.cElementTree as ET
|
||||
|
||||
# This should fix the second, but not the first invocation.
|
||||
import xml.etree.cElementTree, xml.etree.cElementTree as ET
|
||||
|
||||
# The below items should NOT be changed.
|
||||
import xml.etree.cElementTree
|
||||
|
||||
from .xml.etree.cElementTree import XML
|
||||
|
||||
from xml.etree import cElementTree
|
||||
15
resources/test/fixtures/ruff/RUF004.py
vendored
Normal file
15
resources/test/fixtures/ruff/RUF004.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
def f(*args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
a = (1, 2)
|
||||
b = (3, 4)
|
||||
c = (5, 6)
|
||||
d = (7, 8)
|
||||
|
||||
f(a, b)
|
||||
f(a, kw=b)
|
||||
f(*a, kw=b)
|
||||
f(kw=a, *b)
|
||||
f(kw=a, *b, *c)
|
||||
f(*a, kw=b, *c, kw1=d)
|
||||
@@ -111,7 +111,7 @@
|
||||
}
|
||||
},
|
||||
"flake8-annotations": {
|
||||
"description": "Plugins Options for the `flake8-annotations` plugin.",
|
||||
"description": "Options for the `flake8-annotations` plugin.",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Flake8AnnotationsOptions"
|
||||
@@ -195,7 +195,7 @@
|
||||
]
|
||||
},
|
||||
"format": {
|
||||
"description": "The style in which violation messages should be formatted: `\"text\"` (default), `\"grouped\"` (group messages by file), `\"json\"` (machine-readable), `\"junit\"` (machine-readable XML), or `\"github\"` (GitHub Actions annotations).",
|
||||
"description": "The style in which violation messages should be formatted: `\"text\"` (default), `\"grouped\"` (group messages by file), `\"json\"` (machine-readable), `\"junit\"` (machine-readable XML), `\"github\"` (GitHub Actions annotations) or `\"gitlab\"` (GitLab CI code quality report).",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SerializationFormat"
|
||||
@@ -364,6 +364,13 @@
|
||||
"items": {
|
||||
"$ref": "#/definitions/CheckCodePrefix"
|
||||
}
|
||||
},
|
||||
"update-check": {
|
||||
"description": "Enable or disable automatic update checks (overridden by the `--update-check` and `--no-update-check` command-line flags).",
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
@@ -377,6 +384,7 @@
|
||||
"A001",
|
||||
"A002",
|
||||
"A003",
|
||||
"ALL",
|
||||
"ANN",
|
||||
"ANN0",
|
||||
"ANN00",
|
||||
@@ -725,6 +733,7 @@
|
||||
"PGH001",
|
||||
"PGH002",
|
||||
"PGH003",
|
||||
"PGH004",
|
||||
"PLC",
|
||||
"PLC0",
|
||||
"PLC04",
|
||||
@@ -805,6 +814,7 @@
|
||||
"RUF001",
|
||||
"RUF002",
|
||||
"RUF003",
|
||||
"RUF004",
|
||||
"RUF1",
|
||||
"RUF10",
|
||||
"RUF100",
|
||||
@@ -879,6 +889,8 @@
|
||||
"UP02",
|
||||
"UP020",
|
||||
"UP021",
|
||||
"UP022",
|
||||
"UP023",
|
||||
"W",
|
||||
"W2",
|
||||
"W29",
|
||||
@@ -906,10 +918,21 @@
|
||||
]
|
||||
},
|
||||
"Convention": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"google",
|
||||
"numpy"
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Use Google-style docstrings.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"google"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Use NumPy-style docstrings.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"numpy"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"Flake8AnnotationsOptions": {
|
||||
@@ -1053,7 +1076,7 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"ban-relative-imports": {
|
||||
"description": "Whether to ban all relative imports (`\"all\"`), or only those imports that extend into the parent module and beyond (`\"parents\"`).",
|
||||
"description": "Whether to ban all relative imports (`\"all\"`), or only those imports that extend into the parent module or beyond (`\"parents\"`).",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Strictness"
|
||||
@@ -1249,10 +1272,21 @@
|
||||
]
|
||||
},
|
||||
"Quote": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"single",
|
||||
"double"
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Use single quotes (`'`).",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"single"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Use double quotes (`\"`).",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"double"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"SerializationFormat": {
|
||||
@@ -1262,14 +1296,26 @@
|
||||
"json",
|
||||
"junit",
|
||||
"grouped",
|
||||
"github"
|
||||
"github",
|
||||
"gitlab"
|
||||
]
|
||||
},
|
||||
"Strictness": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"parents",
|
||||
"all"
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Ban imports that extend into the parent module or beyond.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"parents"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Ban all relative imports.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"all"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"Version": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_dev"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -13,6 +13,8 @@ use itertools::Itertools;
|
||||
use ruff::checks::{CheckCode, PREFIX_REDIRECTS};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
const ALL: &str = "ALL";
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
pub struct Cli {
|
||||
@@ -34,9 +36,15 @@ pub fn main(cli: &Cli) -> Result<()> {
|
||||
let code_suffix_len = code_str.len() - code_prefix_len;
|
||||
for i in 0..=code_suffix_len {
|
||||
let prefix = code_str[..code_prefix_len + i].to_string();
|
||||
let entry = prefix_to_codes.entry(prefix).or_default();
|
||||
entry.insert(check_code.clone());
|
||||
prefix_to_codes
|
||||
.entry(prefix)
|
||||
.or_default()
|
||||
.insert(check_code.clone());
|
||||
}
|
||||
prefix_to_codes
|
||||
.entry(ALL.to_string())
|
||||
.or_default()
|
||||
.insert(check_code.clone());
|
||||
}
|
||||
|
||||
// Add any prefix aliases (e.g., "U" to "UP").
|
||||
@@ -79,6 +87,7 @@ pub fn main(cli: &Cli) -> Result<()> {
|
||||
.derive("Eq")
|
||||
.derive("PartialOrd")
|
||||
.derive("Ord")
|
||||
.push_variant(Variant::new("None"))
|
||||
.push_variant(Variant::new("Zero"))
|
||||
.push_variant(Variant::new("One"))
|
||||
.push_variant(Variant::new("Two"))
|
||||
@@ -129,14 +138,18 @@ pub fn main(cli: &Cli) -> Result<()> {
|
||||
.line("#[allow(clippy::match_same_arms)]")
|
||||
.line("match self {");
|
||||
for prefix in prefix_to_codes.keys() {
|
||||
let num_numeric = prefix.chars().filter(|char| char.is_numeric()).count();
|
||||
let specificity = match num_numeric {
|
||||
0 => "Zero",
|
||||
1 => "One",
|
||||
2 => "Two",
|
||||
3 => "Three",
|
||||
4 => "Four",
|
||||
_ => panic!("Invalid prefix: {prefix}"),
|
||||
let specificity = if prefix == "ALL" {
|
||||
"None"
|
||||
} else {
|
||||
let num_numeric = prefix.chars().filter(|char| char.is_numeric()).count();
|
||||
match num_numeric {
|
||||
0 => "Zero",
|
||||
1 => "One",
|
||||
2 => "Two",
|
||||
3 => "Three",
|
||||
4 => "Four",
|
||||
_ => panic!("Invalid prefix: {prefix}"),
|
||||
}
|
||||
};
|
||||
gen = gen.line(format!(
|
||||
"CheckCodePrefix::{prefix} => SuffixLength::{specificity},"
|
||||
|
||||
@@ -5,7 +5,9 @@ use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use clap::Args;
|
||||
use ruff::code_gen::SourceGenerator;
|
||||
use ruff::source_code_generator::SourceCodeGenerator;
|
||||
use ruff::source_code_locator::SourceCodeLocator;
|
||||
use ruff::source_code_style::SourceCodeStyleDetector;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
#[derive(Args)]
|
||||
@@ -18,7 +20,9 @@ pub struct Cli {
|
||||
pub fn main(cli: &Cli) -> Result<()> {
|
||||
let contents = fs::read_to_string(&cli.file)?;
|
||||
let python_ast = parser::parse_program(&contents, &cli.file.to_string_lossy())?;
|
||||
let mut generator = SourceGenerator::new();
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_suite(&python_ast);
|
||||
println!("{}", generator.generate()?);
|
||||
Ok(())
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_macros"
|
||||
version = "0.0.196"
|
||||
version = "0.0.200"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
|
||||
@@ -14,6 +14,7 @@ use crate::source_code_locator::SourceCodeLocator;
|
||||
pub enum Mode {
|
||||
Generate,
|
||||
Apply,
|
||||
Diff,
|
||||
None,
|
||||
}
|
||||
|
||||
|
||||
17
src/cache.rs
17
src/cache.rs
@@ -12,7 +12,6 @@ use once_cell::sync::Lazy;
|
||||
use path_absolutize::Absolutize;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::autofix::fixer;
|
||||
use crate::message::Message;
|
||||
use crate::settings::{flags, Settings};
|
||||
|
||||
@@ -48,7 +47,7 @@ fn content_dir() -> &'static Path {
|
||||
Path::new("content")
|
||||
}
|
||||
|
||||
fn cache_key<P: AsRef<Path>>(path: P, settings: &Settings, autofix: fixer::Mode) -> u64 {
|
||||
fn cache_key<P: AsRef<Path>>(path: P, settings: &Settings, autofix: flags::Autofix) -> u64 {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
CARGO_PKG_VERSION.hash(&mut hasher);
|
||||
path.as_ref().absolutize().unwrap().hash(&mut hasher);
|
||||
@@ -93,13 +92,8 @@ pub fn get<P: AsRef<Path>>(
|
||||
path: P,
|
||||
metadata: &Metadata,
|
||||
settings: &Settings,
|
||||
autofix: fixer::Mode,
|
||||
cache: flags::Cache,
|
||||
autofix: flags::Autofix,
|
||||
) -> Option<Vec<Message>> {
|
||||
if matches!(cache, flags::Cache::Disabled) {
|
||||
return None;
|
||||
};
|
||||
|
||||
let encoded = read_sync(&settings.cache_dir, cache_key(path, settings, autofix)).ok()?;
|
||||
let (mtime, messages) = match bincode::deserialize::<CheckResult>(&encoded[..]) {
|
||||
Ok(CheckResult {
|
||||
@@ -122,14 +116,9 @@ pub fn set<P: AsRef<Path>>(
|
||||
path: P,
|
||||
metadata: &Metadata,
|
||||
settings: &Settings,
|
||||
autofix: fixer::Mode,
|
||||
autofix: flags::Autofix,
|
||||
messages: &[Message],
|
||||
cache: flags::Cache,
|
||||
) {
|
||||
if matches!(cache, flags::Cache::Disabled) {
|
||||
return;
|
||||
};
|
||||
|
||||
let check_result = CheckResultRef {
|
||||
metadata: &CacheMetadata {
|
||||
mtime: FileTime::from_last_modification_time(metadata).unix_seconds(),
|
||||
|
||||
@@ -33,14 +33,16 @@ use crate::python::typing::SubscriptKind;
|
||||
use crate::settings::types::PythonVersion;
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::vendored::cformat::{CFormatError, CFormatErrorType};
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
use crate::vendor::cformat::{CFormatError, CFormatErrorType};
|
||||
use crate::visibility::{module_visibility, transition_scope, Modifier, Visibility, VisibleScope};
|
||||
use crate::{
|
||||
docstrings, flake8_2020, flake8_annotations, flake8_bandit, flake8_blind_except,
|
||||
flake8_boolean_trap, flake8_bugbear, flake8_builtins, flake8_comprehensions, flake8_datetimez,
|
||||
flake8_debugger, flake8_errmsg, flake8_import_conventions, flake8_print, flake8_return,
|
||||
flake8_simplify, flake8_tidy_imports, flake8_unused_arguments, mccabe, noqa, pandas_vet,
|
||||
pep8_naming, pycodestyle, pydocstyle, pyflakes, pygrep_hooks, pylint, pyupgrade, visibility,
|
||||
pep8_naming, pycodestyle, pydocstyle, pyflakes, pygrep_hooks, pylint, pyupgrade, ruff,
|
||||
visibility,
|
||||
};
|
||||
|
||||
const GLOBAL_SCOPE_INDEX: usize = 0;
|
||||
@@ -56,6 +58,7 @@ pub struct Checker<'a> {
|
||||
pub(crate) settings: &'a Settings,
|
||||
pub(crate) noqa_line_for: &'a IntMap<usize, usize>,
|
||||
pub(crate) locator: &'a SourceCodeLocator<'a>,
|
||||
pub(crate) style: &'a SourceCodeStyleDetector<'a>,
|
||||
// Computed checks.
|
||||
checks: Vec<Check>,
|
||||
// Function and class definition tracking (e.g., for docstring enforcement).
|
||||
@@ -107,6 +110,7 @@ impl<'a> Checker<'a> {
|
||||
noqa: flags::Noqa,
|
||||
path: &'a Path,
|
||||
locator: &'a SourceCodeLocator,
|
||||
style: &'a SourceCodeStyleDetector,
|
||||
) -> Checker<'a> {
|
||||
Checker {
|
||||
settings,
|
||||
@@ -115,6 +119,7 @@ impl<'a> Checker<'a> {
|
||||
noqa,
|
||||
path,
|
||||
locator,
|
||||
style,
|
||||
checks: vec![],
|
||||
definitions: vec![],
|
||||
deletions: FxHashSet::default(),
|
||||
@@ -645,6 +650,9 @@ where
|
||||
));
|
||||
}
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP023) {
|
||||
pyupgrade::plugins::replace_c_element_tree(self, stmt);
|
||||
}
|
||||
|
||||
for alias in names {
|
||||
if alias.node.name.contains('.') && alias.node.asname.is_none() {
|
||||
@@ -814,6 +822,9 @@ where
|
||||
} => {
|
||||
// Track `import from` statements, to ensure that we can correctly attribute
|
||||
// references like `from typing import Union`.
|
||||
if self.settings.enabled.contains(&CheckCode::UP023) {
|
||||
pyupgrade::plugins::replace_c_element_tree(self, stmt);
|
||||
}
|
||||
if level.map(|level| level == 0).unwrap_or(true) {
|
||||
if let Some(module) = module {
|
||||
self.from_imports
|
||||
@@ -1547,9 +1558,6 @@ where
|
||||
pyupgrade::plugins::use_pep585_annotation(self, expr, attr);
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::UP019) {
|
||||
pyupgrade::plugins::typing_text_str_alias(self, expr);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP016) {
|
||||
pyupgrade::plugins::remove_six_compat(self, expr);
|
||||
}
|
||||
@@ -1559,7 +1567,9 @@ where
|
||||
{
|
||||
pyupgrade::plugins::datetime_utc_alias(self, expr);
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::UP019) {
|
||||
pyupgrade::plugins::typing_text_str_alias(self, expr);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::YTT202) {
|
||||
flake8_2020::plugins::name_or_attribute(self, expr);
|
||||
}
|
||||
@@ -1642,25 +1652,35 @@ where
|
||||
}
|
||||
|
||||
// pyupgrade
|
||||
if self.settings.enabled.contains(&CheckCode::UP003) {
|
||||
pyupgrade::plugins::type_of_primitive(self, expr, func, args);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP005) {
|
||||
pyupgrade::plugins::deprecated_unittest_alias(self, func);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP008) {
|
||||
pyupgrade::plugins::super_call_with_parameters(self, expr, func, args);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP012) {
|
||||
pyupgrade::plugins::unnecessary_encode_utf8(self, expr, func, args, keywords);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP015) {
|
||||
pyupgrade::plugins::redundant_open_modes(self, expr);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP016) {
|
||||
pyupgrade::plugins::remove_six_compat(self, expr);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP018) {
|
||||
pyupgrade::plugins::native_literals(self, expr, func, args, keywords);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP020) {
|
||||
pyupgrade::plugins::open_alias(self, expr, func);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP021) {
|
||||
pyupgrade::plugins::replace_universal_newlines(self, expr, keywords);
|
||||
}
|
||||
|
||||
// flake8-super
|
||||
if self.settings.enabled.contains(&CheckCode::UP008) {
|
||||
pyupgrade::plugins::super_call_with_parameters(self, expr, func, args);
|
||||
if self.settings.enabled.contains(&CheckCode::UP022) {
|
||||
pyupgrade::plugins::replace_stdout_stderr(self, expr, keywords);
|
||||
}
|
||||
|
||||
// flake8-print
|
||||
@@ -1725,7 +1745,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C401) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_generator_set(
|
||||
expr,
|
||||
@@ -1739,7 +1758,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C402) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_generator_dict(
|
||||
expr,
|
||||
@@ -1753,7 +1771,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C403) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_list_comprehension_set(
|
||||
@@ -1769,7 +1786,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C404) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_list_comprehension_dict(
|
||||
@@ -1785,7 +1801,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C405) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_literal_set(
|
||||
expr,
|
||||
@@ -1799,7 +1814,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C406) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_literal_dict(
|
||||
expr,
|
||||
@@ -1813,7 +1827,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C408) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_collection_call(
|
||||
expr,
|
||||
@@ -1827,7 +1840,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C409) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_literal_within_tuple_call(
|
||||
@@ -1842,7 +1854,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C410) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_literal_within_list_call(
|
||||
@@ -1857,7 +1868,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C411) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_list_call(
|
||||
expr,
|
||||
@@ -1870,7 +1880,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C413) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_call_around_sorted(
|
||||
@@ -1885,7 +1894,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C414) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_double_cast_or_process(
|
||||
@@ -1897,7 +1905,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C415) {
|
||||
if let Some(check) =
|
||||
flake8_comprehensions::checks::unnecessary_subscript_reversal(
|
||||
@@ -1909,7 +1916,6 @@ where
|
||||
self.add_check(check);
|
||||
};
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::C417) {
|
||||
if let Some(check) = flake8_comprehensions::checks::unnecessary_map(
|
||||
func,
|
||||
@@ -1920,19 +1926,6 @@ where
|
||||
};
|
||||
}
|
||||
|
||||
// pyupgrade
|
||||
if self.settings.enabled.contains(&CheckCode::UP003) {
|
||||
pyupgrade::plugins::type_of_primitive(self, expr, func, args);
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::UP015) {
|
||||
pyupgrade::plugins::redundant_open_modes(self, expr);
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::UP020) {
|
||||
pyupgrade::plugins::open_alias(self, expr, func);
|
||||
}
|
||||
|
||||
// flake8-boolean-trap
|
||||
if self.settings.enabled.contains(&CheckCode::FBT003) {
|
||||
flake8_boolean_trap::plugins::check_boolean_positional_value_in_function_call(
|
||||
@@ -1963,7 +1956,6 @@ where
|
||||
if self.settings.enabled.contains(&CheckCode::PD002) {
|
||||
self.add_checks(pandas_vet::checks::inplace_argument(keywords).into_iter());
|
||||
}
|
||||
|
||||
for (code, name) in vec![
|
||||
(CheckCode::PD003, "isnull"),
|
||||
(CheckCode::PD004, "notnull"),
|
||||
@@ -1980,7 +1972,6 @@ where
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::PD015) {
|
||||
if let Some(check) = pandas_vet::checks::use_of_pd_merge(func) {
|
||||
self.add_check(check);
|
||||
@@ -2074,6 +2065,14 @@ where
|
||||
if self.settings.enabled.contains(&CheckCode::PLR1722) {
|
||||
pylint::plugins::use_sys_exit(self, func);
|
||||
}
|
||||
|
||||
// ruff
|
||||
if self.settings.enabled.contains(&CheckCode::RUF004) {
|
||||
self.add_checks(
|
||||
ruff::checks::keyword_argument_before_star_argument(args, keywords)
|
||||
.into_iter(),
|
||||
);
|
||||
}
|
||||
}
|
||||
ExprKind::Dict { keys, .. } => {
|
||||
let check_repeated_literals = self.settings.enabled.contains(&CheckCode::F601);
|
||||
@@ -3926,16 +3925,26 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn check_ast(
|
||||
python_ast: &Suite,
|
||||
locator: &SourceCodeLocator,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
noqa_line_for: &IntMap<usize, usize>,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
noqa: flags::Noqa,
|
||||
path: &Path,
|
||||
) -> Vec<Check> {
|
||||
let mut checker = Checker::new(settings, noqa_line_for, autofix, noqa, path, locator);
|
||||
let mut checker = Checker::new(
|
||||
settings,
|
||||
noqa_line_for,
|
||||
autofix,
|
||||
noqa,
|
||||
path,
|
||||
locator,
|
||||
stylist,
|
||||
);
|
||||
checker.push_scope(Scope::new(ScopeKind::Module));
|
||||
checker.bind_builtins();
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
use crate::checks::{Check, CheckCode};
|
||||
use crate::pycodestyle::checks::{line_too_long, no_newline_at_end_of_file};
|
||||
use crate::pygrep_hooks::plugins::blanket_type_ignore;
|
||||
use crate::pygrep_hooks::plugins::{blanket_noqa, blanket_type_ignore};
|
||||
use crate::pyupgrade::checks::unnecessary_coding_comment;
|
||||
use crate::settings::{flags, Settings};
|
||||
|
||||
@@ -18,6 +18,7 @@ pub fn check_lines(
|
||||
let enforce_line_too_long = settings.enabled.contains(&CheckCode::E501);
|
||||
let enforce_no_newline_at_end_of_file = settings.enabled.contains(&CheckCode::W292);
|
||||
let enforce_blanket_type_ignore = settings.enabled.contains(&CheckCode::PGH003);
|
||||
let enforce_blanket_noqa = settings.enabled.contains(&CheckCode::PGH004);
|
||||
|
||||
let mut commented_lines_iter = commented_lines.iter().peekable();
|
||||
for (index, line) in contents.lines().enumerate() {
|
||||
@@ -45,6 +46,14 @@ pub fn check_lines(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if enforce_blanket_noqa {
|
||||
if commented_lines.contains(&(index + 1)) {
|
||||
if let Some(check) = blanket_noqa(index, line) {
|
||||
checks.push(check);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if enforce_line_too_long {
|
||||
|
||||
@@ -49,6 +49,9 @@ pub fn check_noqa(
|
||||
while let Some((index, check)) =
|
||||
checks_iter.next_if(|(_index, check)| check.location.row() <= *lineno)
|
||||
{
|
||||
if check.kind == CheckKind::BlanketNOQA {
|
||||
continue;
|
||||
}
|
||||
// Grab the noqa (logical) line number for the current (physical) line.
|
||||
// If there are newlines at the end of the file, they won't be represented in
|
||||
// `noqa_line_for`, so fallback to the current line.
|
||||
|
||||
@@ -229,6 +229,8 @@ pub enum CheckCode {
|
||||
UP019,
|
||||
UP020,
|
||||
UP021,
|
||||
UP022,
|
||||
UP023,
|
||||
// pydocstyle
|
||||
D100,
|
||||
D101,
|
||||
@@ -328,11 +330,13 @@ pub enum CheckCode {
|
||||
RUF001,
|
||||
RUF002,
|
||||
RUF003,
|
||||
RUF004,
|
||||
RUF100,
|
||||
// pygrep-hooks
|
||||
PGH001,
|
||||
PGH002,
|
||||
PGH003,
|
||||
PGH004,
|
||||
// pandas-vet
|
||||
PD002,
|
||||
PD003,
|
||||
@@ -843,6 +847,8 @@ pub enum CheckKind {
|
||||
NativeLiterals,
|
||||
OpenAlias,
|
||||
ReplaceUniversalNewlines,
|
||||
ReplaceStdoutStderr,
|
||||
RewriteCElementTree,
|
||||
// pydocstyle
|
||||
BlankLineAfterLastSection(String),
|
||||
BlankLineAfterSection(String),
|
||||
@@ -926,6 +932,7 @@ pub enum CheckKind {
|
||||
NoEval,
|
||||
DeprecatedLogWarn,
|
||||
BlanketTypeIgnore,
|
||||
BlanketNOQA,
|
||||
// flake8-unused-arguments
|
||||
UnusedFunctionArgument(String),
|
||||
UnusedMethodArgument(String),
|
||||
@@ -955,6 +962,7 @@ pub enum CheckKind {
|
||||
AmbiguousUnicodeCharacterString(char, char),
|
||||
AmbiguousUnicodeCharacterDocstring(char, char),
|
||||
AmbiguousUnicodeCharacterComment(char, char),
|
||||
KeywordArgumentBeforeStarArgument(String),
|
||||
UnusedNOQA(Option<UnusedCodes>),
|
||||
// flake8-datetimez
|
||||
CallDatetimeWithoutTzinfo,
|
||||
@@ -974,9 +982,11 @@ impl CheckCode {
|
||||
pub fn lint_source(&self) -> &'static LintSource {
|
||||
match self {
|
||||
CheckCode::RUF100 => &LintSource::NoQA,
|
||||
CheckCode::E501 | CheckCode::W292 | CheckCode::UP009 | CheckCode::PGH003 => {
|
||||
&LintSource::Lines
|
||||
}
|
||||
CheckCode::E501
|
||||
| CheckCode::W292
|
||||
| CheckCode::UP009
|
||||
| CheckCode::PGH003
|
||||
| CheckCode::PGH004 => &LintSource::Lines,
|
||||
CheckCode::ERA001
|
||||
| CheckCode::Q000
|
||||
| CheckCode::Q001
|
||||
@@ -1221,6 +1231,8 @@ impl CheckCode {
|
||||
CheckCode::UP019 => CheckKind::TypingTextStrAlias,
|
||||
CheckCode::UP020 => CheckKind::OpenAlias,
|
||||
CheckCode::UP021 => CheckKind::ReplaceUniversalNewlines,
|
||||
CheckCode::UP022 => CheckKind::ReplaceStdoutStderr,
|
||||
CheckCode::UP023 => CheckKind::RewriteCElementTree,
|
||||
// pydocstyle
|
||||
CheckCode::D100 => CheckKind::PublicModule,
|
||||
CheckCode::D101 => CheckKind::PublicClass,
|
||||
@@ -1319,6 +1331,7 @@ impl CheckCode {
|
||||
CheckCode::PGH001 => CheckKind::NoEval,
|
||||
CheckCode::PGH002 => CheckKind::DeprecatedLogWarn,
|
||||
CheckCode::PGH003 => CheckKind::BlanketTypeIgnore,
|
||||
CheckCode::PGH004 => CheckKind::BlanketNOQA,
|
||||
// flake8-unused-arguments
|
||||
CheckCode::ARG001 => CheckKind::UnusedFunctionArgument("...".to_string()),
|
||||
CheckCode::ARG002 => CheckKind::UnusedMethodArgument("...".to_string()),
|
||||
@@ -1360,6 +1373,7 @@ impl CheckCode {
|
||||
CheckCode::RUF001 => CheckKind::AmbiguousUnicodeCharacterString('𝐁', 'B'),
|
||||
CheckCode::RUF002 => CheckKind::AmbiguousUnicodeCharacterDocstring('𝐁', 'B'),
|
||||
CheckCode::RUF003 => CheckKind::AmbiguousUnicodeCharacterComment('𝐁', 'B'),
|
||||
CheckCode::RUF004 => CheckKind::KeywordArgumentBeforeStarArgument("...".to_string()),
|
||||
CheckCode::RUF100 => CheckKind::UnusedNOQA(None),
|
||||
}
|
||||
}
|
||||
@@ -1585,6 +1599,7 @@ impl CheckCode {
|
||||
CheckCode::PGH001 => CheckCategory::PygrepHooks,
|
||||
CheckCode::PGH002 => CheckCategory::PygrepHooks,
|
||||
CheckCode::PGH003 => CheckCategory::PygrepHooks,
|
||||
CheckCode::PGH004 => CheckCategory::PygrepHooks,
|
||||
CheckCode::PLC0414 => CheckCategory::Pylint,
|
||||
CheckCode::PLC2201 => CheckCategory::Pylint,
|
||||
CheckCode::PLC3002 => CheckCategory::Pylint,
|
||||
@@ -1612,6 +1627,7 @@ impl CheckCode {
|
||||
CheckCode::RUF001 => CheckCategory::Ruff,
|
||||
CheckCode::RUF002 => CheckCategory::Ruff,
|
||||
CheckCode::RUF003 => CheckCategory::Ruff,
|
||||
CheckCode::RUF004 => CheckCategory::Ruff,
|
||||
CheckCode::RUF100 => CheckCategory::Ruff,
|
||||
CheckCode::S101 => CheckCategory::Flake8Bandit,
|
||||
CheckCode::S102 => CheckCategory::Flake8Bandit,
|
||||
@@ -1643,6 +1659,8 @@ impl CheckCode {
|
||||
CheckCode::UP019 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP020 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP021 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP022 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP023 => CheckCategory::Pyupgrade,
|
||||
CheckCode::W292 => CheckCategory::Pycodestyle,
|
||||
CheckCode::W605 => CheckCategory::Pycodestyle,
|
||||
CheckCode::YTT101 => CheckCategory::Flake82020,
|
||||
@@ -1858,6 +1876,8 @@ impl CheckKind {
|
||||
CheckKind::TypingTextStrAlias => &CheckCode::UP019,
|
||||
CheckKind::OpenAlias => &CheckCode::UP020,
|
||||
CheckKind::ReplaceUniversalNewlines => &CheckCode::UP021,
|
||||
CheckKind::ReplaceStdoutStderr => &CheckCode::UP022,
|
||||
CheckKind::RewriteCElementTree => &CheckCode::UP023,
|
||||
// pydocstyle
|
||||
CheckKind::BlankLineAfterLastSection(..) => &CheckCode::D413,
|
||||
CheckKind::BlankLineAfterSection(..) => &CheckCode::D410,
|
||||
@@ -1941,6 +1961,7 @@ impl CheckKind {
|
||||
CheckKind::NoEval => &CheckCode::PGH001,
|
||||
CheckKind::DeprecatedLogWarn => &CheckCode::PGH002,
|
||||
CheckKind::BlanketTypeIgnore => &CheckCode::PGH003,
|
||||
CheckKind::BlanketNOQA => &CheckCode::PGH004,
|
||||
// flake8-unused-arguments
|
||||
CheckKind::UnusedFunctionArgument(..) => &CheckCode::ARG001,
|
||||
CheckKind::UnusedMethodArgument(..) => &CheckCode::ARG002,
|
||||
@@ -1980,6 +2001,7 @@ impl CheckKind {
|
||||
CheckKind::AmbiguousUnicodeCharacterString(..) => &CheckCode::RUF001,
|
||||
CheckKind::AmbiguousUnicodeCharacterDocstring(..) => &CheckCode::RUF002,
|
||||
CheckKind::AmbiguousUnicodeCharacterComment(..) => &CheckCode::RUF003,
|
||||
CheckKind::KeywordArgumentBeforeStarArgument(..) => &CheckCode::RUF004,
|
||||
CheckKind::UnusedNOQA(..) => &CheckCode::RUF100,
|
||||
}
|
||||
}
|
||||
@@ -2590,6 +2612,12 @@ impl CheckKind {
|
||||
CheckKind::ReplaceUniversalNewlines => {
|
||||
"`universal_newlines` is deprecated, use `text`".to_string()
|
||||
}
|
||||
CheckKind::ReplaceStdoutStderr => {
|
||||
"Sending stdout and stderr to pipe is deprecated, use `capture_output`".to_string()
|
||||
}
|
||||
CheckKind::RewriteCElementTree => {
|
||||
"`cElementTree` is deprecated, use `ElementTree`".to_string()
|
||||
}
|
||||
CheckKind::ConvertNamedTupleFunctionalToClass(name) => {
|
||||
format!("Convert `{name}` from `NamedTuple` functional to class syntax")
|
||||
}
|
||||
@@ -2795,13 +2823,14 @@ impl CheckKind {
|
||||
"Boolean positional value in function call".to_string()
|
||||
}
|
||||
// pygrep-hooks
|
||||
CheckKind::NoEval => "No builtin `eval()` allowed".to_string(),
|
||||
CheckKind::DeprecatedLogWarn => {
|
||||
"`warn` is deprecated in favor of `warning`".to_string()
|
||||
}
|
||||
CheckKind::BlanketNOQA => "Use specific error codes when using `noqa`".to_string(),
|
||||
CheckKind::BlanketTypeIgnore => {
|
||||
"Use specific error codes when ignoring type issues".to_string()
|
||||
}
|
||||
CheckKind::DeprecatedLogWarn => {
|
||||
"`warn` is deprecated in favor of `warning`".to_string()
|
||||
}
|
||||
CheckKind::NoEval => "No builtin `eval()` allowed".to_string(),
|
||||
// flake8-unused-arguments
|
||||
CheckKind::UnusedFunctionArgument(name) => {
|
||||
format!("Unused function argument: `{name}`")
|
||||
@@ -2882,6 +2911,9 @@ impl CheckKind {
|
||||
'{representant}'?)"
|
||||
)
|
||||
}
|
||||
CheckKind::KeywordArgumentBeforeStarArgument(name) => {
|
||||
format!("Keyword argument `{name}` must come after starred arguments")
|
||||
}
|
||||
CheckKind::UnusedNOQA(codes) => match codes {
|
||||
None => "Unused blanket `noqa` directive".to_string(),
|
||||
Some(codes) => {
|
||||
@@ -3032,6 +3064,8 @@ impl CheckKind {
|
||||
| CheckKind::OpenAlias
|
||||
| CheckKind::NewLineAfterLastParagraph
|
||||
| CheckKind::ReplaceUniversalNewlines
|
||||
| CheckKind::ReplaceStdoutStderr
|
||||
| CheckKind::RewriteCElementTree
|
||||
| CheckKind::NewLineAfterSectionName(..)
|
||||
| CheckKind::NoBlankLineAfterFunction(..)
|
||||
| CheckKind::NoBlankLineBeforeClass(..)
|
||||
|
||||
@@ -28,6 +28,7 @@ pub enum CheckCodePrefix {
|
||||
A001,
|
||||
A002,
|
||||
A003,
|
||||
ALL,
|
||||
ANN,
|
||||
ANN0,
|
||||
ANN00,
|
||||
@@ -376,6 +377,7 @@ pub enum CheckCodePrefix {
|
||||
PGH001,
|
||||
PGH002,
|
||||
PGH003,
|
||||
PGH004,
|
||||
PLC,
|
||||
PLC0,
|
||||
PLC04,
|
||||
@@ -456,6 +458,7 @@ pub enum CheckCodePrefix {
|
||||
RUF001,
|
||||
RUF002,
|
||||
RUF003,
|
||||
RUF004,
|
||||
RUF1,
|
||||
RUF10,
|
||||
RUF100,
|
||||
@@ -530,6 +533,8 @@ pub enum CheckCodePrefix {
|
||||
UP02,
|
||||
UP020,
|
||||
UP021,
|
||||
UP022,
|
||||
UP023,
|
||||
W,
|
||||
W2,
|
||||
W29,
|
||||
@@ -558,6 +563,7 @@ pub enum CheckCodePrefix {
|
||||
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum SuffixLength {
|
||||
None,
|
||||
Zero,
|
||||
One,
|
||||
Two,
|
||||
@@ -575,6 +581,300 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::A001 => vec![CheckCode::A001],
|
||||
CheckCodePrefix::A002 => vec![CheckCode::A002],
|
||||
CheckCodePrefix::A003 => vec![CheckCode::A003],
|
||||
CheckCodePrefix::ALL => vec![
|
||||
CheckCode::E401,
|
||||
CheckCode::E402,
|
||||
CheckCode::E501,
|
||||
CheckCode::E711,
|
||||
CheckCode::E712,
|
||||
CheckCode::E713,
|
||||
CheckCode::E714,
|
||||
CheckCode::E721,
|
||||
CheckCode::E722,
|
||||
CheckCode::E731,
|
||||
CheckCode::E741,
|
||||
CheckCode::E742,
|
||||
CheckCode::E743,
|
||||
CheckCode::E902,
|
||||
CheckCode::E999,
|
||||
CheckCode::W292,
|
||||
CheckCode::W605,
|
||||
CheckCode::F401,
|
||||
CheckCode::F402,
|
||||
CheckCode::F403,
|
||||
CheckCode::F404,
|
||||
CheckCode::F405,
|
||||
CheckCode::F406,
|
||||
CheckCode::F407,
|
||||
CheckCode::F501,
|
||||
CheckCode::F502,
|
||||
CheckCode::F503,
|
||||
CheckCode::F504,
|
||||
CheckCode::F505,
|
||||
CheckCode::F506,
|
||||
CheckCode::F507,
|
||||
CheckCode::F508,
|
||||
CheckCode::F509,
|
||||
CheckCode::F521,
|
||||
CheckCode::F522,
|
||||
CheckCode::F523,
|
||||
CheckCode::F524,
|
||||
CheckCode::F525,
|
||||
CheckCode::F541,
|
||||
CheckCode::F601,
|
||||
CheckCode::F602,
|
||||
CheckCode::F621,
|
||||
CheckCode::F622,
|
||||
CheckCode::F631,
|
||||
CheckCode::F632,
|
||||
CheckCode::F633,
|
||||
CheckCode::F634,
|
||||
CheckCode::F701,
|
||||
CheckCode::F702,
|
||||
CheckCode::F704,
|
||||
CheckCode::F706,
|
||||
CheckCode::F707,
|
||||
CheckCode::F722,
|
||||
CheckCode::F811,
|
||||
CheckCode::F821,
|
||||
CheckCode::F822,
|
||||
CheckCode::F823,
|
||||
CheckCode::F831,
|
||||
CheckCode::F841,
|
||||
CheckCode::F842,
|
||||
CheckCode::F901,
|
||||
CheckCode::PLC0414,
|
||||
CheckCode::PLC2201,
|
||||
CheckCode::PLC3002,
|
||||
CheckCode::PLE0117,
|
||||
CheckCode::PLE0118,
|
||||
CheckCode::PLE1142,
|
||||
CheckCode::PLR0206,
|
||||
CheckCode::PLR0402,
|
||||
CheckCode::PLR1701,
|
||||
CheckCode::PLR1722,
|
||||
CheckCode::PLW0120,
|
||||
CheckCode::PLW0602,
|
||||
CheckCode::A001,
|
||||
CheckCode::A002,
|
||||
CheckCode::A003,
|
||||
CheckCode::B002,
|
||||
CheckCode::B003,
|
||||
CheckCode::B004,
|
||||
CheckCode::B005,
|
||||
CheckCode::B006,
|
||||
CheckCode::B007,
|
||||
CheckCode::B008,
|
||||
CheckCode::B009,
|
||||
CheckCode::B010,
|
||||
CheckCode::B011,
|
||||
CheckCode::B012,
|
||||
CheckCode::B013,
|
||||
CheckCode::B014,
|
||||
CheckCode::B015,
|
||||
CheckCode::B016,
|
||||
CheckCode::B017,
|
||||
CheckCode::B018,
|
||||
CheckCode::B019,
|
||||
CheckCode::B020,
|
||||
CheckCode::B021,
|
||||
CheckCode::B022,
|
||||
CheckCode::B023,
|
||||
CheckCode::B024,
|
||||
CheckCode::B025,
|
||||
CheckCode::B026,
|
||||
CheckCode::B027,
|
||||
CheckCode::B904,
|
||||
CheckCode::B905,
|
||||
CheckCode::BLE001,
|
||||
CheckCode::C400,
|
||||
CheckCode::C401,
|
||||
CheckCode::C402,
|
||||
CheckCode::C403,
|
||||
CheckCode::C404,
|
||||
CheckCode::C405,
|
||||
CheckCode::C406,
|
||||
CheckCode::C408,
|
||||
CheckCode::C409,
|
||||
CheckCode::C410,
|
||||
CheckCode::C411,
|
||||
CheckCode::C413,
|
||||
CheckCode::C414,
|
||||
CheckCode::C415,
|
||||
CheckCode::C416,
|
||||
CheckCode::C417,
|
||||
CheckCode::T100,
|
||||
CheckCode::C901,
|
||||
CheckCode::TID252,
|
||||
CheckCode::RET501,
|
||||
CheckCode::RET502,
|
||||
CheckCode::RET503,
|
||||
CheckCode::RET504,
|
||||
CheckCode::RET505,
|
||||
CheckCode::RET506,
|
||||
CheckCode::RET507,
|
||||
CheckCode::RET508,
|
||||
CheckCode::T201,
|
||||
CheckCode::T203,
|
||||
CheckCode::Q000,
|
||||
CheckCode::Q001,
|
||||
CheckCode::Q002,
|
||||
CheckCode::Q003,
|
||||
CheckCode::ANN001,
|
||||
CheckCode::ANN002,
|
||||
CheckCode::ANN003,
|
||||
CheckCode::ANN101,
|
||||
CheckCode::ANN102,
|
||||
CheckCode::ANN201,
|
||||
CheckCode::ANN202,
|
||||
CheckCode::ANN204,
|
||||
CheckCode::ANN205,
|
||||
CheckCode::ANN206,
|
||||
CheckCode::ANN401,
|
||||
CheckCode::YTT101,
|
||||
CheckCode::YTT102,
|
||||
CheckCode::YTT103,
|
||||
CheckCode::YTT201,
|
||||
CheckCode::YTT202,
|
||||
CheckCode::YTT203,
|
||||
CheckCode::YTT204,
|
||||
CheckCode::YTT301,
|
||||
CheckCode::YTT302,
|
||||
CheckCode::YTT303,
|
||||
CheckCode::SIM118,
|
||||
CheckCode::UP001,
|
||||
CheckCode::UP003,
|
||||
CheckCode::UP004,
|
||||
CheckCode::UP005,
|
||||
CheckCode::UP006,
|
||||
CheckCode::UP007,
|
||||
CheckCode::UP008,
|
||||
CheckCode::UP009,
|
||||
CheckCode::UP010,
|
||||
CheckCode::UP011,
|
||||
CheckCode::UP012,
|
||||
CheckCode::UP013,
|
||||
CheckCode::UP014,
|
||||
CheckCode::UP015,
|
||||
CheckCode::UP016,
|
||||
CheckCode::UP017,
|
||||
CheckCode::UP018,
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
CheckCode::UP022,
|
||||
CheckCode::UP023,
|
||||
CheckCode::D100,
|
||||
CheckCode::D101,
|
||||
CheckCode::D102,
|
||||
CheckCode::D103,
|
||||
CheckCode::D104,
|
||||
CheckCode::D105,
|
||||
CheckCode::D106,
|
||||
CheckCode::D107,
|
||||
CheckCode::D200,
|
||||
CheckCode::D201,
|
||||
CheckCode::D202,
|
||||
CheckCode::D203,
|
||||
CheckCode::D204,
|
||||
CheckCode::D205,
|
||||
CheckCode::D206,
|
||||
CheckCode::D207,
|
||||
CheckCode::D208,
|
||||
CheckCode::D209,
|
||||
CheckCode::D210,
|
||||
CheckCode::D211,
|
||||
CheckCode::D212,
|
||||
CheckCode::D213,
|
||||
CheckCode::D214,
|
||||
CheckCode::D215,
|
||||
CheckCode::D300,
|
||||
CheckCode::D301,
|
||||
CheckCode::D400,
|
||||
CheckCode::D402,
|
||||
CheckCode::D403,
|
||||
CheckCode::D404,
|
||||
CheckCode::D405,
|
||||
CheckCode::D406,
|
||||
CheckCode::D407,
|
||||
CheckCode::D408,
|
||||
CheckCode::D409,
|
||||
CheckCode::D410,
|
||||
CheckCode::D411,
|
||||
CheckCode::D412,
|
||||
CheckCode::D413,
|
||||
CheckCode::D414,
|
||||
CheckCode::D415,
|
||||
CheckCode::D416,
|
||||
CheckCode::D417,
|
||||
CheckCode::D418,
|
||||
CheckCode::D419,
|
||||
CheckCode::N801,
|
||||
CheckCode::N802,
|
||||
CheckCode::N803,
|
||||
CheckCode::N804,
|
||||
CheckCode::N805,
|
||||
CheckCode::N806,
|
||||
CheckCode::N807,
|
||||
CheckCode::N811,
|
||||
CheckCode::N812,
|
||||
CheckCode::N813,
|
||||
CheckCode::N814,
|
||||
CheckCode::N815,
|
||||
CheckCode::N816,
|
||||
CheckCode::N817,
|
||||
CheckCode::N818,
|
||||
CheckCode::I001,
|
||||
CheckCode::ERA001,
|
||||
CheckCode::S101,
|
||||
CheckCode::S102,
|
||||
CheckCode::S104,
|
||||
CheckCode::S105,
|
||||
CheckCode::S106,
|
||||
CheckCode::S107,
|
||||
CheckCode::FBT001,
|
||||
CheckCode::FBT002,
|
||||
CheckCode::FBT003,
|
||||
CheckCode::ARG001,
|
||||
CheckCode::ARG002,
|
||||
CheckCode::ARG003,
|
||||
CheckCode::ARG004,
|
||||
CheckCode::ARG005,
|
||||
CheckCode::ICN001,
|
||||
CheckCode::DTZ001,
|
||||
CheckCode::DTZ002,
|
||||
CheckCode::DTZ003,
|
||||
CheckCode::DTZ004,
|
||||
CheckCode::DTZ005,
|
||||
CheckCode::DTZ006,
|
||||
CheckCode::DTZ007,
|
||||
CheckCode::DTZ011,
|
||||
CheckCode::DTZ012,
|
||||
CheckCode::RUF001,
|
||||
CheckCode::RUF002,
|
||||
CheckCode::RUF003,
|
||||
CheckCode::RUF004,
|
||||
CheckCode::RUF100,
|
||||
CheckCode::PGH001,
|
||||
CheckCode::PGH002,
|
||||
CheckCode::PGH003,
|
||||
CheckCode::PGH004,
|
||||
CheckCode::PD002,
|
||||
CheckCode::PD003,
|
||||
CheckCode::PD004,
|
||||
CheckCode::PD007,
|
||||
CheckCode::PD008,
|
||||
CheckCode::PD009,
|
||||
CheckCode::PD010,
|
||||
CheckCode::PD011,
|
||||
CheckCode::PD012,
|
||||
CheckCode::PD013,
|
||||
CheckCode::PD015,
|
||||
CheckCode::PD901,
|
||||
CheckCode::EM101,
|
||||
CheckCode::EM102,
|
||||
CheckCode::EM103,
|
||||
],
|
||||
CheckCodePrefix::ANN => vec![
|
||||
CheckCode::ANN001,
|
||||
CheckCode::ANN002,
|
||||
@@ -1775,12 +2075,28 @@ impl CheckCodePrefix {
|
||||
);
|
||||
vec![CheckCode::PD901]
|
||||
}
|
||||
CheckCodePrefix::PGH => vec![CheckCode::PGH001, CheckCode::PGH002, CheckCode::PGH003],
|
||||
CheckCodePrefix::PGH0 => vec![CheckCode::PGH001, CheckCode::PGH002, CheckCode::PGH003],
|
||||
CheckCodePrefix::PGH00 => vec![CheckCode::PGH001, CheckCode::PGH002, CheckCode::PGH003],
|
||||
CheckCodePrefix::PGH => vec![
|
||||
CheckCode::PGH001,
|
||||
CheckCode::PGH002,
|
||||
CheckCode::PGH003,
|
||||
CheckCode::PGH004,
|
||||
],
|
||||
CheckCodePrefix::PGH0 => vec![
|
||||
CheckCode::PGH001,
|
||||
CheckCode::PGH002,
|
||||
CheckCode::PGH003,
|
||||
CheckCode::PGH004,
|
||||
],
|
||||
CheckCodePrefix::PGH00 => vec![
|
||||
CheckCode::PGH001,
|
||||
CheckCode::PGH002,
|
||||
CheckCode::PGH003,
|
||||
CheckCode::PGH004,
|
||||
],
|
||||
CheckCodePrefix::PGH001 => vec![CheckCode::PGH001],
|
||||
CheckCodePrefix::PGH002 => vec![CheckCode::PGH002],
|
||||
CheckCodePrefix::PGH003 => vec![CheckCode::PGH003],
|
||||
CheckCodePrefix::PGH004 => vec![CheckCode::PGH004],
|
||||
CheckCodePrefix::PLC => {
|
||||
vec![CheckCode::PLC0414, CheckCode::PLC2201, CheckCode::PLC3002]
|
||||
}
|
||||
@@ -2025,13 +2341,25 @@ impl CheckCodePrefix {
|
||||
CheckCode::RUF001,
|
||||
CheckCode::RUF002,
|
||||
CheckCode::RUF003,
|
||||
CheckCode::RUF004,
|
||||
CheckCode::RUF100,
|
||||
],
|
||||
CheckCodePrefix::RUF0 => vec![CheckCode::RUF001, CheckCode::RUF002, CheckCode::RUF003],
|
||||
CheckCodePrefix::RUF00 => vec![CheckCode::RUF001, CheckCode::RUF002, CheckCode::RUF003],
|
||||
CheckCodePrefix::RUF0 => vec![
|
||||
CheckCode::RUF001,
|
||||
CheckCode::RUF002,
|
||||
CheckCode::RUF003,
|
||||
CheckCode::RUF004,
|
||||
],
|
||||
CheckCodePrefix::RUF00 => vec![
|
||||
CheckCode::RUF001,
|
||||
CheckCode::RUF002,
|
||||
CheckCode::RUF003,
|
||||
CheckCode::RUF004,
|
||||
],
|
||||
CheckCodePrefix::RUF001 => vec![CheckCode::RUF001],
|
||||
CheckCodePrefix::RUF002 => vec![CheckCode::RUF002],
|
||||
CheckCodePrefix::RUF003 => vec![CheckCode::RUF003],
|
||||
CheckCodePrefix::RUF004 => vec![CheckCode::RUF004],
|
||||
CheckCodePrefix::RUF1 => vec![CheckCode::RUF100],
|
||||
CheckCodePrefix::RUF10 => vec![CheckCode::RUF100],
|
||||
CheckCodePrefix::RUF100 => vec![CheckCode::RUF100],
|
||||
@@ -2109,6 +2437,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
CheckCode::UP022,
|
||||
CheckCode::UP023,
|
||||
]
|
||||
}
|
||||
CheckCodePrefix::U0 => {
|
||||
@@ -2139,6 +2469,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
CheckCode::UP022,
|
||||
CheckCode::UP023,
|
||||
]
|
||||
}
|
||||
CheckCodePrefix::U00 => {
|
||||
@@ -2353,6 +2685,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
CheckCode::UP022,
|
||||
CheckCode::UP023,
|
||||
],
|
||||
CheckCodePrefix::UP0 => vec![
|
||||
CheckCode::UP001,
|
||||
@@ -2375,6 +2709,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
CheckCode::UP022,
|
||||
CheckCode::UP023,
|
||||
],
|
||||
CheckCodePrefix::UP00 => vec![
|
||||
CheckCode::UP001,
|
||||
@@ -2416,9 +2752,16 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::UP017 => vec![CheckCode::UP017],
|
||||
CheckCodePrefix::UP018 => vec![CheckCode::UP018],
|
||||
CheckCodePrefix::UP019 => vec![CheckCode::UP019],
|
||||
CheckCodePrefix::UP02 => vec![CheckCode::UP020, CheckCode::UP021],
|
||||
CheckCodePrefix::UP02 => vec![
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
CheckCode::UP022,
|
||||
CheckCode::UP023,
|
||||
],
|
||||
CheckCodePrefix::UP020 => vec![CheckCode::UP020],
|
||||
CheckCodePrefix::UP021 => vec![CheckCode::UP021],
|
||||
CheckCodePrefix::UP022 => vec![CheckCode::UP022],
|
||||
CheckCodePrefix::UP023 => vec![CheckCode::UP023],
|
||||
CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
|
||||
CheckCodePrefix::W2 => vec![CheckCode::W292],
|
||||
CheckCodePrefix::W29 => vec![CheckCode::W292],
|
||||
@@ -2478,6 +2821,7 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::A001 => SuffixLength::Three,
|
||||
CheckCodePrefix::A002 => SuffixLength::Three,
|
||||
CheckCodePrefix::A003 => SuffixLength::Three,
|
||||
CheckCodePrefix::ALL => SuffixLength::None,
|
||||
CheckCodePrefix::ANN => SuffixLength::Zero,
|
||||
CheckCodePrefix::ANN0 => SuffixLength::One,
|
||||
CheckCodePrefix::ANN00 => SuffixLength::Two,
|
||||
@@ -2826,6 +3170,7 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::PGH001 => SuffixLength::Three,
|
||||
CheckCodePrefix::PGH002 => SuffixLength::Three,
|
||||
CheckCodePrefix::PGH003 => SuffixLength::Three,
|
||||
CheckCodePrefix::PGH004 => SuffixLength::Three,
|
||||
CheckCodePrefix::PLC => SuffixLength::Zero,
|
||||
CheckCodePrefix::PLC0 => SuffixLength::One,
|
||||
CheckCodePrefix::PLC04 => SuffixLength::Two,
|
||||
@@ -2906,6 +3251,7 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::RUF001 => SuffixLength::Three,
|
||||
CheckCodePrefix::RUF002 => SuffixLength::Three,
|
||||
CheckCodePrefix::RUF003 => SuffixLength::Three,
|
||||
CheckCodePrefix::RUF004 => SuffixLength::Three,
|
||||
CheckCodePrefix::RUF1 => SuffixLength::One,
|
||||
CheckCodePrefix::RUF10 => SuffixLength::Two,
|
||||
CheckCodePrefix::RUF100 => SuffixLength::Three,
|
||||
@@ -2980,6 +3326,8 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::UP02 => SuffixLength::Two,
|
||||
CheckCodePrefix::UP020 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP021 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP022 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP023 => SuffixLength::Three,
|
||||
CheckCodePrefix::W => SuffixLength::Zero,
|
||||
CheckCodePrefix::W2 => SuffixLength::One,
|
||||
CheckCodePrefix::W29 => SuffixLength::Two,
|
||||
@@ -3010,6 +3358,7 @@ impl CheckCodePrefix {
|
||||
|
||||
pub const CATEGORIES: &[CheckCodePrefix] = &[
|
||||
CheckCodePrefix::A,
|
||||
CheckCodePrefix::ALL,
|
||||
CheckCodePrefix::ANN,
|
||||
CheckCodePrefix::ARG,
|
||||
CheckCodePrefix::B,
|
||||
|
||||
23
src/cli.rs
23
src/cli.rs
@@ -50,6 +50,10 @@ pub struct Cli {
|
||||
fix_only: bool,
|
||||
#[clap(long, overrides_with("fix_only"), hide = true)]
|
||||
no_fix_only: bool,
|
||||
/// Avoid writing any fixed files back; instead, output a diff for each
|
||||
/// changed file to stdout.
|
||||
#[arg(long)]
|
||||
pub diff: bool,
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long)]
|
||||
pub no_cache: bool,
|
||||
@@ -101,10 +105,15 @@ pub struct Cli {
|
||||
no_respect_gitignore: bool,
|
||||
/// Enforce exclusions, even for paths passed to Ruff directly on the
|
||||
/// command-line.
|
||||
#[arg(long, overrides_with("no_show_source"))]
|
||||
#[arg(long, overrides_with("no_force_exclude"))]
|
||||
force_exclude: bool,
|
||||
#[clap(long, overrides_with("force_exclude"), hide = true)]
|
||||
no_force_exclude: bool,
|
||||
/// Enable or disable automatic update checks.
|
||||
#[arg(long, overrides_with("no_update_check"))]
|
||||
update_check: bool,
|
||||
#[clap(long, overrides_with("update_check"), hide = true)]
|
||||
no_update_check: bool,
|
||||
/// See the files Ruff will be run against with the current settings.
|
||||
#[arg(long)]
|
||||
pub show_files: bool,
|
||||
@@ -154,6 +163,7 @@ impl Cli {
|
||||
add_noqa: self.add_noqa,
|
||||
autoformat: self.autoformat,
|
||||
config: self.config,
|
||||
diff: self.diff,
|
||||
exit_zero: self.exit_zero,
|
||||
explain: self.explain,
|
||||
files: self.files,
|
||||
@@ -187,11 +197,12 @@ impl Cli {
|
||||
target_version: self.target_version,
|
||||
unfixable: self.unfixable,
|
||||
// TODO(charlie): Included in `pyproject.toml`, but not inherited.
|
||||
cache_dir: self.cache_dir,
|
||||
fix: resolve_bool_arg(self.fix, self.no_fix),
|
||||
fix_only: resolve_bool_arg(self.fix_only, self.no_fix_only),
|
||||
format: self.format,
|
||||
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||
cache_dir: self.cache_dir,
|
||||
format: self.format,
|
||||
update_check: resolve_bool_arg(self.update_check, self.no_update_check),
|
||||
},
|
||||
)
|
||||
}
|
||||
@@ -213,6 +224,7 @@ pub struct Arguments {
|
||||
pub add_noqa: bool,
|
||||
pub autoformat: bool,
|
||||
pub config: Option<PathBuf>,
|
||||
pub diff: bool,
|
||||
pub exit_zero: bool,
|
||||
pub explain: Option<CheckCode>,
|
||||
pub files: Vec<PathBuf>,
|
||||
@@ -247,11 +259,12 @@ pub struct Overrides {
|
||||
pub target_version: Option<PythonVersion>,
|
||||
pub unfixable: Option<Vec<CheckCodePrefix>>,
|
||||
// TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
pub fix: Option<bool>,
|
||||
pub fix_only: Option<bool>,
|
||||
pub format: Option<SerializationFormat>,
|
||||
pub force_exclude: Option<bool>,
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
pub format: Option<SerializationFormat>,
|
||||
pub update_check: Option<bool>,
|
||||
}
|
||||
|
||||
/// Map the CLI settings to a `LogLevel`.
|
||||
|
||||
@@ -332,6 +332,9 @@ pub fn explain(code: &CheckCode, format: &SerializationFormat) -> Result<()> {
|
||||
SerializationFormat::Github => {
|
||||
bail!("`--explain` does not support GitHub format")
|
||||
}
|
||||
SerializationFormat::Gitlab => {
|
||||
bail!("`--explain` does not support GitLab format")
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
|
||||
fn assertion_error(msg: Option<&Expr>) -> Stmt {
|
||||
Stmt::new(
|
||||
@@ -47,7 +47,8 @@ pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option
|
||||
|
||||
let mut check = Check::new(CheckKind::DoNotAssertFalse, Range::from_located(test));
|
||||
if checker.patch(check.kind.code()) {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator =
|
||||
SourceCodeGenerator::new(checker.style.indentation(), checker.style.quote());
|
||||
generator.unparse_stmt(&assertion_error(msg));
|
||||
if let Ok(content) = generator.generate() {
|
||||
check.amend(Fix::replacement(
|
||||
|
||||
@@ -7,7 +7,7 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckCode, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
|
||||
fn type_pattern(elts: Vec<&Expr>) -> Expr {
|
||||
Expr::new(
|
||||
@@ -54,7 +54,8 @@ fn duplicate_handler_exceptions<'a>(
|
||||
Range::from_located(expr),
|
||||
);
|
||||
if checker.patch(check.kind.code()) {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator =
|
||||
SourceCodeGenerator::new(checker.style.indentation(), checker.style.quote());
|
||||
if unique_elts.len() == 1 {
|
||||
generator.unparse_expr(unique_elts[0], 0);
|
||||
} else {
|
||||
|
||||
@@ -4,9 +4,9 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::python::identifiers::IDENTIFIER_REGEX;
|
||||
use crate::python::keyword::KWLIST;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
|
||||
fn attribute(value: &Expr, attr: &str) -> Expr {
|
||||
Expr::new(
|
||||
@@ -46,7 +46,8 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
|
||||
|
||||
let mut check = Check::new(CheckKind::GetAttrWithConstant, Range::from_located(expr));
|
||||
if checker.patch(check.kind.code()) {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator =
|
||||
SourceCodeGenerator::new(checker.style.indentation(), checker.style.quote());
|
||||
generator.unparse_expr(&attribute(obj, value), 0);
|
||||
if let Ok(content) = generator.generate() {
|
||||
check.amend(Fix::replacement(
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
|
||||
/// B013
|
||||
pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[Excepthandler]) {
|
||||
@@ -23,7 +23,8 @@ pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[E
|
||||
Range::from_located(type_),
|
||||
);
|
||||
if checker.patch(check.kind.code()) {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator =
|
||||
SourceCodeGenerator::new(checker.style.indentation(), checker.style.quote());
|
||||
generator.unparse_expr(elt, 0);
|
||||
if let Ok(content) = generator.generate() {
|
||||
check.amend(Fix::replacement(
|
||||
|
||||
@@ -6,11 +6,17 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::python::identifiers::IDENTIFIER_REGEX;
|
||||
use crate::python::keyword::KWLIST;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
|
||||
fn assignment(obj: &Expr, name: &str, value: &Expr) -> Result<String> {
|
||||
fn assignment(
|
||||
obj: &Expr,
|
||||
name: &str,
|
||||
value: &Expr,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<String> {
|
||||
let stmt = Stmt::new(
|
||||
Location::default(),
|
||||
Location::default(),
|
||||
@@ -28,7 +34,7 @@ fn assignment(obj: &Expr, name: &str, value: &Expr) -> Result<String> {
|
||||
type_comment: None,
|
||||
},
|
||||
);
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_stmt(&stmt);
|
||||
generator.generate().map_err(std::convert::Into::into)
|
||||
}
|
||||
@@ -63,7 +69,7 @@ pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
|
||||
if expr == child.as_ref() {
|
||||
let mut check = Check::new(CheckKind::SetAttrWithConstant, Range::from_located(expr));
|
||||
if checker.patch(check.kind.code()) {
|
||||
match assignment(obj, name, value) {
|
||||
match assignment(obj, name, value, checker.style) {
|
||||
Ok(content) => check.amend(Fix::replacement(
|
||||
content,
|
||||
expr.location,
|
||||
|
||||
@@ -25,7 +25,7 @@ expression: checks
|
||||
row: 10
|
||||
column: 12
|
||||
fix:
|
||||
content: "raise AssertionError('message')"
|
||||
content: "raise AssertionError(\"message\")"
|
||||
location:
|
||||
row: 10
|
||||
column: 0
|
||||
|
||||
@@ -7,7 +7,9 @@ use serde::{Deserialize, Serialize};
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum Quote {
|
||||
/// Use single quotes (`'`).
|
||||
Single,
|
||||
/// Use double quotes (`"`).
|
||||
Double,
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,9 @@ use serde::{Deserialize, Serialize};
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum Strictness {
|
||||
/// Ban imports that extend into the parent module or beyond.
|
||||
Parents,
|
||||
/// Ban all relative imports.
|
||||
All,
|
||||
}
|
||||
|
||||
@@ -29,7 +31,7 @@ pub struct Options {
|
||||
"#
|
||||
)]
|
||||
/// Whether to ban all relative imports (`"all"`), or only those imports
|
||||
/// that extend into the parent module and beyond (`"parents"`).
|
||||
/// that extend into the parent module or beyond (`"parents"`).
|
||||
pub ban_relative_imports: Option<Strictness>,
|
||||
}
|
||||
|
||||
|
||||
@@ -24,7 +24,6 @@ mod checkers;
|
||||
pub mod checks;
|
||||
pub mod checks_gen;
|
||||
pub mod cli;
|
||||
pub mod code_gen;
|
||||
mod cst;
|
||||
mod directives;
|
||||
mod docstrings;
|
||||
@@ -60,7 +59,7 @@ mod pandas_vet;
|
||||
pub mod pep8_naming;
|
||||
pub mod printer;
|
||||
mod pycodestyle;
|
||||
mod pydocstyle;
|
||||
pub mod pydocstyle;
|
||||
mod pyflakes;
|
||||
mod pygrep_hooks;
|
||||
mod pylint;
|
||||
@@ -70,8 +69,10 @@ pub mod resolver;
|
||||
mod ruff;
|
||||
mod rustpython_helpers;
|
||||
pub mod settings;
|
||||
pub mod source_code_generator;
|
||||
pub mod source_code_locator;
|
||||
mod vendored;
|
||||
pub mod source_code_style;
|
||||
mod vendor;
|
||||
pub mod visibility;
|
||||
|
||||
cfg_if! {
|
||||
|
||||
@@ -11,6 +11,7 @@ use crate::rustpython_helpers::tokenize;
|
||||
use crate::settings::configuration::Configuration;
|
||||
use crate::settings::{flags, pyproject, Settings};
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
use crate::{directives, packages, resolver};
|
||||
|
||||
/// Load the relevant `Settings` for a given `Path`.
|
||||
@@ -38,9 +39,12 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = tokenize(contents);
|
||||
|
||||
// Initialize the SourceCodeLocator (which computes offsets lazily).
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = SourceCodeStyleDetector::from_contents(contents, &locator);
|
||||
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
@@ -55,6 +59,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
|
||||
contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
&settings,
|
||||
autofix.into(),
|
||||
|
||||
@@ -14,6 +14,9 @@ use crate::settings::configuration::Configuration;
|
||||
use crate::settings::options::Options;
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
|
||||
const VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
|
||||
#[wasm_bindgen(typescript_custom_section)]
|
||||
const TYPES: &'static str = r#"
|
||||
@@ -58,6 +61,11 @@ pub fn run() {
|
||||
console_log::init_with_level(Level::Debug).expect("Initializing logger went wrong.");
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn current_version() -> JsValue {
|
||||
JsValue::from(VERSION)
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
|
||||
let options: Options = serde_wasm_bindgen::from_value(options).map_err(|e| e.to_string())?;
|
||||
@@ -69,9 +77,12 @@ pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = tokenize(contents);
|
||||
|
||||
// Initialize the SourceCodeLocator (which computes offsets lazily).
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = SourceCodeStyleDetector::from_contents(contents, &locator);
|
||||
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = directives::extract_directives(&tokens, &locator, directives::Flags::empty());
|
||||
|
||||
@@ -82,6 +93,7 @@ pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
|
||||
contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
&settings,
|
||||
flags::Autofix::Enabled,
|
||||
|
||||
238
src/linter.rs
238
src/linter.rs
@@ -5,8 +5,10 @@ use std::ops::AddAssign;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use log::debug;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use similar::TextDiff;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::fixer;
|
||||
@@ -17,14 +19,18 @@ use crate::checkers::lines::check_lines;
|
||||
use crate::checkers::noqa::check_noqa;
|
||||
use crate::checkers::tokens::check_tokens;
|
||||
use crate::checks::{Check, CheckCode, CheckKind, LintSource};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::directives::Directives;
|
||||
use crate::message::{Message, Source};
|
||||
use crate::noqa::add_noqa;
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
use crate::{cache, directives, fs, rustpython_helpers};
|
||||
|
||||
const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
|
||||
const CARGO_PKG_REPOSITORY: &str = env!("CARGO_PKG_REPOSITORY");
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Diagnostics {
|
||||
pub messages: Vec<Message>,
|
||||
@@ -53,6 +59,7 @@ pub(crate) fn check_path(
|
||||
contents: &str,
|
||||
tokens: Vec<LexResult>,
|
||||
locator: &SourceCodeLocator,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
directives: &Directives,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
@@ -89,6 +96,7 @@ pub(crate) fn check_path(
|
||||
checks.extend(check_ast(
|
||||
&python_ast,
|
||||
locator,
|
||||
stylist,
|
||||
&directives.noqa_line_for,
|
||||
settings,
|
||||
autofix,
|
||||
@@ -180,26 +188,54 @@ pub fn lint_path(
|
||||
// Validate the `Settings` and return any errors.
|
||||
settings.validate()?;
|
||||
|
||||
let metadata = path.metadata()?;
|
||||
|
||||
// Check the cache.
|
||||
if let Some(messages) = cache::get(path, &metadata, settings, autofix, cache) {
|
||||
debug!("Cache hit for: {}", path.to_string_lossy());
|
||||
return Ok(Diagnostics::new(messages));
|
||||
}
|
||||
// TODO(charlie): `fixer::Mode::Apply` and `fixer::Mode::Diff` both have
|
||||
// side-effects that aren't captured in the cache. (In practice, it's fine
|
||||
// to cache `fixer::Mode::Apply`, since a file either has no fixes, or we'll
|
||||
// write the fixes to disk, thus invalidating the cache. But it's a bit hard
|
||||
// to reason about. We need to come up with a better solution here.)
|
||||
let metadata = if matches!(cache, flags::Cache::Enabled)
|
||||
&& matches!(autofix, fixer::Mode::None | fixer::Mode::Generate)
|
||||
{
|
||||
let metadata = path.metadata()?;
|
||||
if let Some(messages) = cache::get(path, &metadata, settings, autofix.into()) {
|
||||
debug!("Cache hit for: {}", path.to_string_lossy());
|
||||
return Ok(Diagnostics::new(messages));
|
||||
}
|
||||
Some(metadata)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Read the file from disk.
|
||||
let contents = fs::read_file(path)?;
|
||||
|
||||
// Lint the file.
|
||||
let (contents, fixed, messages) = lint(contents, path, package, settings, autofix)?;
|
||||
let (messages, fixed) = if matches!(autofix, fixer::Mode::Apply | fixer::Mode::Diff) {
|
||||
let (transformed, fixed, messages) = lint_fix(&contents, path, package, settings)?;
|
||||
if fixed > 0 {
|
||||
if matches!(autofix, fixer::Mode::Apply) {
|
||||
write(path, transformed)?;
|
||||
} else if matches!(autofix, fixer::Mode::Diff) {
|
||||
let mut stdout = io::stdout().lock();
|
||||
TextDiff::from_lines(&contents, &transformed)
|
||||
.unified_diff()
|
||||
.header(&fs::relativize_path(path), &fs::relativize_path(path))
|
||||
.to_writer(&mut stdout)?;
|
||||
stdout.write_all(b"\n")?;
|
||||
stdout.flush()?;
|
||||
}
|
||||
}
|
||||
(messages, fixed)
|
||||
} else {
|
||||
let messages = lint_only(&contents, path, package, settings, autofix.into())?;
|
||||
let fixed = 0;
|
||||
(messages, fixed)
|
||||
};
|
||||
|
||||
// Re-populate the cache.
|
||||
cache::set(path, &metadata, settings, autofix, &messages, cache);
|
||||
|
||||
// If we applied any fixes, write the contents back to disk.
|
||||
if fixed > 0 {
|
||||
write(path, contents)?;
|
||||
if let Some(metadata) = metadata {
|
||||
cache::set(path, &metadata, settings, autofix.into(), &messages);
|
||||
}
|
||||
|
||||
Ok(Diagnostics { messages, fixed })
|
||||
@@ -216,9 +252,12 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
|
||||
// Initialize the SourceCodeLocator (which computes offsets lazily).
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
@@ -233,6 +272,7 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
|
||||
&contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
settings,
|
||||
flags::Autofix::Disabled,
|
||||
@@ -259,9 +299,15 @@ pub fn autoformat_path(path: &Path, settings: &Settings) -> Result<()> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
|
||||
// Generate the AST.
|
||||
let python_ast = rustpython_helpers::parse_program_tokens(tokens, "<filename>")?;
|
||||
let mut generator = SourceGenerator::default();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_suite(&python_ast);
|
||||
write(path, generator.generate()?)?;
|
||||
|
||||
@@ -273,40 +319,121 @@ pub fn autoformat_path(path: &Path, settings: &Settings) -> Result<()> {
|
||||
pub fn lint_stdin(
|
||||
path: Option<&Path>,
|
||||
package: Option<&Path>,
|
||||
stdin: &str,
|
||||
contents: &str,
|
||||
settings: &Settings,
|
||||
autofix: fixer::Mode,
|
||||
) -> Result<Diagnostics> {
|
||||
// Validate the `Settings` and return any errors.
|
||||
settings.validate()?;
|
||||
|
||||
// Read the file from disk.
|
||||
let contents = stdin.to_string();
|
||||
// Lint the inputs.
|
||||
let (messages, fixed) = if matches!(autofix, fixer::Mode::Apply | fixer::Mode::Diff) {
|
||||
let (transformed, fixed, messages) = lint_fix(
|
||||
contents,
|
||||
path.unwrap_or_else(|| Path::new("-")),
|
||||
package,
|
||||
settings,
|
||||
)?;
|
||||
|
||||
// Lint the file.
|
||||
let (contents, fixed, messages) = lint(
|
||||
contents,
|
||||
path.unwrap_or_else(|| Path::new("-")),
|
||||
package,
|
||||
settings,
|
||||
autofix,
|
||||
)?;
|
||||
if matches!(autofix, fixer::Mode::Apply) {
|
||||
// Write the contents to stdout, regardless of whether any errors were fixed.
|
||||
io::stdout().write_all(transformed.as_bytes())?;
|
||||
} else if matches!(autofix, fixer::Mode::Diff) {
|
||||
// But only write a diff if it's non-empty.
|
||||
if fixed > 0 {
|
||||
let text_diff = TextDiff::from_lines(contents, &transformed);
|
||||
let mut unified_diff = text_diff.unified_diff();
|
||||
if let Some(path) = path {
|
||||
unified_diff.header(&fs::relativize_path(path), &fs::relativize_path(path));
|
||||
}
|
||||
|
||||
// Write the fixed contents to stdout.
|
||||
if matches!(autofix, fixer::Mode::Apply) {
|
||||
io::stdout().write_all(contents.as_bytes())?;
|
||||
}
|
||||
let mut stdout = io::stdout().lock();
|
||||
unified_diff.to_writer(&mut stdout)?;
|
||||
stdout.write_all(b"\n")?;
|
||||
stdout.flush()?;
|
||||
}
|
||||
}
|
||||
|
||||
(messages, fixed)
|
||||
} else {
|
||||
let messages = lint_only(
|
||||
contents,
|
||||
path.unwrap_or_else(|| Path::new("-")),
|
||||
package,
|
||||
settings,
|
||||
autofix.into(),
|
||||
)?;
|
||||
let fixed = 0;
|
||||
(messages, fixed)
|
||||
};
|
||||
|
||||
Ok(Diagnostics { messages, fixed })
|
||||
}
|
||||
|
||||
fn lint(
|
||||
mut contents: String,
|
||||
/// Generate a list of `Check` violations (optionally including any autofix
|
||||
/// patches) from source code content.
|
||||
fn lint_only(
|
||||
contents: &str,
|
||||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
) -> Result<Vec<Message>> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = SourceCodeStyleDetector::from_contents(contents, &locator);
|
||||
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
&locator,
|
||||
directives::Flags::from_settings(settings),
|
||||
);
|
||||
|
||||
// Generate checks.
|
||||
let checks = check_path(
|
||||
path,
|
||||
package,
|
||||
contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
settings,
|
||||
autofix,
|
||||
flags::Noqa::Enabled,
|
||||
)?;
|
||||
|
||||
// Convert from checks to messages.
|
||||
let path_lossy = path.to_string_lossy();
|
||||
Ok(checks
|
||||
.into_iter()
|
||||
.map(|check| {
|
||||
let source = if settings.show_source {
|
||||
Some(Source::from_check(&check, &locator))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Message::from_check(check, path_lossy.to_string(), source)
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Generate a list of `Check` violations from source code content, iteratively
|
||||
/// autofixing any violations until stable.
|
||||
fn lint_fix(
|
||||
contents: &str,
|
||||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
settings: &Settings,
|
||||
autofix: fixer::Mode,
|
||||
) -> Result<(String, usize, Vec<Message>)> {
|
||||
let mut contents = contents.to_string();
|
||||
|
||||
// Track the number of fixed errors across iterations.
|
||||
let mut fixed = 0;
|
||||
|
||||
@@ -314,13 +441,16 @@ fn lint(
|
||||
let mut iterations = 0;
|
||||
|
||||
// Continuously autofix until the source code stabilizes.
|
||||
let messages = loop {
|
||||
loop {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
|
||||
// Initialize the SourceCodeLocator (which computes offsets lazily).
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
@@ -335,15 +465,16 @@ fn lint(
|
||||
&contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
settings,
|
||||
autofix.into(),
|
||||
flags::Autofix::Enabled,
|
||||
flags::Noqa::Enabled,
|
||||
)?;
|
||||
|
||||
// Apply autofix.
|
||||
if matches!(autofix, fixer::Mode::Apply) && iterations < MAX_ITERATIONS {
|
||||
if let Some((fixed_contents, applied)) = fix_file(&checks, &locator) {
|
||||
if let Some((fixed_contents, applied)) = fix_file(&checks, &locator) {
|
||||
if iterations < MAX_ITERATIONS {
|
||||
// Count the number of fixed errors.
|
||||
fixed += applied;
|
||||
|
||||
@@ -356,11 +487,29 @@ fn lint(
|
||||
// Re-run the linter pass (by avoiding the break).
|
||||
continue;
|
||||
}
|
||||
|
||||
eprintln!(
|
||||
"
|
||||
{}: Failed to converge after {} iterations.
|
||||
|
||||
This likely indicates a bug in `{}`. If you could open an issue at:
|
||||
|
||||
{}/issues
|
||||
|
||||
quoting the contents of `{}`, along with the `pyproject.toml` settings and executed command, we'd \
|
||||
be very appreciative!
|
||||
",
|
||||
"warning".yellow().bold(),
|
||||
MAX_ITERATIONS,
|
||||
CARGO_PKG_NAME,
|
||||
CARGO_PKG_REPOSITORY,
|
||||
fs::relativize_path(path),
|
||||
);
|
||||
}
|
||||
|
||||
// Convert to messages.
|
||||
let filename = path.to_string_lossy().to_string();
|
||||
break checks
|
||||
let path_lossy = path.to_string_lossy();
|
||||
let messages = checks
|
||||
.into_iter()
|
||||
.map(|check| {
|
||||
let source = if settings.show_source {
|
||||
@@ -368,12 +517,11 @@ fn lint(
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Message::from_check(check, filename.clone(), source)
|
||||
Message::from_check(check, path_lossy.to_string(), source)
|
||||
})
|
||||
.collect();
|
||||
};
|
||||
|
||||
Ok((contents, fixed, messages))
|
||||
return Ok((contents, fixed, messages));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -381,6 +529,7 @@ pub fn test_path(path: &Path, settings: &Settings) -> Result<Vec<Check>> {
|
||||
let contents = fs::read_file(path)?;
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
&locator,
|
||||
@@ -392,6 +541,7 @@ pub fn test_path(path: &Path, settings: &Settings) -> Result<Vec<Check>> {
|
||||
&contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
settings,
|
||||
flags::Autofix::Enabled,
|
||||
|
||||
@@ -117,25 +117,20 @@ pub(crate) fn inner_main() -> Result<ExitCode> {
|
||||
PyprojectDiscovery::Hierarchical(settings) => settings.respect_gitignore,
|
||||
},
|
||||
};
|
||||
let (fix, fix_only, format) = match &pyproject_strategy {
|
||||
PyprojectDiscovery::Fixed(settings) => (settings.fix, settings.fix_only, settings.format),
|
||||
PyprojectDiscovery::Hierarchical(settings) => {
|
||||
(settings.fix, settings.fix_only, settings.format)
|
||||
}
|
||||
let (fix, fix_only, format, update_check) = match &pyproject_strategy {
|
||||
PyprojectDiscovery::Fixed(settings) => (
|
||||
settings.fix,
|
||||
settings.fix_only,
|
||||
settings.format,
|
||||
settings.update_check,
|
||||
),
|
||||
PyprojectDiscovery::Hierarchical(settings) => (
|
||||
settings.fix,
|
||||
settings.fix_only,
|
||||
settings.format,
|
||||
settings.update_check,
|
||||
),
|
||||
};
|
||||
let autofix = if fix || fix_only {
|
||||
fixer::Mode::Apply
|
||||
} else if matches!(format, SerializationFormat::Json) {
|
||||
fixer::Mode::Generate
|
||||
} else {
|
||||
fixer::Mode::None
|
||||
};
|
||||
let violations = if fix_only {
|
||||
Violations::Hide
|
||||
} else {
|
||||
Violations::Show
|
||||
};
|
||||
let cache = !cli.no_cache;
|
||||
|
||||
if let Some(code) = cli.explain {
|
||||
commands::explain(&code, &format)?;
|
||||
@@ -150,9 +145,35 @@ pub(crate) fn inner_main() -> Result<ExitCode> {
|
||||
return Ok(ExitCode::SUCCESS);
|
||||
}
|
||||
|
||||
// Autofix rules are as follows:
|
||||
// - If `--fix` or `--fix-only` is set, always apply fixes to the filesystem (or
|
||||
// print them to stdout, if we're reading from stdin).
|
||||
// - Otherwise, if `--format json` is set, generate the fixes (so we print them
|
||||
// out as part of the JSON payload), but don't write them to disk.
|
||||
// - If `--diff` or `--fix-only` are set, don't print any violations (only
|
||||
// fixes).
|
||||
// TODO(charlie): Consider adding ESLint's `--fix-dry-run`, which would generate
|
||||
// but not apply fixes. That would allow us to avoid special-casing JSON
|
||||
// here.
|
||||
let autofix = if cli.diff {
|
||||
fixer::Mode::Diff
|
||||
} else if fix || fix_only {
|
||||
fixer::Mode::Apply
|
||||
} else if matches!(format, SerializationFormat::Json) {
|
||||
fixer::Mode::Generate
|
||||
} else {
|
||||
fixer::Mode::None
|
||||
};
|
||||
let violations = if cli.diff || fix_only {
|
||||
Violations::Hide
|
||||
} else {
|
||||
Violations::Show
|
||||
};
|
||||
let cache = !cli.no_cache;
|
||||
|
||||
let printer = Printer::new(&format, &log_level, &autofix, &violations);
|
||||
if cli.watch {
|
||||
if matches!(autofix, fixer::Mode::Generate | fixer::Mode::Apply) {
|
||||
if !matches!(autofix, fixer::Mode::None) {
|
||||
eprintln!("Warning: --fix is not enabled in watch mode.");
|
||||
}
|
||||
if cli.add_noqa {
|
||||
@@ -251,18 +272,28 @@ pub(crate) fn inner_main() -> Result<ExitCode> {
|
||||
// Always try to print violations (the printer itself may suppress output),
|
||||
// unless we're writing fixes via stdin (in which case, the transformed
|
||||
// source code goes to stdout).
|
||||
if !(is_stdin && matches!(autofix, fixer::Mode::Apply)) {
|
||||
if !(is_stdin && matches!(autofix, fixer::Mode::Apply | fixer::Mode::Diff)) {
|
||||
printer.write_once(&diagnostics)?;
|
||||
}
|
||||
|
||||
// Check for updates if we're in a non-silent log level.
|
||||
#[cfg(feature = "update-informer")]
|
||||
if !is_stdin && log_level >= LogLevel::Default && atty::is(atty::Stream::Stdout) {
|
||||
if update_check
|
||||
&& !is_stdin
|
||||
&& log_level >= LogLevel::Default
|
||||
&& atty::is(atty::Stream::Stdout)
|
||||
{
|
||||
drop(updates::check_for_updates());
|
||||
}
|
||||
|
||||
if !diagnostics.messages.is_empty() && !cli.exit_zero && !fix_only {
|
||||
return Ok(ExitCode::FAILURE);
|
||||
if !cli.exit_zero {
|
||||
if cli.diff || fix_only {
|
||||
if diagnostics.fixed > 0 {
|
||||
return Ok(ExitCode::FAILURE);
|
||||
}
|
||||
} else if !diagnostics.messages.is_empty() {
|
||||
return Ok(ExitCode::FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
22
src/noqa.rs
22
src/noqa.rs
@@ -10,7 +10,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use crate::checks::{Check, CheckCode, CODE_REDIRECTS};
|
||||
|
||||
static NO_QA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
static NOQA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
Regex::new(
|
||||
r"(?P<spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)",
|
||||
)
|
||||
@@ -39,7 +39,7 @@ pub enum Directive<'a> {
|
||||
|
||||
/// Extract the noqa `Directive` from a line of Python source code.
|
||||
pub fn extract_noqa_directive(line: &str) -> Directive {
|
||||
match NO_QA_LINE_REGEX.captures(line) {
|
||||
match NOQA_LINE_REGEX.captures(line) {
|
||||
Some(caps) => match caps.name("spaces") {
|
||||
Some(spaces) => match caps.name("noqa") {
|
||||
Some(noqa) => match caps.name("codes") {
|
||||
@@ -206,20 +206,20 @@ mod tests {
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::noqa::{add_noqa_inner, NO_QA_LINE_REGEX};
|
||||
use crate::noqa::{add_noqa_inner, NOQA_LINE_REGEX};
|
||||
|
||||
#[test]
|
||||
fn regex() {
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# noqa"));
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# NoQA"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# NoQA"));
|
||||
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# noqa: F401"));
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# NoQA: F401"));
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# noqa: F401, E501"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa: F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# NoQA: F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa: F401, E501"));
|
||||
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# noqa:F401"));
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# NoQA:F401"));
|
||||
assert!(NO_QA_LINE_REGEX.is_match("# noqa:F401, E501"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa:F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# NoQA:F401"));
|
||||
assert!(NOQA_LINE_REGEX.is_match("# noqa:F401, E501"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -14,6 +14,7 @@ mod tests {
|
||||
use crate::linter::check_path;
|
||||
use crate::settings::flags;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
use crate::{directives, rustpython_helpers, settings};
|
||||
|
||||
fn check_code(contents: &str, expected: &[CheckCode]) -> Result<()> {
|
||||
@@ -21,6 +22,7 @@ mod tests {
|
||||
let settings = settings::Settings::for_rules(CheckCodePrefix::PD.codes());
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
&locator,
|
||||
@@ -32,6 +34,7 @@ mod tests {
|
||||
&contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
&settings,
|
||||
flags::Autofix::Enabled,
|
||||
|
||||
@@ -8,6 +8,7 @@ use colored::Colorize;
|
||||
use itertools::iterate;
|
||||
use rustpython_parser::ast::Location;
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
|
||||
use crate::autofix::{fixer, Fix};
|
||||
use crate::checks::CheckCode;
|
||||
@@ -89,7 +90,11 @@ impl<'a> Printer<'a> {
|
||||
Violations::Hide => {
|
||||
let fixed = diagnostics.fixed;
|
||||
if fixed > 0 {
|
||||
println!("Fixed {fixed} error(s).");
|
||||
if matches!(self.autofix, fixer::Mode::Apply) {
|
||||
println!("Fixed {fixed} error(s).");
|
||||
} else if matches!(self.autofix, fixer::Mode::Diff) {
|
||||
println!("Would fix {fixed} error(s).");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -134,17 +139,8 @@ impl<'a> Printer<'a> {
|
||||
SerializationFormat::Junit => {
|
||||
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
|
||||
|
||||
// Group by filename.
|
||||
let mut grouped_messages = BTreeMap::default();
|
||||
for message in &diagnostics.messages {
|
||||
grouped_messages
|
||||
.entry(&message.filename)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(message);
|
||||
}
|
||||
|
||||
let mut report = Report::new("ruff");
|
||||
for (filename, messages) in grouped_messages {
|
||||
for (filename, messages) in group_messages_by_filename(&diagnostics.messages) {
|
||||
let mut test_suite = TestSuite::new(filename);
|
||||
test_suite
|
||||
.extra
|
||||
@@ -183,16 +179,7 @@ impl<'a> Printer<'a> {
|
||||
self.post_text(diagnostics);
|
||||
}
|
||||
SerializationFormat::Grouped => {
|
||||
// Group by filename.
|
||||
let mut grouped_messages = BTreeMap::default();
|
||||
for message in &diagnostics.messages {
|
||||
grouped_messages
|
||||
.entry(&message.filename)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(message);
|
||||
}
|
||||
|
||||
for (filename, messages) in grouped_messages {
|
||||
for (filename, messages) in group_messages_by_filename(&diagnostics.messages) {
|
||||
// Compute the maximum number of digits in the row and column, for messages in
|
||||
// this file.
|
||||
let row_length = num_digits(
|
||||
@@ -239,6 +226,34 @@ impl<'a> Printer<'a> {
|
||||
);
|
||||
});
|
||||
}
|
||||
SerializationFormat::Gitlab => {
|
||||
// Generate JSON with errors in GitLab CI format
|
||||
// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implementing-a-custom-tool
|
||||
println!(
|
||||
"{}",
|
||||
serde_json::to_string_pretty(
|
||||
&diagnostics
|
||||
.messages
|
||||
.iter()
|
||||
.map(|message| {
|
||||
json!({
|
||||
"description": format!("({}) {}", message.kind.code(), message.kind.body()),
|
||||
"severity": "major",
|
||||
"fingerprint": message.kind.code(),
|
||||
"location": {
|
||||
"path": relativize_path(Path::new(&message.filename)),
|
||||
"lines": {
|
||||
"begin": message.location.row(),
|
||||
"end": message.end_location.row()
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
)
|
||||
.collect::<Vec<_>>()
|
||||
)?
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -275,6 +290,17 @@ impl<'a> Printer<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn group_messages_by_filename(messages: &Vec<Message>) -> BTreeMap<&String, Vec<&Message>> {
|
||||
let mut grouped_messages = BTreeMap::default();
|
||||
for message in messages {
|
||||
grouped_messages
|
||||
.entry(&message.filename)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(message);
|
||||
}
|
||||
grouped_messages
|
||||
}
|
||||
|
||||
fn num_digits(n: usize) -> usize {
|
||||
iterate(n, |&n| n / 10)
|
||||
.take_while(|&n| n > 0)
|
||||
|
||||
@@ -12,9 +12,15 @@ use crate::ast::whitespace::leading_space;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind, RejectedCmpop};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
|
||||
fn compare(left: &Expr, ops: &[Cmpop], comparators: &[Expr]) -> Option<String> {
|
||||
fn compare(
|
||||
left: &Expr,
|
||||
ops: &[Cmpop],
|
||||
comparators: &[Expr],
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Option<String> {
|
||||
let cmp = Expr::new(
|
||||
Location::default(),
|
||||
Location::default(),
|
||||
@@ -24,7 +30,7 @@ fn compare(left: &Expr, ops: &[Cmpop], comparators: &[Expr]) -> Option<String> {
|
||||
comparators: comparators.to_vec(),
|
||||
},
|
||||
);
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_expr(&cmp, 0);
|
||||
generator.generate().ok()
|
||||
}
|
||||
@@ -194,7 +200,7 @@ pub fn literal_comparisons(
|
||||
.map(|(idx, op)| bad_ops.get(&idx).unwrap_or(op))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
if let Some(content) = compare(left, &ops, comparators) {
|
||||
if let Some(content) = compare(left, &ops, comparators, checker.style) {
|
||||
for check in &mut checks {
|
||||
check.amend(Fix::replacement(
|
||||
content.to_string(),
|
||||
@@ -233,7 +239,9 @@ pub fn not_tests(
|
||||
let mut check =
|
||||
Check::new(CheckKind::NotInTest, Range::from_located(operand));
|
||||
if checker.patch(check.kind.code()) && should_fix {
|
||||
if let Some(content) = compare(left, &[Cmpop::NotIn], comparators) {
|
||||
if let Some(content) =
|
||||
compare(left, &[Cmpop::NotIn], comparators, checker.style)
|
||||
{
|
||||
check.amend(Fix::replacement(
|
||||
content,
|
||||
expr.location,
|
||||
@@ -249,7 +257,9 @@ pub fn not_tests(
|
||||
let mut check =
|
||||
Check::new(CheckKind::NotIsTest, Range::from_located(operand));
|
||||
if checker.patch(check.kind.code()) && should_fix {
|
||||
if let Some(content) = compare(left, &[Cmpop::IsNot], comparators) {
|
||||
if let Some(content) =
|
||||
compare(left, &[Cmpop::IsNot], comparators, checker.style)
|
||||
{
|
||||
check.amend(Fix::replacement(
|
||||
content,
|
||||
expr.location,
|
||||
@@ -267,7 +277,12 @@ pub fn not_tests(
|
||||
}
|
||||
}
|
||||
|
||||
fn function(name: &str, args: &Arguments, body: &Expr) -> Result<String> {
|
||||
fn function(
|
||||
name: &str,
|
||||
args: &Arguments,
|
||||
body: &Expr,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<String> {
|
||||
let body = Stmt::new(
|
||||
Location::default(),
|
||||
Location::default(),
|
||||
@@ -287,7 +302,7 @@ fn function(name: &str, args: &Arguments, body: &Expr) -> Result<String> {
|
||||
type_comment: None,
|
||||
},
|
||||
);
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_stmt(&func);
|
||||
Ok(generator.generate()?)
|
||||
}
|
||||
@@ -301,7 +316,7 @@ pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr,
|
||||
if !match_leading_content(stmt, checker.locator)
|
||||
&& !match_trailing_content(stmt, checker.locator)
|
||||
{
|
||||
match function(id, args, body) {
|
||||
match function(id, args, body, checker.style) {
|
||||
Ok(content) => {
|
||||
let first_line = checker.locator.slice_source_code_range(&Range {
|
||||
location: Location::new(stmt.location.row(), 0),
|
||||
|
||||
@@ -7,7 +7,9 @@ use serde::{Deserialize, Serialize};
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum Convention {
|
||||
/// Use Google-style docstrings.
|
||||
Google,
|
||||
/// Use NumPy-style docstrings.
|
||||
Numpy,
|
||||
}
|
||||
|
||||
@@ -17,7 +19,7 @@ pub enum Convention {
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case", rename = "Pydocstyle")]
|
||||
pub struct Options {
|
||||
#[option(
|
||||
default = r#""convention""#,
|
||||
default = r#"None"#,
|
||||
value_type = "Convention",
|
||||
example = r#"
|
||||
# Use Google-style docstrings.
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::str::FromStr;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::vendored::cformat::{
|
||||
use crate::vendor::cformat::{
|
||||
CFormatError, CFormatPart, CFormatQuantity, CFormatSpec, CFormatString,
|
||||
};
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::fmt;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::vendored::format::{
|
||||
use crate::vendor::format::{
|
||||
FieldName, FieldType, FormatParseError, FormatPart, FormatString, FromTemplate,
|
||||
};
|
||||
|
||||
@@ -82,7 +82,7 @@ impl TryFrom<&str> for FormatSummary {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::vendored::format::FromTemplate;
|
||||
use crate::vendor::format::FromTemplate;
|
||||
|
||||
#[test]
|
||||
fn test_format_summary() {
|
||||
|
||||
@@ -20,6 +20,7 @@ mod tests {
|
||||
use crate::linter::{check_path, test_path};
|
||||
use crate::settings::flags;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
use crate::{directives, rustpython_helpers, settings};
|
||||
|
||||
#[test_case(CheckCode::F401, Path::new("F401_0.py"); "F401_0")]
|
||||
@@ -171,6 +172,7 @@ mod tests {
|
||||
let settings = settings::Settings::for_rules(CheckCodePrefix::F.codes());
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
let locator = SourceCodeLocator::new(&contents);
|
||||
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
|
||||
let directives = directives::extract_directives(
|
||||
&tokens,
|
||||
&locator,
|
||||
@@ -182,6 +184,7 @@ mod tests {
|
||||
&contents,
|
||||
tokens,
|
||||
&locator,
|
||||
&stylist,
|
||||
&directives,
|
||||
&settings,
|
||||
flags::Autofix::Enabled,
|
||||
|
||||
@@ -17,6 +17,7 @@ mod tests {
|
||||
#[test_case(CheckCode::PGH002, Path::new("PGH002_0.py"); "PGH002_0")]
|
||||
#[test_case(CheckCode::PGH002, Path::new("PGH002_1.py"); "PGH002_1")]
|
||||
#[test_case(CheckCode::PGH003, Path::new("PGH003_0.py"); "PGH003_0")]
|
||||
#[test_case(CheckCode::PGH004, Path::new("PGH004_0.py"); "PGH004_0")]
|
||||
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
|
||||
let mut checks = test_path(
|
||||
|
||||
22
src/pygrep_hooks/plugins/blanket_noqa.rs
Normal file
22
src/pygrep_hooks/plugins/blanket_noqa.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_ast::Location;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
|
||||
static BLANKET_NOQA_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"(?i)# noqa($|\s|:[^ ])").unwrap());
|
||||
|
||||
/// PGH004 - use of blanket noqa comments
|
||||
pub fn blanket_noqa(lineno: usize, line: &str) -> Option<Check> {
|
||||
BLANKET_NOQA_REGEX.find(line).map(|m| {
|
||||
Check::new(
|
||||
CheckKind::BlanketNOQA,
|
||||
Range {
|
||||
location: Location::new(lineno + 1, m.start()),
|
||||
end_location: Location::new(lineno + 1, m.end()),
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
pub use blanket_noqa::blanket_noqa;
|
||||
pub use blanket_type_ignore::blanket_type_ignore;
|
||||
pub use deprecated_log_warn::deprecated_log_warn;
|
||||
pub use no_eval::no_eval;
|
||||
|
||||
mod blanket_noqa;
|
||||
mod blanket_type_ignore;
|
||||
mod deprecated_log_warn;
|
||||
mod no_eval;
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
---
|
||||
source: src/pygrep_hooks/mod.rs
|
||||
expression: checks
|
||||
---
|
||||
- kind: BlanketNOQA
|
||||
location:
|
||||
row: 1
|
||||
column: 7
|
||||
end_location:
|
||||
row: 1
|
||||
column: 13
|
||||
fix: ~
|
||||
- kind: BlanketNOQA
|
||||
location:
|
||||
row: 2
|
||||
column: 7
|
||||
end_location:
|
||||
row: 2
|
||||
column: 15
|
||||
fix: ~
|
||||
- kind: BlanketNOQA
|
||||
location:
|
||||
row: 3
|
||||
column: 0
|
||||
end_location:
|
||||
row: 3
|
||||
column: 6
|
||||
fix: ~
|
||||
- kind: BlanketNOQA
|
||||
location:
|
||||
row: 4
|
||||
column: 0
|
||||
end_location:
|
||||
row: 4
|
||||
column: 6
|
||||
fix: ~
|
||||
- kind: BlanketNOQA
|
||||
location:
|
||||
row: 5
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
column: 8
|
||||
fix: ~
|
||||
- kind: BlanketNOQA
|
||||
location:
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 8
|
||||
fix: ~
|
||||
|
||||
@@ -40,6 +40,8 @@ mod tests {
|
||||
#[test_case(CheckCode::UP018, Path::new("UP018.py"); "UP018")]
|
||||
#[test_case(CheckCode::UP019, Path::new("UP019.py"); "UP019")]
|
||||
#[test_case(CheckCode::UP021, Path::new("UP021.py"); "UP021")]
|
||||
#[test_case(CheckCode::UP022, Path::new("UP022.py"); "UP022")]
|
||||
#[test_case(CheckCode::UP023, Path::new("UP023.py"); "UP023")]
|
||||
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
|
||||
let mut checks = test_path(
|
||||
|
||||
@@ -7,9 +7,10 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::python::identifiers::IDENTIFIER_REGEX;
|
||||
use crate::python::keyword::KWLIST;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
|
||||
/// Return the typename, args, keywords and mother class
|
||||
fn match_named_tuple_assign<'a>(
|
||||
@@ -163,8 +164,9 @@ fn convert_to_class(
|
||||
typename: &str,
|
||||
body: Vec<Stmt>,
|
||||
base_class: &ExprKind,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Fix> {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_stmt(&create_class_def_stmt(typename, body, base_class));
|
||||
let content = generator.generate()?;
|
||||
Ok(Fix::replacement(
|
||||
@@ -194,7 +196,7 @@ pub fn convert_named_tuple_functional_to_class(
|
||||
Range::from_located(stmt),
|
||||
);
|
||||
if checker.patch(check.kind.code()) {
|
||||
match convert_to_class(stmt, typename, properties, base_class) {
|
||||
match convert_to_class(stmt, typename, properties, base_class, checker.style) {
|
||||
Ok(fix) => check.amend(fix),
|
||||
Err(err) => error!("Failed to convert `NamedTuple`: {err}"),
|
||||
}
|
||||
|
||||
@@ -9,9 +9,10 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::python::identifiers::IDENTIFIER_REGEX;
|
||||
use crate::python::keyword::KWLIST;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
|
||||
/// Return the class name, arguments, keywords and base class for a `TypedDict`
|
||||
/// assignment.
|
||||
@@ -196,8 +197,9 @@ fn convert_to_class(
|
||||
body: Vec<Stmt>,
|
||||
total_keyword: Option<KeywordData>,
|
||||
base_class: &ExprKind,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Fix> {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_stmt(&create_class_def_stmt(
|
||||
class_name,
|
||||
body,
|
||||
@@ -236,7 +238,14 @@ pub fn convert_typed_dict_functional_to_class(
|
||||
Range::from_located(stmt),
|
||||
);
|
||||
if checker.patch(check.kind.code()) {
|
||||
match convert_to_class(stmt, class_name, body, total_keyword, base_class) {
|
||||
match convert_to_class(
|
||||
stmt,
|
||||
class_name,
|
||||
body,
|
||||
total_keyword,
|
||||
base_class,
|
||||
checker.style,
|
||||
) {
|
||||
Ok(fix) => check.amend(fix),
|
||||
Err(err) => error!("Failed to convert TypedDict: {err}"),
|
||||
};
|
||||
|
||||
@@ -6,7 +6,9 @@ pub use native_literals::native_literals;
|
||||
pub use open_alias::open_alias;
|
||||
pub use redundant_open_modes::redundant_open_modes;
|
||||
pub use remove_six_compat::remove_six_compat;
|
||||
pub use replace_stdout_stderr::replace_stdout_stderr;
|
||||
pub use replace_universal_newlines::replace_universal_newlines;
|
||||
pub use rewrite_c_element_tree::replace_c_element_tree;
|
||||
pub use super_call_with_parameters::super_call_with_parameters;
|
||||
pub use type_of_primitive::type_of_primitive;
|
||||
pub use typing_text_str_alias::typing_text_str_alias;
|
||||
@@ -26,7 +28,9 @@ mod native_literals;
|
||||
mod open_alias;
|
||||
mod redundant_open_modes;
|
||||
mod remove_six_compat;
|
||||
mod replace_stdout_stderr;
|
||||
mod replace_universal_newlines;
|
||||
mod rewrite_c_element_tree;
|
||||
mod super_call_with_parameters;
|
||||
mod type_of_primitive;
|
||||
mod typing_text_str_alias;
|
||||
|
||||
@@ -7,7 +7,8 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckCode, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
use crate::SourceCodeLocator;
|
||||
|
||||
/// Return `true` if the `Expr` is a reference to `${module}.${any}`.
|
||||
@@ -87,13 +88,14 @@ fn replace_call_on_arg_by_arg_attribute(
|
||||
arg: &Expr,
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Check> {
|
||||
let attribute = ExprKind::Attribute {
|
||||
value: Box::new(arg.clone()),
|
||||
attr: attr.to_string(),
|
||||
ctx: ExprContext::Load,
|
||||
};
|
||||
replace_by_expr_kind(attribute, expr, patch)
|
||||
replace_by_expr_kind(attribute, expr, patch, stylist)
|
||||
}
|
||||
|
||||
// `func(arg, **args)` => `arg.method(**args)`
|
||||
@@ -102,6 +104,7 @@ fn replace_call_on_arg_by_arg_method_call(
|
||||
args: &[Expr],
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Option<Check>> {
|
||||
if args.is_empty() {
|
||||
bail!("Expected at least one argument");
|
||||
@@ -119,7 +122,7 @@ fn replace_call_on_arg_by_arg_method_call(
|
||||
.collect(),
|
||||
keywords: vec![],
|
||||
};
|
||||
let expr = replace_by_expr_kind(call, expr, patch)?;
|
||||
let expr = replace_by_expr_kind(call, expr, patch, stylist)?;
|
||||
Ok(Some(expr))
|
||||
} else {
|
||||
Ok(None)
|
||||
@@ -127,10 +130,15 @@ fn replace_call_on_arg_by_arg_method_call(
|
||||
}
|
||||
|
||||
// `expr` => `Expr(expr_kind)`
|
||||
fn replace_by_expr_kind(node: ExprKind, expr: &Expr, patch: bool) -> Result<Check> {
|
||||
fn replace_by_expr_kind(
|
||||
node: ExprKind,
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Check> {
|
||||
let mut check = Check::new(CheckKind::RemoveSixCompat, Range::from_located(expr));
|
||||
if patch {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_expr(&create_expr(node), 0);
|
||||
let content = generator.generate()?;
|
||||
check.amend(Fix::replacement(
|
||||
@@ -142,10 +150,15 @@ fn replace_by_expr_kind(node: ExprKind, expr: &Expr, patch: bool) -> Result<Chec
|
||||
Ok(check)
|
||||
}
|
||||
|
||||
fn replace_by_stmt_kind(node: StmtKind, expr: &Expr, patch: bool) -> Result<Check> {
|
||||
fn replace_by_stmt_kind(
|
||||
node: StmtKind,
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Check> {
|
||||
let mut check = Check::new(CheckKind::RemoveSixCompat, Range::from_located(expr));
|
||||
if patch {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(stylist.indentation(), stylist.quote());
|
||||
generator.unparse_stmt(&create_stmt(node));
|
||||
let content = generator.generate()?;
|
||||
check.amend(Fix::replacement(
|
||||
@@ -163,12 +176,13 @@ fn replace_by_raise_from(
|
||||
cause: Option<ExprKind>,
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Check> {
|
||||
let stmt_kind = StmtKind::Raise {
|
||||
exc: exc.map(|exc| Box::new(create_expr(exc))),
|
||||
cause: cause.map(|cause| Box::new(create_expr(cause))),
|
||||
};
|
||||
replace_by_stmt_kind(stmt_kind, expr, patch)
|
||||
replace_by_stmt_kind(stmt_kind, expr, patch, stylist)
|
||||
}
|
||||
|
||||
fn replace_by_index_on_arg(
|
||||
@@ -176,16 +190,22 @@ fn replace_by_index_on_arg(
|
||||
index: &ExprKind,
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Check> {
|
||||
let index = ExprKind::Subscript {
|
||||
value: Box::new(create_expr(arg.node.clone())),
|
||||
slice: Box::new(create_expr(index.clone())),
|
||||
ctx: ExprContext::Load,
|
||||
};
|
||||
replace_by_expr_kind(index, expr, patch)
|
||||
replace_by_expr_kind(index, expr, patch, stylist)
|
||||
}
|
||||
|
||||
fn handle_reraise(args: &[Expr], expr: &Expr, patch: bool) -> Result<Option<Check>> {
|
||||
fn handle_reraise(
|
||||
args: &[Expr],
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
) -> Result<Option<Check>> {
|
||||
if let [_, exc, tb] = args {
|
||||
let check = replace_by_raise_from(
|
||||
Some(ExprKind::Call {
|
||||
@@ -200,6 +220,7 @@ fn handle_reraise(args: &[Expr], expr: &Expr, patch: bool) -> Result<Option<Chec
|
||||
None,
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?;
|
||||
Ok(Some(check))
|
||||
} else if let [arg] = args {
|
||||
@@ -208,7 +229,7 @@ fn handle_reraise(args: &[Expr], expr: &Expr, patch: bool) -> Result<Option<Chec
|
||||
if let ExprKind::Attribute { value, attr, .. } = &func.node {
|
||||
if let ExprKind::Name { id, .. } = &value.node {
|
||||
if id == "sys" && attr == "exc_info" {
|
||||
let check = replace_by_raise_from(None, None, expr, patch)?;
|
||||
let check = replace_by_raise_from(None, None, expr, patch, stylist)?;
|
||||
return Ok(Some(check));
|
||||
};
|
||||
};
|
||||
@@ -227,6 +248,7 @@ fn handle_func(
|
||||
keywords: &[Keyword],
|
||||
expr: &Expr,
|
||||
patch: bool,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
locator: &SourceCodeLocator,
|
||||
) -> Result<Option<Check>> {
|
||||
let func_name = match &func.node {
|
||||
@@ -241,72 +263,82 @@ fn handle_func(
|
||||
("ensure_str", [arg], []) => replace_by_str_literal(arg, false, expr, patch, locator),
|
||||
("ensure_text", [arg], []) => replace_by_str_literal(arg, false, expr, patch, locator),
|
||||
("iteritems", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("items", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("items", args, expr, patch, stylist)?
|
||||
}
|
||||
("viewitems", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("items", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("items", args, expr, patch, stylist)?
|
||||
}
|
||||
("iterkeys", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("keys", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("keys", args, expr, patch, stylist)?
|
||||
}
|
||||
("viewkeys", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("keys", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("keys", args, expr, patch, stylist)?
|
||||
}
|
||||
("itervalues", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("values", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("values", args, expr, patch, stylist)?
|
||||
}
|
||||
("viewvalues", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("values", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("values", args, expr, patch, stylist)?
|
||||
}
|
||||
("get_method_function", [arg], []) => Some(replace_call_on_arg_by_arg_attribute(
|
||||
"__func__", arg, expr, patch,
|
||||
"__func__", arg, expr, patch, stylist,
|
||||
)?),
|
||||
("get_method_self", [arg], []) => Some(replace_call_on_arg_by_arg_attribute(
|
||||
"__self__", arg, expr, patch,
|
||||
"__self__", arg, expr, patch, stylist,
|
||||
)?),
|
||||
("get_function_closure", [arg], []) => Some(replace_call_on_arg_by_arg_attribute(
|
||||
"__closure__",
|
||||
arg,
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("get_function_code", [arg], []) => Some(replace_call_on_arg_by_arg_attribute(
|
||||
"__code__", arg, expr, patch,
|
||||
"__code__", arg, expr, patch, stylist,
|
||||
)?),
|
||||
("get_function_defaults", [arg], []) => Some(replace_call_on_arg_by_arg_attribute(
|
||||
"__defaults__",
|
||||
arg,
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("get_function_globals", [arg], []) => Some(replace_call_on_arg_by_arg_attribute(
|
||||
"__globals__",
|
||||
arg,
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("create_unbound_method", [arg, _], _) => Some(replace_by_expr_kind(
|
||||
arg.node.clone(),
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("get_unbound_function", [arg], []) => Some(replace_by_expr_kind(
|
||||
arg.node.clone(),
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("create_unbound_method", [arg, _], _) => {
|
||||
Some(replace_by_expr_kind(arg.node.clone(), expr, patch)?)
|
||||
}
|
||||
("get_unbound_function", [arg], []) => {
|
||||
Some(replace_by_expr_kind(arg.node.clone(), expr, patch)?)
|
||||
}
|
||||
("assertCountEqual", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("assertCountEqual", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("assertCountEqual", args, expr, patch, stylist)?
|
||||
}
|
||||
("assertRaisesRegex", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("assertRaisesRegex", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("assertRaisesRegex", args, expr, patch, stylist)?
|
||||
}
|
||||
("assertRegex", args, []) => {
|
||||
replace_call_on_arg_by_arg_method_call("assertRegex", args, expr, patch)?
|
||||
replace_call_on_arg_by_arg_method_call("assertRegex", args, expr, patch, stylist)?
|
||||
}
|
||||
("raise_from", [exc, cause], []) => Some(replace_by_raise_from(
|
||||
Some(exc.node.clone()),
|
||||
Some(cause.node.clone()),
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("reraise", args, []) => handle_reraise(args, expr, patch)?,
|
||||
("reraise", args, []) => handle_reraise(args, expr, patch, stylist)?,
|
||||
("byte2int", [arg], []) => Some(replace_by_index_on_arg(
|
||||
arg,
|
||||
&ExprKind::Constant {
|
||||
@@ -315,10 +347,15 @@ fn handle_func(
|
||||
},
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("indexbytes", [arg, index], []) => Some(replace_by_index_on_arg(
|
||||
arg,
|
||||
&index.node,
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
("indexbytes", [arg, index], []) => {
|
||||
Some(replace_by_index_on_arg(arg, &index.node, expr, patch)?)
|
||||
}
|
||||
("int2byte", [arg], []) => Some(replace_by_expr_kind(
|
||||
ExprKind::Call {
|
||||
func: Box::new(create_expr(ExprKind::Name {
|
||||
@@ -333,6 +370,7 @@ fn handle_func(
|
||||
},
|
||||
expr,
|
||||
patch,
|
||||
stylist,
|
||||
)?),
|
||||
_ => None,
|
||||
};
|
||||
@@ -382,6 +420,7 @@ fn handle_next_on_six_dict(expr: &Expr, patch: bool, checker: &Checker) -> Resul
|
||||
},
|
||||
arg,
|
||||
patch,
|
||||
checker.style,
|
||||
) {
|
||||
Ok(check) => Ok(Some(check)),
|
||||
Err(err) => Err(err),
|
||||
@@ -409,7 +448,15 @@ pub fn remove_six_compat(checker: &mut Checker, expr: &Expr) {
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
} => match handle_func(func, args, keywords, expr, patch, checker.locator) {
|
||||
} => match handle_func(
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
expr,
|
||||
patch,
|
||||
checker.style,
|
||||
checker.locator,
|
||||
) {
|
||||
Ok(check) => check,
|
||||
Err(err) => {
|
||||
error!("Failed to remove `six` reference: {err}");
|
||||
|
||||
112
src/pyupgrade/plugins/replace_stdout_stderr.rs
Normal file
112
src/pyupgrade/plugins/replace_stdout_stderr.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
use rustpython_ast::{Expr, Keyword};
|
||||
|
||||
use crate::ast::helpers::{find_keyword, match_module_member};
|
||||
use crate::ast::types::Range;
|
||||
use crate::ast::whitespace::indentation;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MiddleContent<'a> {
|
||||
contents: &'a str,
|
||||
multi_line: bool,
|
||||
}
|
||||
|
||||
/// Return the number of "dirty" characters.
|
||||
fn dirty_count(iter: impl Iterator<Item = char>) -> usize {
|
||||
let mut the_count = 0;
|
||||
for current_char in iter {
|
||||
if current_char == ' ' || current_char == ',' || current_char == '\n' {
|
||||
the_count += 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
the_count
|
||||
}
|
||||
|
||||
/// Extract the `Middle` content between two arguments.
|
||||
fn extract_middle(contents: &str) -> Option<MiddleContent> {
|
||||
let multi_line = contents.contains('\n');
|
||||
let start_gap = dirty_count(contents.chars());
|
||||
if contents.len() == start_gap {
|
||||
return None;
|
||||
}
|
||||
let end_gap = dirty_count(contents.chars().rev());
|
||||
Some(MiddleContent {
|
||||
contents: &contents[start_gap..contents.len() - end_gap],
|
||||
multi_line,
|
||||
})
|
||||
}
|
||||
|
||||
/// UP022
|
||||
pub fn replace_stdout_stderr(checker: &mut Checker, expr: &Expr, kwargs: &[Keyword]) {
|
||||
if match_module_member(
|
||||
expr,
|
||||
"subprocess",
|
||||
"run",
|
||||
&checker.from_imports,
|
||||
&checker.import_aliases,
|
||||
) {
|
||||
// Find `stdout` and `stderr` kwargs.
|
||||
let Some(stdout) = find_keyword(kwargs, "stdout") else {
|
||||
return;
|
||||
};
|
||||
let Some(stderr) = find_keyword(kwargs, "stderr") else {
|
||||
return;
|
||||
};
|
||||
|
||||
// Verify that they're both set to `subprocess.PIPE`.
|
||||
if !match_module_member(
|
||||
&stdout.node.value,
|
||||
"subprocess",
|
||||
"PIPE",
|
||||
&checker.from_imports,
|
||||
&checker.import_aliases,
|
||||
) || !match_module_member(
|
||||
&stderr.node.value,
|
||||
"subprocess",
|
||||
"PIPE",
|
||||
&checker.from_imports,
|
||||
&checker.import_aliases,
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut check = Check::new(CheckKind::ReplaceStdoutStderr, Range::from_located(expr));
|
||||
if checker.patch(check.kind.code()) {
|
||||
let first = if stdout.location < stderr.location {
|
||||
stdout
|
||||
} else {
|
||||
stderr
|
||||
};
|
||||
let last = if stdout.location > stderr.location {
|
||||
stdout
|
||||
} else {
|
||||
stderr
|
||||
};
|
||||
let mut contents = String::from("capture_output=True");
|
||||
if let Some(middle) = extract_middle(&checker.locator.slice_source_code_range(&Range {
|
||||
location: first.end_location.unwrap(),
|
||||
end_location: last.location,
|
||||
})) {
|
||||
if middle.multi_line {
|
||||
contents.push(',');
|
||||
contents.push('\n');
|
||||
contents.push_str(&indentation(checker, first));
|
||||
} else {
|
||||
contents.push(',');
|
||||
contents.push(' ');
|
||||
}
|
||||
contents.push_str(middle.contents);
|
||||
}
|
||||
check.amend(Fix::replacement(
|
||||
contents,
|
||||
first.location,
|
||||
last.end_location.unwrap(),
|
||||
));
|
||||
}
|
||||
checker.add_check(check);
|
||||
}
|
||||
}
|
||||
57
src/pyupgrade/plugins/rewrite_c_element_tree.rs
Normal file
57
src/pyupgrade/plugins/rewrite_c_element_tree.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
use rustpython_ast::{Located, Stmt, StmtKind};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
|
||||
fn add_check_for_node<T>(checker: &mut Checker, node: &Located<T>) {
|
||||
let mut check = Check::new(CheckKind::RewriteCElementTree, Range::from_located(node));
|
||||
if checker.patch(check.kind.code()) {
|
||||
let contents = checker
|
||||
.locator
|
||||
.slice_source_code_range(&Range::from_located(node));
|
||||
check.amend(Fix::replacement(
|
||||
contents.replacen("cElementTree", "ElementTree", 1),
|
||||
node.location,
|
||||
node.end_location.unwrap(),
|
||||
));
|
||||
}
|
||||
checker.add_check(check);
|
||||
}
|
||||
|
||||
/// UP023
|
||||
pub fn replace_c_element_tree(checker: &mut Checker, stmt: &Stmt) {
|
||||
match &stmt.node {
|
||||
StmtKind::Import { names } => {
|
||||
// Ex) `import xml.etree.cElementTree as ET`
|
||||
for name in names {
|
||||
if name.node.name == "xml.etree.cElementTree" && name.node.asname.is_some() {
|
||||
add_check_for_node(checker, name);
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::ImportFrom {
|
||||
module,
|
||||
names,
|
||||
level,
|
||||
} => {
|
||||
if level.map_or(false, |level| level > 0) {
|
||||
// Ex) `import .xml.etree.cElementTree as ET`
|
||||
} else if let Some(module) = module {
|
||||
if module == "xml.etree.cElementTree" {
|
||||
// Ex) `from xml.etree.cElementTree import XML`
|
||||
add_check_for_node(checker, stmt);
|
||||
} else if module == "xml.etree" {
|
||||
// Ex) `from xml.etree import cElementTree as ET`
|
||||
for name in names {
|
||||
if name.node.name == "cElementTree" && name.node.asname.is_some() {
|
||||
add_check_for_node(checker, name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => unreachable!("Expected StmtKind::Import | StmtKind::ImportFrom"),
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,7 @@ use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::checks::{Check, CheckKind};
|
||||
use crate::code_gen::SourceGenerator;
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
|
||||
fn optional(expr: &Expr) -> Expr {
|
||||
Expr::new(
|
||||
@@ -65,7 +65,8 @@ pub fn use_pep604_annotation(checker: &mut Checker, expr: &Expr, value: &Expr, s
|
||||
if checker.match_typing_call_path(&call_path, "Optional") {
|
||||
let mut check = Check::new(CheckKind::UsePEP604Annotation, Range::from_located(expr));
|
||||
if checker.patch(check.kind.code()) {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator =
|
||||
SourceCodeGenerator::new(checker.style.indentation(), checker.style.quote());
|
||||
generator.unparse_expr(&optional(slice), 0);
|
||||
if let Ok(content) = generator.generate() {
|
||||
check.amend(Fix::replacement(
|
||||
@@ -84,7 +85,10 @@ pub fn use_pep604_annotation(checker: &mut Checker, expr: &Expr, value: &Expr, s
|
||||
// Invalid type annotation.
|
||||
}
|
||||
ExprKind::Tuple { elts, .. } => {
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(
|
||||
checker.style.indentation(),
|
||||
checker.style.quote(),
|
||||
);
|
||||
generator.unparse_expr(&union(elts), 0);
|
||||
if let Ok(content) = generator.generate() {
|
||||
check.amend(Fix::replacement(
|
||||
@@ -96,7 +100,10 @@ pub fn use_pep604_annotation(checker: &mut Checker, expr: &Expr, value: &Expr, s
|
||||
}
|
||||
_ => {
|
||||
// Single argument.
|
||||
let mut generator = SourceGenerator::new();
|
||||
let mut generator = SourceCodeGenerator::new(
|
||||
checker.style.indentation(),
|
||||
checker.style.quote(),
|
||||
);
|
||||
generator.unparse_expr(slice, 0);
|
||||
if let Ok(content) = generator.generate() {
|
||||
check.amend(Fix::replacement(
|
||||
|
||||
@@ -75,7 +75,7 @@ expression: checks
|
||||
row: 17
|
||||
column: 46
|
||||
fix:
|
||||
content: "class MyType5(TypedDict):\n a: 'hello'"
|
||||
content: "class MyType5(TypedDict):\n a: \"hello\""
|
||||
location:
|
||||
row: 17
|
||||
column: 0
|
||||
@@ -91,7 +91,7 @@ expression: checks
|
||||
row: 18
|
||||
column: 41
|
||||
fix:
|
||||
content: "class MyType6(TypedDict):\n a: 'hello'"
|
||||
content: "class MyType6(TypedDict):\n a: \"hello\""
|
||||
location:
|
||||
row: 18
|
||||
column: 0
|
||||
@@ -139,7 +139,7 @@ expression: checks
|
||||
row: 30
|
||||
column: 59
|
||||
fix:
|
||||
content: "class MyType10(TypedDict):\n key: Literal['value']"
|
||||
content: "class MyType10(TypedDict):\n key: Literal[\"value\"]"
|
||||
location:
|
||||
row: 30
|
||||
column: 0
|
||||
|
||||
@@ -27,7 +27,7 @@ expression: checks
|
||||
row: 12
|
||||
column: 1
|
||||
fix:
|
||||
content: "class NT2(NamedTuple):\n a: int\n b: str = 'foo'\n c: list[bool] = [True]"
|
||||
content: "class NT2(NamedTuple):\n a: int\n b: str = \"foo\"\n c: list[bool] = [True]"
|
||||
location:
|
||||
row: 8
|
||||
column: 0
|
||||
|
||||
@@ -0,0 +1,110 @@
|
||||
---
|
||||
source: src/pyupgrade/mod.rs
|
||||
expression: checks
|
||||
---
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 4
|
||||
column: 9
|
||||
end_location:
|
||||
row: 4
|
||||
column: 69
|
||||
fix:
|
||||
content: capture_output=True
|
||||
location:
|
||||
row: 4
|
||||
column: 22
|
||||
end_location:
|
||||
row: 4
|
||||
column: 68
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 6
|
||||
column: 9
|
||||
end_location:
|
||||
row: 6
|
||||
column: 80
|
||||
fix:
|
||||
content: capture_output=True
|
||||
location:
|
||||
row: 6
|
||||
column: 33
|
||||
end_location:
|
||||
row: 6
|
||||
column: 79
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 8
|
||||
column: 9
|
||||
end_location:
|
||||
row: 8
|
||||
column: 86
|
||||
fix:
|
||||
content: "capture_output=True, args=[\"foo\"]"
|
||||
location:
|
||||
row: 8
|
||||
column: 24
|
||||
end_location:
|
||||
row: 8
|
||||
column: 85
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 10
|
||||
column: 9
|
||||
end_location:
|
||||
row: 12
|
||||
column: 1
|
||||
fix:
|
||||
content: "capture_output=True, check=True"
|
||||
location:
|
||||
row: 11
|
||||
column: 13
|
||||
end_location:
|
||||
row: 11
|
||||
column: 71
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 14
|
||||
column: 9
|
||||
end_location:
|
||||
row: 16
|
||||
column: 1
|
||||
fix:
|
||||
content: "capture_output=True, check=True"
|
||||
location:
|
||||
row: 15
|
||||
column: 13
|
||||
end_location:
|
||||
row: 15
|
||||
column: 71
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 18
|
||||
column: 9
|
||||
end_location:
|
||||
row: 26
|
||||
column: 1
|
||||
fix:
|
||||
content: "capture_output=True,\n check=True"
|
||||
location:
|
||||
row: 20
|
||||
column: 4
|
||||
end_location:
|
||||
row: 22
|
||||
column: 26
|
||||
- kind: ReplaceStdoutStderr
|
||||
location:
|
||||
row: 29
|
||||
column: 13
|
||||
end_location:
|
||||
row: 36
|
||||
column: 5
|
||||
fix:
|
||||
content: "capture_output=True,\n check=True"
|
||||
location:
|
||||
row: 31
|
||||
column: 8
|
||||
end_location:
|
||||
row: 33
|
||||
column: 30
|
||||
|
||||
@@ -0,0 +1,155 @@
|
||||
---
|
||||
source: src/pyupgrade/mod.rs
|
||||
expression: checks
|
||||
---
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 2
|
||||
column: 0
|
||||
end_location:
|
||||
row: 2
|
||||
column: 59
|
||||
fix:
|
||||
content: "from xml.etree.ElementTree import XML, Element, SubElement"
|
||||
location:
|
||||
row: 2
|
||||
column: 0
|
||||
end_location:
|
||||
row: 2
|
||||
column: 59
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 3
|
||||
column: 7
|
||||
end_location:
|
||||
row: 3
|
||||
column: 35
|
||||
fix:
|
||||
content: xml.etree.ElementTree as ET
|
||||
location:
|
||||
row: 3
|
||||
column: 7
|
||||
end_location:
|
||||
row: 3
|
||||
column: 35
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 44
|
||||
fix:
|
||||
content: from xml.etree.ElementTree import XML
|
||||
location:
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 44
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 7
|
||||
column: 10
|
||||
end_location:
|
||||
row: 7
|
||||
column: 49
|
||||
fix:
|
||||
content: xml.etree.ElementTree as ET
|
||||
location:
|
||||
row: 7
|
||||
column: 10
|
||||
end_location:
|
||||
row: 7
|
||||
column: 49
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 10
|
||||
column: 0
|
||||
end_location:
|
||||
row: 14
|
||||
column: 1
|
||||
fix:
|
||||
content: "from xml.etree.ElementTree import (\n XML,\n Element,\n SubElement,\n)"
|
||||
location:
|
||||
row: 10
|
||||
column: 0
|
||||
end_location:
|
||||
row: 14
|
||||
column: 1
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 16
|
||||
column: 11
|
||||
end_location:
|
||||
row: 16
|
||||
column: 39
|
||||
fix:
|
||||
content: xml.etree.ElementTree as ET
|
||||
location:
|
||||
row: 16
|
||||
column: 11
|
||||
end_location:
|
||||
row: 16
|
||||
column: 39
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 17
|
||||
column: 26
|
||||
end_location:
|
||||
row: 17
|
||||
column: 45
|
||||
fix:
|
||||
content: ElementTree as CET
|
||||
location:
|
||||
row: 17
|
||||
column: 26
|
||||
end_location:
|
||||
row: 17
|
||||
column: 45
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 19
|
||||
column: 22
|
||||
end_location:
|
||||
row: 19
|
||||
column: 40
|
||||
fix:
|
||||
content: ElementTree as ET
|
||||
location:
|
||||
row: 19
|
||||
column: 22
|
||||
end_location:
|
||||
row: 19
|
||||
column: 40
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 21
|
||||
column: 19
|
||||
end_location:
|
||||
row: 21
|
||||
column: 47
|
||||
fix:
|
||||
content: xml.etree.ElementTree as ET
|
||||
location:
|
||||
row: 21
|
||||
column: 19
|
||||
end_location:
|
||||
row: 21
|
||||
column: 47
|
||||
- kind: RewriteCElementTree
|
||||
location:
|
||||
row: 24
|
||||
column: 31
|
||||
end_location:
|
||||
row: 24
|
||||
column: 59
|
||||
fix:
|
||||
content: xml.etree.ElementTree as ET
|
||||
location:
|
||||
row: 24
|
||||
column: 31
|
||||
end_location:
|
||||
row: 24
|
||||
column: 59
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_ast::Location;
|
||||
use rustpython_ast::{Expr, ExprKind, Keyword, KeywordData, Location};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
@@ -1680,3 +1680,25 @@ pub fn ambiguous_unicode_character(
|
||||
|
||||
checks
|
||||
}
|
||||
|
||||
/// RUF004
|
||||
pub fn keyword_argument_before_star_argument(args: &[Expr], keywords: &[Keyword]) -> Vec<Check> {
|
||||
let mut checks = vec![];
|
||||
if let Some(arg) = args
|
||||
.iter()
|
||||
.rfind(|arg| matches!(arg.node, ExprKind::Starred { .. }))
|
||||
{
|
||||
for keyword in keywords {
|
||||
if keyword.location < arg.location {
|
||||
let KeywordData { arg, .. } = &keyword.node;
|
||||
if let Some(arg) = arg {
|
||||
checks.push(Check::new(
|
||||
CheckKind::KeywordArgumentBeforeStarArgument(arg.to_string()),
|
||||
Range::from_located(keyword),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
checks
|
||||
}
|
||||
|
||||
@@ -8,10 +8,24 @@ mod tests {
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashSet;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::checks::CheckCode;
|
||||
use crate::linter::test_path;
|
||||
use crate::settings;
|
||||
#[test_case(CheckCode::RUF004, Path::new("RUF004.py"); "RUF004")]
|
||||
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
|
||||
let mut checks = test_path(
|
||||
Path::new("./resources/test/fixtures/ruff")
|
||||
.join(path)
|
||||
.as_path(),
|
||||
&settings::Settings::for_rule(check_code),
|
||||
)?;
|
||||
checks.sort_by_key(|check| check.location);
|
||||
insta::assert_yaml_snapshot!(snapshot, checks);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn confusables() -> Result<()> {
|
||||
|
||||
32
src/ruff/snapshots/ruff__ruff__tests__RUF004_RUF004.py.snap
Normal file
32
src/ruff/snapshots/ruff__ruff__tests__RUF004_RUF004.py.snap
Normal file
@@ -0,0 +1,32 @@
|
||||
---
|
||||
source: src/ruff/mod.rs
|
||||
expression: checks
|
||||
---
|
||||
- kind:
|
||||
KeywordArgumentBeforeStarArgument: kw
|
||||
location:
|
||||
row: 13
|
||||
column: 2
|
||||
end_location:
|
||||
row: 13
|
||||
column: 6
|
||||
fix: ~
|
||||
- kind:
|
||||
KeywordArgumentBeforeStarArgument: kw
|
||||
location:
|
||||
row: 14
|
||||
column: 2
|
||||
end_location:
|
||||
row: 14
|
||||
column: 6
|
||||
fix: ~
|
||||
- kind:
|
||||
KeywordArgumentBeforeStarArgument: kw
|
||||
location:
|
||||
row: 15
|
||||
column: 6
|
||||
end_location:
|
||||
row: 15
|
||||
column: 10
|
||||
fix: ~
|
||||
|
||||
@@ -28,6 +28,7 @@ use crate::{
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Configuration {
|
||||
pub allowed_confusables: Option<Vec<char>>,
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
pub dummy_variable_rgx: Option<Regex>,
|
||||
pub exclude: Option<Vec<FilePattern>>,
|
||||
pub extend: Option<PathBuf>,
|
||||
@@ -38,8 +39,8 @@ pub struct Configuration {
|
||||
pub fix: Option<bool>,
|
||||
pub fix_only: Option<bool>,
|
||||
pub fixable: Option<Vec<CheckCodePrefix>>,
|
||||
pub format: Option<SerializationFormat>,
|
||||
pub force_exclude: Option<bool>,
|
||||
pub format: Option<SerializationFormat>,
|
||||
pub ignore: Option<Vec<CheckCodePrefix>>,
|
||||
pub ignore_init_module_imports: Option<bool>,
|
||||
pub line_length: Option<usize>,
|
||||
@@ -51,7 +52,7 @@ pub struct Configuration {
|
||||
pub src: Option<Vec<PathBuf>>,
|
||||
pub target_version: Option<PythonVersion>,
|
||||
pub unfixable: Option<Vec<CheckCodePrefix>>,
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
pub update_check: Option<bool>,
|
||||
// Plugins
|
||||
pub flake8_annotations: Option<flake8_annotations::settings::Options>,
|
||||
pub flake8_bugbear: Option<flake8_bugbear::settings::Options>,
|
||||
@@ -75,6 +76,14 @@ impl Configuration {
|
||||
pub fn from_options(options: Options, project_root: &Path) -> Result<Self> {
|
||||
Ok(Configuration {
|
||||
allowed_confusables: options.allowed_confusables,
|
||||
cache_dir: options
|
||||
.cache_dir
|
||||
.map(|dir| {
|
||||
let dir = shellexpand::full(&dir);
|
||||
dir.map(|dir| PathBuf::from(dir.as_ref()))
|
||||
})
|
||||
.transpose()
|
||||
.map_err(|e| anyhow!("Invalid `cache-dir` value: {e}"))?,
|
||||
dummy_variable_rgx: options
|
||||
.dummy_variable_rgx
|
||||
.map(|pattern| Regex::new(&pattern))
|
||||
@@ -139,14 +148,7 @@ impl Configuration {
|
||||
.transpose()?,
|
||||
target_version: options.target_version,
|
||||
unfixable: options.unfixable,
|
||||
cache_dir: options
|
||||
.cache_dir
|
||||
.map(|dir| {
|
||||
let dir = shellexpand::full(&dir);
|
||||
dir.map(|dir| PathBuf::from(dir.as_ref()))
|
||||
})
|
||||
.transpose()
|
||||
.map_err(|e| anyhow!("Invalid `cache-dir` value: {e}"))?,
|
||||
update_check: options.update_check,
|
||||
// Plugins
|
||||
flake8_annotations: options.flake8_annotations,
|
||||
flake8_bugbear: options.flake8_bugbear,
|
||||
@@ -167,6 +169,7 @@ impl Configuration {
|
||||
pub fn combine(self, config: Configuration) -> Self {
|
||||
Self {
|
||||
allowed_confusables: self.allowed_confusables.or(config.allowed_confusables),
|
||||
cache_dir: self.cache_dir.or(config.cache_dir),
|
||||
dummy_variable_rgx: self.dummy_variable_rgx.or(config.dummy_variable_rgx),
|
||||
exclude: self.exclude.or(config.exclude),
|
||||
extend: self.extend.or(config.extend),
|
||||
@@ -204,7 +207,7 @@ impl Configuration {
|
||||
src: self.src.or(config.src),
|
||||
target_version: self.target_version.or(config.target_version),
|
||||
unfixable: self.unfixable.or(config.unfixable),
|
||||
cache_dir: self.cache_dir.or(config.cache_dir),
|
||||
update_check: self.update_check.or(config.update_check),
|
||||
// Plugins
|
||||
flake8_annotations: self.flake8_annotations.or(config.flake8_annotations),
|
||||
flake8_bugbear: self.flake8_bugbear.or(config.flake8_bugbear),
|
||||
@@ -226,6 +229,9 @@ impl Configuration {
|
||||
}
|
||||
|
||||
pub fn apply(&mut self, overrides: Overrides) {
|
||||
if let Some(cache_dir) = overrides.cache_dir {
|
||||
self.cache_dir = Some(cache_dir);
|
||||
}
|
||||
if let Some(dummy_variable_rgx) = overrides.dummy_variable_rgx {
|
||||
self.dummy_variable_rgx = Some(dummy_variable_rgx);
|
||||
}
|
||||
@@ -279,8 +285,8 @@ impl Configuration {
|
||||
if let Some(unfixable) = overrides.unfixable {
|
||||
self.unfixable = Some(unfixable);
|
||||
}
|
||||
if let Some(cache_dir) = overrides.cache_dir {
|
||||
self.cache_dir = Some(cache_dir);
|
||||
if let Some(update_check) = overrides.update_check {
|
||||
self.update_check = Some(update_check);
|
||||
}
|
||||
// Special-case: `extend_ignore` and `extend_select` are parallel arrays, so
|
||||
// push an empty array if only one of the two is provided.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
/// Simple flags used to drive program behavior.
|
||||
use crate::autofix::fixer;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, Copy, Clone, Hash)]
|
||||
pub enum Autofix {
|
||||
Enabled,
|
||||
Disabled,
|
||||
@@ -20,13 +20,13 @@ impl From<bool> for Autofix {
|
||||
impl From<fixer::Mode> for Autofix {
|
||||
fn from(value: fixer::Mode) -> Self {
|
||||
match value {
|
||||
fixer::Mode::Generate | fixer::Mode::Apply => Autofix::Enabled,
|
||||
fixer::Mode::Generate | fixer::Mode::Diff | fixer::Mode::Apply => Autofix::Enabled,
|
||||
fixer::Mode::None => Autofix::Disabled,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, Copy, Clone, Hash)]
|
||||
pub enum Noqa {
|
||||
Enabled,
|
||||
Disabled,
|
||||
@@ -42,7 +42,7 @@ impl From<bool> for Noqa {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, Copy, Clone, Hash)]
|
||||
pub enum Cache {
|
||||
Enabled,
|
||||
Disabled,
|
||||
|
||||
@@ -39,6 +39,7 @@ const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct Settings {
|
||||
pub allowed_confusables: FxHashSet<char>,
|
||||
pub cache_dir: PathBuf,
|
||||
pub dummy_variable_rgx: Regex,
|
||||
pub enabled: FxHashSet<CheckCode>,
|
||||
pub exclude: GlobSet,
|
||||
@@ -47,8 +48,8 @@ pub struct Settings {
|
||||
pub fix: bool,
|
||||
pub fix_only: bool,
|
||||
pub fixable: FxHashSet<CheckCode>,
|
||||
pub format: SerializationFormat,
|
||||
pub force_exclude: bool,
|
||||
pub format: SerializationFormat,
|
||||
pub ignore_init_module_imports: bool,
|
||||
pub line_length: usize,
|
||||
pub per_file_ignores: Vec<(GlobMatcher, GlobMatcher, FxHashSet<CheckCode>)>,
|
||||
@@ -57,7 +58,7 @@ pub struct Settings {
|
||||
pub show_source: bool,
|
||||
pub src: Vec<PathBuf>,
|
||||
pub target_version: PythonVersion,
|
||||
pub cache_dir: PathBuf,
|
||||
pub update_check: bool,
|
||||
// Plugins
|
||||
pub flake8_annotations: flake8_annotations::settings::Settings,
|
||||
pub flake8_bugbear: flake8_bugbear::settings::Settings,
|
||||
@@ -107,6 +108,7 @@ impl Settings {
|
||||
.allowed_confusables
|
||||
.map(FxHashSet::from_iter)
|
||||
.unwrap_or_default(),
|
||||
cache_dir: config.cache_dir.unwrap_or_else(|| cache_dir(project_root)),
|
||||
dummy_variable_rgx: config
|
||||
.dummy_variable_rgx
|
||||
.unwrap_or_else(|| DEFAULT_DUMMY_VARIABLE_RGX.clone()),
|
||||
@@ -138,7 +140,7 @@ impl Settings {
|
||||
}]
|
||||
.into_iter(),
|
||||
),
|
||||
format: config.format.unwrap_or(SerializationFormat::Text),
|
||||
format: config.format.unwrap_or_default(),
|
||||
force_exclude: config.force_exclude.unwrap_or(false),
|
||||
ignore_init_module_imports: config.ignore_init_module_imports.unwrap_or_default(),
|
||||
line_length: config.line_length.unwrap_or(88),
|
||||
@@ -147,12 +149,12 @@ impl Settings {
|
||||
)?,
|
||||
respect_gitignore: config.respect_gitignore.unwrap_or(true),
|
||||
required_version: config.required_version,
|
||||
show_source: config.show_source.unwrap_or_default(),
|
||||
src: config
|
||||
.src
|
||||
.unwrap_or_else(|| vec![project_root.to_path_buf()]),
|
||||
target_version: config.target_version.unwrap_or(PythonVersion::Py310),
|
||||
show_source: config.show_source.unwrap_or_default(),
|
||||
cache_dir: config.cache_dir.unwrap_or_else(|| cache_dir(project_root)),
|
||||
update_check: config.update_check.unwrap_or(true),
|
||||
// Plugins
|
||||
flake8_annotations: config
|
||||
.flake8_annotations
|
||||
@@ -210,6 +212,7 @@ impl Settings {
|
||||
pub fn for_rule(check_code: CheckCode) -> Self {
|
||||
Self {
|
||||
allowed_confusables: FxHashSet::from_iter([]),
|
||||
cache_dir: cache_dir(path_dedot::CWD.as_path()),
|
||||
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
|
||||
enabled: FxHashSet::from_iter([check_code.clone()]),
|
||||
exclude: GlobSet::empty(),
|
||||
@@ -218,8 +221,8 @@ impl Settings {
|
||||
fix: false,
|
||||
fix_only: false,
|
||||
fixable: FxHashSet::from_iter([check_code]),
|
||||
format: SerializationFormat::Text,
|
||||
force_exclude: false,
|
||||
format: SerializationFormat::Text,
|
||||
ignore_init_module_imports: false,
|
||||
line_length: 88,
|
||||
per_file_ignores: vec![],
|
||||
@@ -228,7 +231,7 @@ impl Settings {
|
||||
show_source: false,
|
||||
src: vec![path_dedot::CWD.clone()],
|
||||
target_version: PythonVersion::Py310,
|
||||
cache_dir: cache_dir(path_dedot::CWD.as_path()),
|
||||
update_check: false,
|
||||
flake8_annotations: flake8_annotations::settings::Settings::default(),
|
||||
flake8_bugbear: flake8_bugbear::settings::Settings::default(),
|
||||
flake8_errmsg: flake8_errmsg::settings::Settings::default(),
|
||||
@@ -247,6 +250,7 @@ impl Settings {
|
||||
pub fn for_rules(check_codes: Vec<CheckCode>) -> Self {
|
||||
Self {
|
||||
allowed_confusables: FxHashSet::from_iter([]),
|
||||
cache_dir: cache_dir(path_dedot::CWD.as_path()),
|
||||
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
|
||||
enabled: FxHashSet::from_iter(check_codes.clone()),
|
||||
exclude: GlobSet::empty(),
|
||||
@@ -255,8 +259,8 @@ impl Settings {
|
||||
fix: false,
|
||||
fix_only: false,
|
||||
fixable: FxHashSet::from_iter(check_codes),
|
||||
format: SerializationFormat::Text,
|
||||
force_exclude: false,
|
||||
format: SerializationFormat::Text,
|
||||
ignore_init_module_imports: false,
|
||||
line_length: 88,
|
||||
per_file_ignores: vec![],
|
||||
@@ -265,7 +269,7 @@ impl Settings {
|
||||
show_source: false,
|
||||
src: vec![path_dedot::CWD.clone()],
|
||||
target_version: PythonVersion::Py310,
|
||||
cache_dir: cache_dir(path_dedot::CWD.as_path()),
|
||||
update_check: false,
|
||||
flake8_annotations: flake8_annotations::settings::Settings::default(),
|
||||
flake8_bugbear: flake8_bugbear::settings::Settings::default(),
|
||||
flake8_errmsg: flake8_errmsg::settings::Settings::default(),
|
||||
@@ -379,6 +383,7 @@ fn resolve_codes<'a>(specs: impl Iterator<Item = CheckCodeSpec<'a>>) -> FxHashSe
|
||||
let mut codes: FxHashSet<CheckCode> = FxHashSet::default();
|
||||
for spec in specs {
|
||||
for specificity in [
|
||||
SuffixLength::None,
|
||||
SuffixLength::Zero,
|
||||
SuffixLength::One,
|
||||
SuffixLength::Two,
|
||||
|
||||
@@ -30,6 +30,22 @@ pub struct Options {
|
||||
/// A list of allowed "confusable" Unicode characters to ignore when
|
||||
/// enforcing `RUF001`, `RUF002`, and `RUF003`.
|
||||
pub allowed_confusables: Option<Vec<char>>,
|
||||
#[option(
|
||||
default = ".ruff_cache",
|
||||
value_type = "PathBuf",
|
||||
example = r#"cache-dir = "~/.cache/ruff""#
|
||||
)]
|
||||
/// A path to the cache directory.
|
||||
///
|
||||
/// By default, Ruff stores cache results in a `.ruff_cache` directory in
|
||||
/// the current project root.
|
||||
///
|
||||
/// However, Ruff will also respect the `RUFF_CACHE_DIR` environment
|
||||
/// variable, which takes precedence over that default.
|
||||
///
|
||||
/// This setting will override even the `RUFF_CACHE_DIR` environment
|
||||
/// variable, if set.
|
||||
pub cache_dir: Option<String>,
|
||||
#[option(
|
||||
default = r#""^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$""#,
|
||||
value_type = "Regex",
|
||||
@@ -156,8 +172,9 @@ pub struct Options {
|
||||
)]
|
||||
/// The style in which violation messages should be formatted: `"text"`
|
||||
/// (default), `"grouped"` (group messages by file), `"json"`
|
||||
/// (machine-readable), `"junit"` (machine-readable XML), or `"github"`
|
||||
/// (GitHub Actions annotations).
|
||||
/// (machine-readable), `"junit"` (machine-readable XML), `"github"`
|
||||
/// (GitHub Actions annotations) or `"gitlab"`
|
||||
/// (GitLab CI code quality report).
|
||||
pub format: Option<SerializationFormat>,
|
||||
#[option(
|
||||
default = r#"false"#,
|
||||
@@ -323,22 +340,13 @@ pub struct Options {
|
||||
/// A list of check code prefixes to consider un-autofix-able.
|
||||
pub unfixable: Option<Vec<CheckCodePrefix>>,
|
||||
#[option(
|
||||
default = ".ruff_cache",
|
||||
value_type = "PathBuf",
|
||||
example = r#"cache-dir = "~/.cache/ruff""#
|
||||
default = "true",
|
||||
value_type = "bool",
|
||||
example = "update-check = false"
|
||||
)]
|
||||
/// A path to the cache directory.
|
||||
///
|
||||
/// By default, Ruff stores cache results in a `.ruff_cache` directory in
|
||||
/// the current project root.
|
||||
///
|
||||
/// However, Ruff will also respect the `RUFF_CACHE_DIR` environment
|
||||
/// variable, which takes precedence over that default.
|
||||
///
|
||||
/// This setting will override even the `RUFF_CACHE_DIR` environment
|
||||
/// variable, if set.
|
||||
pub cache_dir: Option<String>,
|
||||
/// Plugins
|
||||
/// Enable or disable automatic update checks (overridden by the
|
||||
/// `--update-check` and `--no-update-check` command-line flags).
|
||||
pub update_check: Option<bool>,
|
||||
#[option_group]
|
||||
/// Options for the `flake8-annotations` plugin.
|
||||
pub flake8_annotations: Option<flake8_annotations::settings::Options>,
|
||||
|
||||
@@ -164,6 +164,7 @@ mod tests {
|
||||
Some(Tools {
|
||||
ruff: Some(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -174,20 +175,20 @@ mod tests {
|
||||
fix: None,
|
||||
fix_only: None,
|
||||
fixable: None,
|
||||
format: None,
|
||||
force_exclude: None,
|
||||
format: None,
|
||||
ignore: None,
|
||||
ignore_init_module_imports: None,
|
||||
line_length: None,
|
||||
per_file_ignores: None,
|
||||
respect_gitignore: None,
|
||||
required_version: None,
|
||||
respect_gitignore: None,
|
||||
select: None,
|
||||
show_source: None,
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -239,6 +240,7 @@ line-length = 79
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
update_check: None,
|
||||
cache_dir: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
@@ -268,6 +270,7 @@ exclude = ["foo.py"]
|
||||
Some(Tools {
|
||||
ruff: Some(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: Some(vec!["foo.py".to_string()]),
|
||||
extend: None,
|
||||
@@ -284,14 +287,14 @@ exclude = ["foo.py"]
|
||||
ignore_init_module_imports: None,
|
||||
line_length: None,
|
||||
per_file_ignores: None,
|
||||
respect_gitignore: None,
|
||||
required_version: None,
|
||||
respect_gitignore: None,
|
||||
select: None,
|
||||
show_source: None,
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_errmsg: None,
|
||||
flake8_bugbear: None,
|
||||
@@ -320,6 +323,7 @@ select = ["E501"]
|
||||
Some(Tools {
|
||||
ruff: Some(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -336,14 +340,14 @@ select = ["E501"]
|
||||
ignore_init_module_imports: None,
|
||||
line_length: None,
|
||||
per_file_ignores: None,
|
||||
respect_gitignore: None,
|
||||
required_version: None,
|
||||
respect_gitignore: None,
|
||||
select: Some(vec![CheckCodePrefix::E501]),
|
||||
show_source: None,
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -373,6 +377,7 @@ ignore = ["E501"]
|
||||
Some(Tools {
|
||||
ruff: Some(Options {
|
||||
allowed_confusables: None,
|
||||
cache_dir: None,
|
||||
dummy_variable_rgx: None,
|
||||
exclude: None,
|
||||
extend: None,
|
||||
@@ -389,14 +394,14 @@ ignore = ["E501"]
|
||||
ignore_init_module_imports: None,
|
||||
line_length: None,
|
||||
per_file_ignores: None,
|
||||
respect_gitignore: None,
|
||||
required_version: None,
|
||||
respect_gitignore: None,
|
||||
select: None,
|
||||
show_source: None,
|
||||
src: None,
|
||||
target_version: None,
|
||||
unfixable: None,
|
||||
cache_dir: None,
|
||||
update_check: None,
|
||||
flake8_annotations: None,
|
||||
flake8_bugbear: None,
|
||||
flake8_errmsg: None,
|
||||
@@ -480,6 +485,7 @@ other-attribute = 1
|
||||
format: None,
|
||||
force_exclude: None,
|
||||
unfixable: None,
|
||||
update_check: None,
|
||||
cache_dir: None,
|
||||
per_file_ignores: Some(FxHashMap::from_iter([(
|
||||
"__init__.py".to_string(),
|
||||
|
||||
@@ -154,6 +154,7 @@ pub enum SerializationFormat {
|
||||
Junit,
|
||||
Grouped,
|
||||
Github,
|
||||
Gitlab,
|
||||
}
|
||||
|
||||
impl Default for SerializationFormat {
|
||||
@@ -163,6 +164,12 @@ impl Default for SerializationFormat {
|
||||
return Self::Github;
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(gitlab_ci) = env::var("GITLAB_CI") {
|
||||
if gitlab_ci == "true" {
|
||||
return Self::Gitlab;
|
||||
}
|
||||
}
|
||||
Self::Text
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
//! Generate Python source code from an abstract syntax tree (AST).
|
||||
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
use std::string::FromUtf8Error;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Suite, Withitem};
|
||||
use rustpython_common::str;
|
||||
use rustpython_parser::ast::{
|
||||
Alias, Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, ConversionFlag, Expr, ExprKind,
|
||||
Operator, Stmt, StmtKind,
|
||||
};
|
||||
|
||||
use crate::source_code_style::{Indentation, Quote};
|
||||
use crate::vendor::{bytes, str};
|
||||
|
||||
mod precedence {
|
||||
macro_rules! precedence {
|
||||
($($op:ident,)*) => {
|
||||
@@ -27,25 +32,27 @@ mod precedence {
|
||||
pub const EXPR: u8 = BOR;
|
||||
}
|
||||
|
||||
pub struct SourceGenerator {
|
||||
pub struct SourceCodeGenerator<'a> {
|
||||
/// The indentation style to use.
|
||||
indent: &'a Indentation,
|
||||
/// The quote style to use for string literals.
|
||||
quote: &'a Quote,
|
||||
buffer: Vec<u8>,
|
||||
indentation: usize,
|
||||
new_lines: usize,
|
||||
indent_depth: usize,
|
||||
num_newlines: usize,
|
||||
initial: bool,
|
||||
}
|
||||
|
||||
impl Default for SourceGenerator {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceGenerator {
|
||||
pub fn new() -> Self {
|
||||
SourceGenerator {
|
||||
impl<'a> SourceCodeGenerator<'a> {
|
||||
pub fn new(indent: &'a Indentation, quote: &'a Quote) -> Self {
|
||||
SourceCodeGenerator {
|
||||
// Style preferences.
|
||||
indent,
|
||||
quote,
|
||||
// Internal state.
|
||||
buffer: vec![],
|
||||
indentation: 0,
|
||||
new_lines: 0,
|
||||
indent_depth: 0,
|
||||
num_newlines: 0,
|
||||
initial: true,
|
||||
}
|
||||
}
|
||||
@@ -56,30 +63,30 @@ impl SourceGenerator {
|
||||
|
||||
fn newline(&mut self) {
|
||||
if !self.initial {
|
||||
self.new_lines = std::cmp::max(self.new_lines, 1);
|
||||
self.num_newlines = std::cmp::max(self.num_newlines, 1);
|
||||
}
|
||||
}
|
||||
|
||||
fn newlines(&mut self, extra: usize) {
|
||||
if !self.initial {
|
||||
self.new_lines = std::cmp::max(self.new_lines, 1 + extra);
|
||||
self.num_newlines = std::cmp::max(self.num_newlines, 1 + extra);
|
||||
}
|
||||
}
|
||||
|
||||
fn body<U>(&mut self, stmts: &[Stmt<U>]) {
|
||||
self.indentation += 1;
|
||||
self.indent_depth += 1;
|
||||
for stmt in stmts {
|
||||
self.unparse_stmt(stmt);
|
||||
}
|
||||
self.indentation -= 1;
|
||||
self.indent_depth -= 1;
|
||||
}
|
||||
|
||||
fn p(&mut self, s: &str) {
|
||||
if self.new_lines > 0 {
|
||||
for _ in 0..self.new_lines {
|
||||
if self.num_newlines > 0 {
|
||||
for _ in 0..self.num_newlines {
|
||||
self.buffer.extend("\n".as_bytes());
|
||||
}
|
||||
self.new_lines = 0;
|
||||
self.num_newlines = 0;
|
||||
}
|
||||
self.buffer.extend(s.as_bytes());
|
||||
}
|
||||
@@ -108,7 +115,7 @@ impl SourceGenerator {
|
||||
macro_rules! statement {
|
||||
($body:block) => {{
|
||||
self.newline();
|
||||
self.p(&" ".repeat(self.indentation));
|
||||
self.p(&self.indent.deref().repeat(self.indent_depth));
|
||||
$body
|
||||
self.initial = false;
|
||||
}};
|
||||
@@ -123,7 +130,7 @@ impl SourceGenerator {
|
||||
..
|
||||
} => {
|
||||
// TODO(charlie): Handle decorators.
|
||||
self.newlines(if self.indentation == 0 { 2 } else { 1 });
|
||||
self.newlines(if self.indent_depth == 0 { 2 } else { 1 });
|
||||
statement!({
|
||||
self.p("def ");
|
||||
self.p(name);
|
||||
@@ -137,7 +144,7 @@ impl SourceGenerator {
|
||||
self.p(":");
|
||||
});
|
||||
self.body(body);
|
||||
if self.indentation == 0 {
|
||||
if self.indent_depth == 0 {
|
||||
self.newlines(2);
|
||||
}
|
||||
}
|
||||
@@ -149,7 +156,7 @@ impl SourceGenerator {
|
||||
..
|
||||
} => {
|
||||
// TODO(charlie): Handle decorators.
|
||||
self.newlines(if self.indentation == 0 { 2 } else { 1 });
|
||||
self.newlines(if self.indent_depth == 0 { 2 } else { 1 });
|
||||
statement!({
|
||||
self.p("async def ");
|
||||
self.p(name);
|
||||
@@ -163,7 +170,7 @@ impl SourceGenerator {
|
||||
self.p(":");
|
||||
});
|
||||
self.body(body);
|
||||
if self.indentation == 0 {
|
||||
if self.indent_depth == 0 {
|
||||
self.newlines(2);
|
||||
}
|
||||
}
|
||||
@@ -175,7 +182,7 @@ impl SourceGenerator {
|
||||
..
|
||||
} => {
|
||||
// TODO(charlie): Handle decorators.
|
||||
self.newlines(if self.indentation == 0 { 2 } else { 1 });
|
||||
self.newlines(if self.indent_depth == 0 { 2 } else { 1 });
|
||||
statement!({
|
||||
self.p("class ");
|
||||
self.p(name);
|
||||
@@ -200,7 +207,7 @@ impl SourceGenerator {
|
||||
self.p(":");
|
||||
});
|
||||
self.body(body);
|
||||
if self.indentation == 0 {
|
||||
if self.indent_depth == 0 {
|
||||
self.newlines(2);
|
||||
}
|
||||
}
|
||||
@@ -788,6 +795,12 @@ impl SourceGenerator {
|
||||
{
|
||||
self.p(&value.to_string().replace("inf", inf_str));
|
||||
}
|
||||
Constant::Bytes(b) => {
|
||||
self.p(&bytes::repr(b, self.quote.into()));
|
||||
}
|
||||
Constant::Str(s) => {
|
||||
self.p(&format!("{}", str::repr(s, self.quote.into())));
|
||||
}
|
||||
_ => self.p(&format!("{value}")),
|
||||
}
|
||||
}
|
||||
@@ -931,7 +944,7 @@ impl SourceGenerator {
|
||||
}
|
||||
|
||||
fn unparse_formatted<U>(&mut self, val: &Expr<U>, conversion: usize, spec: Option<&Expr<U>>) {
|
||||
let mut generator = SourceGenerator::default();
|
||||
let mut generator = SourceCodeGenerator::new(self.indent, self.quote);
|
||||
generator.unparse_expr(val, precedence::TEST + 1);
|
||||
let brace = if generator.buffer.starts_with("{".as_bytes()) {
|
||||
// put a space to avoid escaping the bracket
|
||||
@@ -987,10 +1000,10 @@ impl SourceGenerator {
|
||||
self.unparse_fstring_body(values, is_spec);
|
||||
} else {
|
||||
self.p("f");
|
||||
let mut generator = SourceGenerator::default();
|
||||
let mut generator = SourceCodeGenerator::new(self.indent, self.quote);
|
||||
generator.unparse_fstring_body(values, is_spec);
|
||||
let body = std::str::from_utf8(&generator.buffer).unwrap();
|
||||
self.p(&format!("{}", str::repr(body)));
|
||||
self.p(&format!("{}", str::repr(body, self.quote.into())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1010,3 +1023,139 @@ impl SourceGenerator {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use anyhow::Result;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::source_code_generator::SourceCodeGenerator;
|
||||
use crate::source_code_style::{Indentation, Quote};
|
||||
|
||||
fn round_trip(contents: &str) -> Result<String> {
|
||||
let indentation = Indentation::default();
|
||||
let quote = Quote::default();
|
||||
let program = parser::parse_program(contents, "<filename>")?;
|
||||
let stmt = program.first().unwrap();
|
||||
let mut generator = SourceCodeGenerator::new(&indentation, "e);
|
||||
generator.unparse_stmt(stmt);
|
||||
generator.generate().map_err(std::convert::Into::into)
|
||||
}
|
||||
|
||||
fn round_trip_with(indentation: &Indentation, quote: &Quote, contents: &str) -> Result<String> {
|
||||
let program = parser::parse_program(contents, "<filename>")?;
|
||||
let stmt = program.first().unwrap();
|
||||
let mut generator = SourceCodeGenerator::new(indentation, quote);
|
||||
generator.unparse_stmt(stmt);
|
||||
generator.generate().map_err(std::convert::Into::into)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn quote() -> Result<()> {
|
||||
assert_eq!(round_trip(r#""hello""#)?, r#""hello""#);
|
||||
assert_eq!(round_trip(r#"'hello'"#)?, r#""hello""#);
|
||||
assert_eq!(round_trip(r#"u'hello'"#)?, r#"u"hello""#);
|
||||
assert_eq!(round_trip(r#"r'hello'"#)?, r#""hello""#);
|
||||
assert_eq!(round_trip(r#"b'hello'"#)?, r#"b"hello""#);
|
||||
assert_eq!(round_trip(r#"("abc" "def" "ghi")"#)?, r#""abcdefghi""#);
|
||||
assert_eq!(round_trip(r#""he\"llo""#)?, r#"'he"llo'"#);
|
||||
assert_eq!(round_trip(r#"f'abc{"def"}{1}'"#)?, r#"f'abc{"def"}{1}'"#);
|
||||
assert_eq!(round_trip(r#"f"abc{'def'}{1}""#)?, r#"f'abc{"def"}{1}'"#);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn indent() -> Result<()> {
|
||||
assert_eq!(
|
||||
round_trip(
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim(),
|
||||
)?,
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_quote() -> Result<()> {
|
||||
assert_eq!(
|
||||
round_trip_with(&Indentation::default(), &Quote::Double, r#""hello""#)?,
|
||||
r#""hello""#
|
||||
);
|
||||
assert_eq!(
|
||||
round_trip_with(&Indentation::default(), &Quote::Single, r#""hello""#)?,
|
||||
r#"'hello'"#
|
||||
);
|
||||
assert_eq!(
|
||||
round_trip_with(&Indentation::default(), &Quote::Double, r#"'hello'"#)?,
|
||||
r#""hello""#
|
||||
);
|
||||
assert_eq!(
|
||||
round_trip_with(&Indentation::default(), &Quote::Single, r#"'hello'"#)?,
|
||||
r#"'hello'"#
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_indent() -> Result<()> {
|
||||
assert_eq!(
|
||||
round_trip_with(
|
||||
&Indentation::new(" ".to_string()),
|
||||
&Quote::default(),
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim(),
|
||||
)?,
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
assert_eq!(
|
||||
round_trip_with(
|
||||
&Indentation::new(" ".to_string()),
|
||||
&Quote::default(),
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim(),
|
||||
)?,
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
assert_eq!(
|
||||
round_trip_with(
|
||||
&Indentation::new("\t".to_string()),
|
||||
&Quote::default(),
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim(),
|
||||
)?,
|
||||
r#"
|
||||
if True:
|
||||
pass
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
200
src/source_code_style.rs
Normal file
200
src/source_code_style.rs
Normal file
@@ -0,0 +1,200 @@
|
||||
//! Detect code style from Python source code.
|
||||
|
||||
use std::ops::Deref;
|
||||
|
||||
use once_cell::unsync::OnceCell;
|
||||
use rustpython_ast::Location;
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::pydocstyle::helpers::leading_quote;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::vendor;
|
||||
|
||||
pub struct SourceCodeStyleDetector<'a> {
|
||||
contents: &'a str,
|
||||
locator: &'a SourceCodeLocator<'a>,
|
||||
indentation: OnceCell<Indentation>,
|
||||
quote: OnceCell<Quote>,
|
||||
}
|
||||
|
||||
impl<'a> SourceCodeStyleDetector<'a> {
|
||||
pub fn indentation(&'a self) -> &'a Indentation {
|
||||
self.indentation
|
||||
.get_or_init(|| detect_indentation(self.contents, self.locator).unwrap_or_default())
|
||||
}
|
||||
|
||||
pub fn quote(&'a self) -> &'a Quote {
|
||||
self.quote
|
||||
.get_or_init(|| detect_quote(self.contents, self.locator).unwrap_or_default())
|
||||
}
|
||||
|
||||
pub fn from_contents(contents: &'a str, locator: &'a SourceCodeLocator<'a>) -> Self {
|
||||
Self {
|
||||
contents,
|
||||
locator,
|
||||
indentation: OnceCell::default(),
|
||||
quote: OnceCell::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The quotation style used in Python source code.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum Quote {
|
||||
Single,
|
||||
Double,
|
||||
}
|
||||
|
||||
impl Default for Quote {
|
||||
fn default() -> Self {
|
||||
Quote::Double
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Quote> for vendor::str::Quote {
|
||||
fn from(val: &Quote) -> Self {
|
||||
match val {
|
||||
Quote::Single => vendor::str::Quote::Single,
|
||||
Quote::Double => vendor::str::Quote::Double,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The indentation style used in Python source code.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct Indentation(String);
|
||||
|
||||
impl Indentation {
|
||||
pub fn new(indentation: String) -> Self {
|
||||
Self(indentation)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Indentation {
|
||||
fn default() -> Self {
|
||||
Indentation(" ".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Indentation {
|
||||
type Target = str;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Detect the indentation style of the given tokens.
|
||||
fn detect_indentation(contents: &str, locator: &SourceCodeLocator) -> Option<Indentation> {
|
||||
for (_start, tok, end) in lexer::make_tokenizer(contents).flatten() {
|
||||
if let Tok::Indent { .. } = tok {
|
||||
let start = Location::new(end.row(), 0);
|
||||
let whitespace = locator.slice_source_code_range(&Range {
|
||||
location: start,
|
||||
end_location: end,
|
||||
});
|
||||
return Some(Indentation(whitespace.to_string()));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Detect the quotation style of the given tokens.
|
||||
fn detect_quote(contents: &str, locator: &SourceCodeLocator) -> Option<Quote> {
|
||||
for (start, tok, end) in lexer::make_tokenizer(contents).flatten() {
|
||||
if let Tok::String { .. } = tok {
|
||||
let content = locator.slice_source_code_range(&Range {
|
||||
location: start,
|
||||
end_location: end,
|
||||
});
|
||||
if let Some(pattern) = leading_quote(&content) {
|
||||
if pattern.contains('\'') {
|
||||
return Some(Quote::Single);
|
||||
} else if pattern.contains('"') {
|
||||
return Some(Quote::Double);
|
||||
}
|
||||
unreachable!("Expected string to start with a valid quote prefix")
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::source_code_style::{detect_indentation, detect_quote, Indentation, Quote};
|
||||
use crate::SourceCodeLocator;
|
||||
|
||||
#[test]
|
||||
fn indentation() {
|
||||
let contents = r#"x = 1"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(detect_indentation(contents, &locator), None);
|
||||
|
||||
let contents = r#"
|
||||
if True:
|
||||
pass
|
||||
"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(
|
||||
detect_indentation(contents, &locator),
|
||||
Some(Indentation(" ".to_string()))
|
||||
);
|
||||
|
||||
let contents = r#"
|
||||
if True:
|
||||
pass
|
||||
"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(
|
||||
detect_indentation(contents, &locator),
|
||||
Some(Indentation(" ".to_string()))
|
||||
);
|
||||
|
||||
let contents = r#"
|
||||
if True:
|
||||
pass
|
||||
"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(
|
||||
detect_indentation(contents, &locator),
|
||||
Some(Indentation("\t".to_string()))
|
||||
);
|
||||
|
||||
// TODO(charlie): Should non-significant whitespace be detected?
|
||||
let contents = r#"
|
||||
x = (
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
)
|
||||
"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(detect_indentation(contents, &locator), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn quote() {
|
||||
let contents = r#"x = 1"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(detect_quote(contents, &locator), None);
|
||||
|
||||
let contents = r#"x = '1'"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(detect_quote(contents, &locator), Some(Quote::Single));
|
||||
|
||||
let contents = r#"x = "1""#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(detect_quote(contents, &locator), Some(Quote::Double));
|
||||
|
||||
let contents = r#"
|
||||
def f():
|
||||
"""Docstring."""
|
||||
pass
|
||||
"#;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
assert_eq!(detect_quote(contents, &locator), Some(Quote::Double));
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user