Compare commits
23 Commits
v0.3.2
...
zb/recursi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b9a5a32e6e | ||
|
|
c56fb6e15a | ||
|
|
dbf82233b8 | ||
|
|
87afe36c87 | ||
|
|
704fefc7ab | ||
|
|
dacec7377c | ||
|
|
b669306c87 | ||
|
|
b117f33075 | ||
|
|
c746912b9e | ||
|
|
fc7139d9a5 | ||
|
|
f8f56186b3 | ||
|
|
02fc521369 | ||
|
|
4b0666919b | ||
|
|
06284c3700 | ||
|
|
8d73866f70 | ||
|
|
bc693ea13a | ||
|
|
ad84eedc18 | ||
|
|
96a4f95a44 | ||
|
|
bae26b49a6 | ||
|
|
3d7adbc0ed | ||
|
|
c6456b882c | ||
|
|
49eb97879a | ||
|
|
0c84fbb6db |
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -2,6 +2,8 @@
|
||||
|
||||
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
|
||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
|
||||
|
||||
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
|
||||
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
|
||||
|
||||
2
.github/workflows/release.yaml
vendored
2
.github/workflows/release.yaml
vendored
@@ -517,7 +517,7 @@ jobs:
|
||||
path: binaries
|
||||
merge-multiple: true
|
||||
- name: "Publish to GitHub"
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: true
|
||||
files: binaries/*
|
||||
|
||||
@@ -329,13 +329,13 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
|
||||
### Creating a new release
|
||||
|
||||
We use an experimental in-house tool for managing releases.
|
||||
|
||||
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
|
||||
1. Run `rooster release`; this command will:
|
||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
||||
1. Run `./scripts/release/bump.sh`; this command will:
|
||||
- Generate a temporary virtual environment with `rooster`
|
||||
- Generate a changelog entry in `CHANGELOG.md`
|
||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||
- Update references to versions in the `README.md` and documentation
|
||||
- Display contributors for the release
|
||||
1. The changelog should then be editorialized for consistency
|
||||
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
@@ -359,7 +359,7 @@ We use an experimental in-house tool for managing releases.
|
||||
1. Open the draft release in the GitHub release section
|
||||
1. Copy the changelog for the release into the GitHub release
|
||||
- See previous releases for formatting of section headers
|
||||
1. Generate the contributor list with `rooster contributors` and add to the release notes
|
||||
1. Append the contributors from the `bump.sh` script
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
1. One can determine if an update is needed when
|
||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
||||
|
||||
184
Cargo.lock
generated
184
Cargo.lock
generated
@@ -270,9 +270,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.34"
|
||||
version = "0.4.35"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
|
||||
checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
@@ -309,9 +309,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.1"
|
||||
version = "4.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da"
|
||||
checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -319,9 +319,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.1"
|
||||
version = "4.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb"
|
||||
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -528,6 +528,19 @@ dependencies = [
|
||||
"itertools 0.10.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"crossbeam-deque",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-queue",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-channel"
|
||||
version = "0.5.12"
|
||||
@@ -556,6 +569,15 @@ dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-queue"
|
||||
version = "0.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.19"
|
||||
@@ -1156,10 +1178,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.68"
|
||||
name = "jod-thread"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "406cda4b368d531c842222cf9d2600a9a4acce8d29423695379c6868a143a9ee"
|
||||
checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.69"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
|
||||
dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
@@ -1327,6 +1355,31 @@ version = "0.4.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
|
||||
|
||||
[[package]]
|
||||
name = "lsp-server"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"log",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lsp-types"
|
||||
version = "0.95.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_repr",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matchers"
|
||||
version = "0.1.0"
|
||||
@@ -1982,6 +2035,7 @@ dependencies = [
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_formatter",
|
||||
"ruff_server",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"ruff_workspace",
|
||||
@@ -1996,6 +2050,8 @@ dependencies = [
|
||||
"tikv-jemallocator",
|
||||
"toml",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
"walkdir",
|
||||
"wild",
|
||||
]
|
||||
@@ -2360,6 +2416,35 @@ dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_server"
|
||||
version = "0.2.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"insta",
|
||||
"jod-thread",
|
||||
"libc",
|
||||
"lsp-server",
|
||||
"lsp-types",
|
||||
"ruff_diagnostics",
|
||||
"ruff_formatter",
|
||||
"ruff_linter",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_codegen",
|
||||
"ruff_python_formatter",
|
||||
"ruff_python_index",
|
||||
"ruff_python_parser",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"ruff_workspace",
|
||||
"rustc-hash",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"similar",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_shrinking"
|
||||
version = "0.3.2"
|
||||
@@ -2631,6 +2716,17 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_repr"
|
||||
version = "0.1.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.52",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_spanned"
|
||||
version = "0.6.5"
|
||||
@@ -2954,22 +3050,6 @@ dependencies = [
|
||||
"tikv-jemalloc-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.3.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"time-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time-core"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
|
||||
|
||||
[[package]]
|
||||
name = "tiny-keccak"
|
||||
version = "2.0.2"
|
||||
@@ -3083,6 +3163,17 @@ dependencies = [
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-log"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2"
|
||||
dependencies = [
|
||||
"log",
|
||||
"once_cell",
|
||||
"tracing-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-log"
|
||||
version = "0.2.0"
|
||||
@@ -3109,7 +3200,19 @@ dependencies = [
|
||||
"thread_local",
|
||||
"tracing",
|
||||
"tracing-core",
|
||||
"tracing-log",
|
||||
"tracing-log 0.2.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-tree"
|
||||
version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2ec6adcab41b1391b08a308cc6302b79f8095d1673f6947c2dc65ffb028b0b2d"
|
||||
dependencies = [
|
||||
"nu-ansi-term",
|
||||
"tracing-core",
|
||||
"tracing-log 0.1.4",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3195,9 +3298,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
|
||||
|
||||
[[package]]
|
||||
name = "unicode_names2"
|
||||
version = "1.2.1"
|
||||
version = "1.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac64ef2f016dc69dfa8283394a70b057066eb054d5fcb6b9eb17bd2ec5097211"
|
||||
checksum = "addeebf294df7922a1164f729fb27ebbbcea99cc32b3bf08afab62757f707677"
|
||||
dependencies = [
|
||||
"phf",
|
||||
"unicode_names2_generator",
|
||||
@@ -3205,15 +3308,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "unicode_names2_generator"
|
||||
version = "1.2.1"
|
||||
version = "1.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "013f6a731e80f3930de580e55ba41dfa846de4e0fdee4a701f97989cb1597d6a"
|
||||
checksum = "f444b8bba042fe3c1251ffaca35c603f2dc2ccc08d595c65a8c4f76f3e8426c0"
|
||||
dependencies = [
|
||||
"getopts",
|
||||
"log",
|
||||
"phf_codegen",
|
||||
"rand",
|
||||
"time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3352,9 +3454,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.91"
|
||||
version = "0.2.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
|
||||
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"wasm-bindgen-macro",
|
||||
@@ -3362,9 +3464,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.91"
|
||||
version = "0.2.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
|
||||
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
@@ -3389,9 +3491,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.91"
|
||||
version = "0.2.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
|
||||
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
@@ -3399,9 +3501,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.91"
|
||||
version = "0.2.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
|
||||
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3412,9 +3514,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.91"
|
||||
version = "0.2.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
|
||||
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-test"
|
||||
|
||||
16
Cargo.toml
16
Cargo.toml
@@ -21,8 +21,8 @@ bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.4.1" }
|
||||
bstr = { version = "1.9.1" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
chrono = { version = "0.4.34", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.1", features = ["derive"] }
|
||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.2", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "2.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.4.0", default-features = false }
|
||||
@@ -32,6 +32,7 @@ console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dirs = { version = "5.0.0" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.10.1" }
|
||||
@@ -51,11 +52,15 @@ insta-cmd = { version = "0.4.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.12.1" }
|
||||
js-sys = { version = "0.3.67" }
|
||||
js-sys = { version = "0.3.69" }
|
||||
jod-thread = { version = "0.1.2" }
|
||||
lalrpop-util = { version = "0.20.0", default-features = false }
|
||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
||||
libc = { version = "0.2.153" }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
lsp-server = { version = "0.7.6" }
|
||||
lsp-types = { version = "0.95.0", features = ["proposed"] }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
natord = { version = "1.0.9" }
|
||||
@@ -97,16 +102,17 @@ toml = { version = "0.8.9" }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-indicatif = { version = "0.3.6" }
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
tracing-tree = { version = "0.2.4" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode_names2 = { version = "1.2.1" }
|
||||
unicode_names2 = { version = "1.2.2" }
|
||||
ureq = { version = "2.9.6" }
|
||||
url = { version = "2.5.0" }
|
||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.84" }
|
||||
wasm-bindgen = { version = "0.2.92" }
|
||||
wasm-bindgen-test = { version = "0.3.40" }
|
||||
wild = { version = "2" }
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_notebook = { path = "../ruff_notebook" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||
ruff_server = { path = "../ruff_server" }
|
||||
ruff_source_file = { path = "../ruff_source_file" }
|
||||
ruff_text_size = { path = "../ruff_text_size" }
|
||||
ruff_workspace = { path = "../ruff_workspace" }
|
||||
@@ -52,6 +53,8 @@ tempfile = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
tracing-subscriber = { workspace = true, features = ["registry"]}
|
||||
tracing-tree = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
wild = { workspace = true }
|
||||
|
||||
|
||||
@@ -126,6 +126,8 @@ pub enum Command {
|
||||
GenerateShellCompletion { shell: clap_complete_command::Shell },
|
||||
/// Run the Ruff formatter on the given files or directories.
|
||||
Format(FormatCommand),
|
||||
/// Run the language server.
|
||||
Server(ServerCommand),
|
||||
/// Display Ruff's version
|
||||
Version {
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
@@ -494,6 +496,9 @@ pub struct FormatCommand {
|
||||
pub range: Option<FormatRange>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
pub struct ServerCommand;
|
||||
|
||||
#[derive(Debug, Clone, Copy, clap::ValueEnum)]
|
||||
pub enum HelpFormat {
|
||||
Text,
|
||||
|
||||
@@ -7,6 +7,7 @@ pub(crate) mod format;
|
||||
pub(crate) mod format_stdin;
|
||||
pub(crate) mod linter;
|
||||
pub(crate) mod rule;
|
||||
pub(crate) mod server;
|
||||
pub(crate) mod show_files;
|
||||
pub(crate) mod show_settings;
|
||||
pub(crate) mod version;
|
||||
|
||||
69
crates/ruff/src/commands/server.rs
Normal file
69
crates/ruff/src/commands/server.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
use crate::ExitStatus;
|
||||
use anyhow::Result;
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_server::Server;
|
||||
use tracing::{level_filters::LevelFilter, metadata::Level, subscriber::Interest, Metadata};
|
||||
use tracing_subscriber::{
|
||||
layer::{Context, Filter, SubscriberExt},
|
||||
Layer, Registry,
|
||||
};
|
||||
use tracing_tree::time::Uptime;
|
||||
|
||||
pub(crate) fn run_server(log_level: LogLevel) -> Result<ExitStatus> {
|
||||
let trace_level = if log_level == LogLevel::Verbose {
|
||||
Level::TRACE
|
||||
} else {
|
||||
Level::DEBUG
|
||||
};
|
||||
|
||||
let subscriber = Registry::default().with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_targets(true)
|
||||
.with_writer(|| Box::new(std::io::stderr()))
|
||||
.with_timer(Uptime::default())
|
||||
.with_filter(LoggingFilter { trace_level }),
|
||||
);
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber)?;
|
||||
|
||||
let server = Server::new()?;
|
||||
|
||||
server.run().map(|()| ExitStatus::Success)
|
||||
}
|
||||
|
||||
struct LoggingFilter {
|
||||
trace_level: Level,
|
||||
}
|
||||
|
||||
impl LoggingFilter {
|
||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
||||
let filter = if meta.target().starts_with("ruff") {
|
||||
self.trace_level
|
||||
} else {
|
||||
Level::INFO
|
||||
};
|
||||
|
||||
meta.level() <= &filter
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Filter<S> for LoggingFilter {
|
||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
||||
self.is_enabled(meta)
|
||||
}
|
||||
|
||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
||||
if self.is_enabled(meta) {
|
||||
Interest::always()
|
||||
} else {
|
||||
Interest::never()
|
||||
}
|
||||
}
|
||||
|
||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
||||
Some(LevelFilter::from_level(self.trace_level))
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,7 @@ use std::process::ExitCode;
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
use anyhow::Result;
|
||||
use args::GlobalConfigArgs;
|
||||
use args::{GlobalConfigArgs, ServerCommand};
|
||||
use clap::CommandFactory;
|
||||
use colored::Colorize;
|
||||
use log::warn;
|
||||
@@ -190,6 +190,7 @@ pub fn run(
|
||||
}
|
||||
Command::Check(args) => check(args, global_options),
|
||||
Command::Format(args) => format(args, global_options),
|
||||
Command::Server(args) => server(args, global_options.log_level()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -203,6 +204,12 @@ fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitS
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_pass_by_value)] // TODO: remove once we start taking arguments from here
|
||||
fn server(args: ServerCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
let ServerCommand {} = args;
|
||||
commands::server::run_server(log_level)
|
||||
}
|
||||
|
||||
pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
|
||||
let (cli, config_arguments) = args.partition(global_options)?;
|
||||
|
||||
|
||||
@@ -241,7 +241,22 @@ linter.flake8_gettext.functions_names = [
|
||||
ngettext,
|
||||
]
|
||||
linter.flake8_implicit_str_concat.allow_multiline = true
|
||||
linter.flake8_import_conventions.aliases = {"matplotlib": "mpl", "matplotlib.pyplot": "plt", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "networkx": "nx", "plotly.express": "px", "polars": "pl", "numpy": "np", "panel": "pn", "pyarrow": "pa", "altair": "alt", "tkinter": "tk", "holoviews": "hv"}
|
||||
linter.flake8_import_conventions.aliases = {
|
||||
altair = alt,
|
||||
holoviews = hv,
|
||||
matplotlib = mpl,
|
||||
matplotlib.pyplot = plt,
|
||||
networkx = nx,
|
||||
numpy = np,
|
||||
pandas = pd,
|
||||
panel = pn,
|
||||
plotly.express = px,
|
||||
polars = pl,
|
||||
pyarrow = pa,
|
||||
seaborn = sns,
|
||||
tensorflow = tf,
|
||||
tkinter = tk,
|
||||
}
|
||||
linter.flake8_import_conventions.banned_aliases = {}
|
||||
linter.flake8_import_conventions.banned_from = []
|
||||
linter.flake8_pytest_style.fixture_parentheses = true
|
||||
|
||||
@@ -545,6 +545,10 @@ impl PrintedRange {
|
||||
&self.code
|
||||
}
|
||||
|
||||
pub fn into_code(self) -> String {
|
||||
self.code
|
||||
}
|
||||
|
||||
/// The range the formatted code corresponds to in the source document.
|
||||
pub fn source_range(&self) -> TextRange {
|
||||
self.source_range
|
||||
|
||||
22
crates/ruff_linter/resources/test/fixtures/flake8_bandit/S311.py
vendored
Normal file
22
crates/ruff_linter/resources/test/fixtures/flake8_bandit/S311.py
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
import os
|
||||
import random
|
||||
|
||||
import a_lib
|
||||
|
||||
# OK
|
||||
random.SystemRandom()
|
||||
|
||||
# Errors
|
||||
random.Random()
|
||||
random.random()
|
||||
random.randrange()
|
||||
random.randint()
|
||||
random.choice()
|
||||
random.choices()
|
||||
random.uniform()
|
||||
random.triangular()
|
||||
random.randbytes()
|
||||
|
||||
# Unrelated
|
||||
os.urandom()
|
||||
a_lib.random()
|
||||
@@ -1,52 +1,47 @@
|
||||
import crypt
|
||||
import hashlib
|
||||
from hashlib import new as hashlib_new
|
||||
from hashlib import sha1 as hashlib_sha1
|
||||
|
||||
# Invalid
|
||||
|
||||
# Errors
|
||||
hashlib.new('md5')
|
||||
|
||||
hashlib.new('md4', b'test')
|
||||
|
||||
hashlib.new(name='md5', data=b'test')
|
||||
|
||||
hashlib.new('MD4', data=b'test')
|
||||
|
||||
hashlib.new('sha1')
|
||||
|
||||
hashlib.new('sha1', data=b'test')
|
||||
|
||||
hashlib.new('sha', data=b'test')
|
||||
|
||||
hashlib.new(name='SHA', data=b'test')
|
||||
|
||||
hashlib.sha(data=b'test')
|
||||
|
||||
hashlib.md5()
|
||||
|
||||
hashlib_new('sha1')
|
||||
|
||||
hashlib_sha1('sha1')
|
||||
|
||||
# usedforsecurity arg only available in Python 3.9+
|
||||
hashlib.new('sha1', usedforsecurity=True)
|
||||
|
||||
# Valid
|
||||
crypt.crypt("test", salt=crypt.METHOD_CRYPT)
|
||||
crypt.crypt("test", salt=crypt.METHOD_MD5)
|
||||
crypt.crypt("test", salt=crypt.METHOD_BLOWFISH)
|
||||
crypt.crypt("test", crypt.METHOD_BLOWFISH)
|
||||
|
||||
crypt.mksalt(crypt.METHOD_CRYPT)
|
||||
crypt.mksalt(crypt.METHOD_MD5)
|
||||
crypt.mksalt(crypt.METHOD_BLOWFISH)
|
||||
|
||||
# OK
|
||||
hashlib.new('sha256')
|
||||
|
||||
hashlib.new('SHA512')
|
||||
|
||||
hashlib.sha256(data=b'test')
|
||||
|
||||
# usedforsecurity arg only available in Python 3.9+
|
||||
hashlib_new(name='sha1', usedforsecurity=False)
|
||||
|
||||
# usedforsecurity arg only available in Python 3.9+
|
||||
hashlib_sha1(name='sha1', usedforsecurity=False)
|
||||
|
||||
# usedforsecurity arg only available in Python 3.9+
|
||||
hashlib.md4(usedforsecurity=False)
|
||||
|
||||
# usedforsecurity arg only available in Python 3.9+
|
||||
hashlib.new(name='sha256', usedforsecurity=False)
|
||||
|
||||
crypt.crypt("test")
|
||||
crypt.crypt("test", salt=crypt.METHOD_SHA256)
|
||||
crypt.crypt("test", salt=crypt.METHOD_SHA512)
|
||||
|
||||
crypt.mksalt()
|
||||
crypt.mksalt(crypt.METHOD_SHA256)
|
||||
crypt.mksalt(crypt.METHOD_SHA512)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import commands
|
||||
import popen2
|
||||
@@ -16,6 +17,8 @@ popen2.Popen3("true")
|
||||
popen2.Popen4("true")
|
||||
commands.getoutput("true")
|
||||
commands.getstatusoutput("true")
|
||||
subprocess.getoutput("true")
|
||||
subprocess.getstatusoutput("true")
|
||||
|
||||
|
||||
# Check command argument looks unsafe.
|
||||
|
||||
34
crates/ruff_linter/resources/test/fixtures/flake8_bandit/S610.py
vendored
Normal file
34
crates/ruff_linter/resources/test/fixtures/flake8_bandit/S610.py
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
# Errors
|
||||
User.objects.filter(username='admin').extra(dict(could_be='insecure'))
|
||||
User.objects.filter(username='admin').extra(select=dict(could_be='insecure'))
|
||||
User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
User.objects.filter(username='admin').extra(where=['%secure' % 'nos'])
|
||||
User.objects.filter(username='admin').extra(where=['{}secure'.format('no')])
|
||||
|
||||
query = '"username") AS "username", * FROM "auth_user" WHERE 1=1 OR "username"=? --'
|
||||
User.objects.filter(username='admin').extra(select={'test': query})
|
||||
|
||||
where_var = ['1=1) OR 1=1 AND (1=1']
|
||||
User.objects.filter(username='admin').extra(where=where_var)
|
||||
|
||||
where_str = '1=1) OR 1=1 AND (1=1'
|
||||
User.objects.filter(username='admin').extra(where=[where_str])
|
||||
|
||||
tables_var = ['django_content_type" WHERE "auth_user"."username"="admin']
|
||||
User.objects.all().extra(tables=tables_var).distinct()
|
||||
|
||||
tables_str = 'django_content_type" WHERE "auth_user"."username"="admin'
|
||||
User.objects.all().extra(tables=[tables_str]).distinct()
|
||||
|
||||
# OK
|
||||
User.objects.filter(username='admin').extra(
|
||||
select={'test': 'secure'},
|
||||
where=['secure'],
|
||||
tables=['secure']
|
||||
)
|
||||
User.objects.filter(username='admin').extra({'test': 'secure'})
|
||||
User.objects.filter(username='admin').extra(select={'test': 'secure'})
|
||||
User.objects.filter(username='admin').extra(where=['secure'])
|
||||
@@ -14,9 +14,6 @@ reversed(sorted(x, reverse=not x))
|
||||
reversed(sorted(i for i in range(42)))
|
||||
reversed(sorted((i for i in range(42)), reverse=True))
|
||||
|
||||
|
||||
def reversed(*args, **kwargs):
|
||||
return None
|
||||
|
||||
|
||||
reversed(sorted(x, reverse=True))
|
||||
# Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
reversed(sorted([1, 2, 3], reverse=False or True))
|
||||
reversed(sorted([1, 2, 3], reverse=(False or True)))
|
||||
|
||||
1
crates/ruff_linter/resources/test/fixtures/pycodestyle/E2_syntax_error.py
vendored
Normal file
1
crates/ruff_linter/resources/test/fixtures/pycodestyle/E2_syntax_error.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
a = (1 or)
|
||||
88
crates/ruff_linter/resources/test/fixtures/pycodestyle/E502.py
vendored
Normal file
88
crates/ruff_linter/resources/test/fixtures/pycodestyle/E502.py
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
a = 2 + 2
|
||||
|
||||
a = (2 + 2)
|
||||
|
||||
a = 2 + \
|
||||
3 \
|
||||
+ 4
|
||||
|
||||
a = (3 -\
|
||||
2 + \
|
||||
7)
|
||||
|
||||
z = 5 + \
|
||||
(3 -\
|
||||
2 + \
|
||||
7) + \
|
||||
4
|
||||
|
||||
b = [2 +
|
||||
2]
|
||||
|
||||
b = [
|
||||
2 + 4 + 5 + \
|
||||
44 \
|
||||
- 5
|
||||
]
|
||||
|
||||
c = (True and
|
||||
False \
|
||||
or False \
|
||||
and True \
|
||||
)
|
||||
|
||||
c = (True and
|
||||
False)
|
||||
|
||||
d = True and \
|
||||
False or \
|
||||
False \
|
||||
and not True
|
||||
|
||||
|
||||
s = {
|
||||
'x': 2 + \
|
||||
2
|
||||
}
|
||||
|
||||
|
||||
s = {
|
||||
'x': 2 +
|
||||
2
|
||||
}
|
||||
|
||||
|
||||
x = {2 + 4 \
|
||||
+ 3}
|
||||
|
||||
y = (
|
||||
2 + 2 # \
|
||||
+ 3 # \
|
||||
+ 4 \
|
||||
+ 3
|
||||
)
|
||||
|
||||
|
||||
x = """
|
||||
(\\
|
||||
)
|
||||
"""
|
||||
|
||||
|
||||
("""hello \
|
||||
""")
|
||||
|
||||
("hello \
|
||||
")
|
||||
|
||||
|
||||
x = "abc" \
|
||||
"xyz"
|
||||
|
||||
x = ("abc" \
|
||||
"xyz")
|
||||
|
||||
|
||||
def foo():
|
||||
x = (a + \
|
||||
2)
|
||||
14
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_0.py
vendored
Normal file
14
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_0.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
# Unix style
|
||||
def foo() -> None:
|
||||
pass
|
||||
|
||||
|
||||
def bar() -> None:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
foo()
|
||||
bar()
|
||||
|
||||
13
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_1.py
vendored
Normal file
13
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_1.py
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
# Unix style
|
||||
def foo() -> None:
|
||||
pass
|
||||
|
||||
|
||||
def bar() -> None:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
foo()
|
||||
bar()
|
||||
17
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py
vendored
Normal file
17
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
# Windows style
|
||||
def foo() -> None:
|
||||
pass
|
||||
|
||||
|
||||
def bar() -> None:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
foo()
|
||||
bar()
|
||||
|
||||
|
||||
|
||||
|
||||
13
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py
vendored
Normal file
13
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
# Windows style
|
||||
def foo() -> None:
|
||||
pass
|
||||
|
||||
|
||||
def bar() -> None:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
foo()
|
||||
bar()
|
||||
5
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_4.py
vendored
Normal file
5
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_4.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# This is fine
|
||||
def foo():
|
||||
pass
|
||||
|
||||
# Some comment
|
||||
16
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_11.pyi
vendored
Normal file
16
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_11.pyi
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
"""Test case: strings used within calls within type annotations."""
|
||||
|
||||
from typing import Callable
|
||||
|
||||
import bpy
|
||||
from mypy_extensions import VarArg
|
||||
|
||||
class LightShow(bpy.types.Operator):
|
||||
label = "Create Character"
|
||||
name = "lightshow.letter_creation"
|
||||
|
||||
filepath: bpy.props.StringProperty(subtype="FILE_PATH") # OK
|
||||
|
||||
|
||||
def f(x: Callable[[VarArg("os")], None]): # F821
|
||||
pass
|
||||
44
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.py
vendored
Normal file
44
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.py
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Tests for constructs allowed in `.pyi` stub files but not at runtime"""
|
||||
|
||||
from typing import Optional, TypeAlias, Union
|
||||
|
||||
__version__: str
|
||||
__author__: str
|
||||
|
||||
# Forward references:
|
||||
MaybeCStr: TypeAlias = Optional[CStr] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
MaybeCStr2: TypeAlias = Optional["CStr"] # always okay
|
||||
CStr: TypeAlias = Union[C, str] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
CStr2: TypeAlias = Union["C", str] # always okay
|
||||
|
||||
# References to a class from inside the class:
|
||||
class C:
|
||||
other: C = ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
other2: "C" = ... # always okay
|
||||
def from_str(self, s: str) -> C: ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
def from_str2(self, s: str) -> "C": ... # always okay
|
||||
|
||||
# Circular references:
|
||||
class A:
|
||||
foo: B # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
foo2: "B" # always okay
|
||||
bar: dict[str, B] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
bar2: dict[str, "A"] # always okay
|
||||
|
||||
class B:
|
||||
foo: A # always okay
|
||||
bar: dict[str, A] # always okay
|
||||
|
||||
class Leaf: ...
|
||||
class Tree(list[Tree | Leaf]): ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
class Tree2(list["Tree | Leaf"]): ... # always okay
|
||||
|
||||
# Annotations are treated as assignments in .pyi files, but not in .py files
|
||||
class MyClass:
|
||||
foo: int
|
||||
bar = foo # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
bar = "foo" # always okay
|
||||
|
||||
baz: MyClass
|
||||
eggs = baz # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
eggs = "baz" # always okay
|
||||
44
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.pyi
vendored
Normal file
44
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.pyi
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Tests for constructs allowed in `.pyi` stub files but not at runtime"""
|
||||
|
||||
from typing import Optional, TypeAlias, Union
|
||||
|
||||
__version__: str
|
||||
__author__: str
|
||||
|
||||
# Forward references:
|
||||
MaybeCStr: TypeAlias = Optional[CStr] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
MaybeCStr2: TypeAlias = Optional["CStr"] # always okay
|
||||
CStr: TypeAlias = Union[C, str] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
CStr2: TypeAlias = Union["C", str] # always okay
|
||||
|
||||
# References to a class from inside the class:
|
||||
class C:
|
||||
other: C = ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
other2: "C" = ... # always okay
|
||||
def from_str(self, s: str) -> C: ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
def from_str2(self, s: str) -> "C": ... # always okay
|
||||
|
||||
# Circular references:
|
||||
class A:
|
||||
foo: B # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
foo2: "B" # always okay
|
||||
bar: dict[str, B] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
bar2: dict[str, "A"] # always okay
|
||||
|
||||
class B:
|
||||
foo: A # always okay
|
||||
bar: dict[str, A] # always okay
|
||||
|
||||
class Leaf: ...
|
||||
class Tree(list[Tree | Leaf]): ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
class Tree2(list["Tree | Leaf"]): ... # always okay
|
||||
|
||||
# Annotations are treated as assignments in .pyi files, but not in .py files
|
||||
class MyClass:
|
||||
foo: int
|
||||
bar = foo # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
bar = "foo" # always okay
|
||||
|
||||
baz: MyClass
|
||||
eggs = baz # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
eggs = "baz" # always okay
|
||||
48
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_27.py
vendored
Normal file
48
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_27.py
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
"""Tests for constructs allowed when `__future__` annotations are enabled but not otherwise"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional, TypeAlias, Union
|
||||
|
||||
__version__: str
|
||||
__author__: str
|
||||
|
||||
# References to a class from inside the class:
|
||||
class C:
|
||||
other: C = ... # valid when `__future__.annotations are enabled
|
||||
other2: "C" = ... # always okay
|
||||
def from_str(self, s: str) -> C: ... # valid when `__future__.annotations are enabled
|
||||
def from_str2(self, s: str) -> "C": ... # always okay
|
||||
|
||||
# Circular references:
|
||||
class A:
|
||||
foo: B # valid when `__future__.annotations are enabled
|
||||
foo2: "B" # always okay
|
||||
bar: dict[str, B] # valid when `__future__.annotations are enabled
|
||||
bar2: dict[str, "A"] # always okay
|
||||
|
||||
class B:
|
||||
foo: A # always okay
|
||||
bar: dict[str, A] # always okay
|
||||
|
||||
# Annotations are treated as assignments in .pyi files, but not in .py files
|
||||
class MyClass:
|
||||
foo: int
|
||||
bar = foo # Still invalid even when `__future__.annotations` are enabled
|
||||
bar = "foo" # always okay
|
||||
|
||||
baz: MyClass
|
||||
eggs = baz # Still invalid even when `__future__.annotations` are enabled
|
||||
eggs = "baz" # always okay
|
||||
|
||||
# Forward references:
|
||||
MaybeDStr: TypeAlias = Optional[DStr] # Still invalid even when `__future__.annotations` are enabled
|
||||
MaybeDStr2: TypeAlias = Optional["DStr"] # always okay
|
||||
DStr: TypeAlias = Union[D, str] # Still invalid even when `__future__.annotations` are enabled
|
||||
DStr2: TypeAlias = Union["D", str] # always okay
|
||||
|
||||
class D: ...
|
||||
|
||||
# More circular references
|
||||
class Leaf: ...
|
||||
class Tree(list[Tree | Leaf]): ... # Still invalid even when `__future__.annotations` are enabled
|
||||
class Tree2(list["Tree | Leaf"]): ... # always okay
|
||||
10
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_5.pyi
vendored
Normal file
10
crates/ruff_linter/resources/test/fixtures/pyflakes/F821_5.pyi
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Test: inner class annotation."""
|
||||
|
||||
class RandomClass:
|
||||
def bad_func(self) -> InnerClass: ... # F821
|
||||
def good_func(self) -> OuterClass.InnerClass: ... # Okay
|
||||
|
||||
class OuterClass:
|
||||
class InnerClass: ...
|
||||
|
||||
def good_func(self) -> InnerClass: ... # Okay
|
||||
4
crates/ruff_linter/resources/test/fixtures/pyflakes/F822_0.pyi
vendored
Normal file
4
crates/ruff_linter/resources/test/fixtures/pyflakes/F822_0.pyi
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
a = 1
|
||||
b: int # Considered a binding in a `.pyi` stub file, not in a `.py` runtime file
|
||||
|
||||
__all__ = ["a", "b", "c"] # c is flagged as missing; b is not
|
||||
@@ -54,3 +54,15 @@ class StudentE(StudentD):
|
||||
|
||||
def setup(self):
|
||||
pass
|
||||
|
||||
|
||||
class StudentF(object):
|
||||
__slots__ = ("name", "__dict__")
|
||||
|
||||
def __init__(self, name, middle_name):
|
||||
self.name = name
|
||||
self.middle_name = middle_name # [assigning-non-slot]
|
||||
self.setup()
|
||||
|
||||
def setup(self):
|
||||
pass
|
||||
|
||||
@@ -632,6 +632,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
]) {
|
||||
flake8_bandit::rules::shell_injection(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::DjangoExtra) {
|
||||
flake8_bandit::rules::django_extra(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::DjangoRawSql) {
|
||||
flake8_bandit::rules::django_raw_sql(checker, call);
|
||||
}
|
||||
|
||||
@@ -938,6 +938,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
&& !self.semantic.in_deferred_type_definition()
|
||||
&& self.semantic.in_type_definition()
|
||||
&& self.semantic.future_annotations()
|
||||
&& (self.semantic.in_typing_only_annotation() || self.source_type.is_stub())
|
||||
{
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = expr {
|
||||
self.visit.string_type_definitions.push((
|
||||
@@ -1839,11 +1840,13 @@ impl<'a> Checker<'a> {
|
||||
flags.insert(BindingFlags::UNPACKED_ASSIGNMENT);
|
||||
}
|
||||
|
||||
// Match the left-hand side of an annotated assignment, like `x` in `x: int`.
|
||||
// Match the left-hand side of an annotated assignment without a value,
|
||||
// like `x` in `x: int`. N.B. In stub files, these should be viewed
|
||||
// as assignments on par with statements such as `x: int = 5`.
|
||||
if matches!(
|
||||
parent,
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign { value: None, .. })
|
||||
) && !self.semantic.in_annotation()
|
||||
) && !(self.semantic.in_annotation() || self.source_type.is_stub())
|
||||
{
|
||||
self.add_binding(id, expr.range(), BindingKind::Annotation, flags);
|
||||
return;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::line_width::IndentWidth;
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_python_parser::TokenKind;
|
||||
use ruff_source_file::Locator;
|
||||
@@ -9,8 +10,8 @@ use ruff_text_size::{Ranged, TextRange};
|
||||
use crate::registry::AsRule;
|
||||
use crate::rules::pycodestyle::rules::logical_lines::{
|
||||
extraneous_whitespace, indentation, missing_whitespace, missing_whitespace_after_keyword,
|
||||
missing_whitespace_around_operator, space_after_comma, space_around_operator,
|
||||
whitespace_around_keywords, whitespace_around_named_parameter_equals,
|
||||
missing_whitespace_around_operator, redundant_backslash, space_after_comma,
|
||||
space_around_operator, whitespace_around_keywords, whitespace_around_named_parameter_equals,
|
||||
whitespace_before_comment, whitespace_before_parameters, LogicalLines, TokenFlags,
|
||||
};
|
||||
use crate::settings::LinterSettings;
|
||||
@@ -35,6 +36,7 @@ pub(crate) fn expand_indent(line: &str, indent_width: IndentWidth) -> usize {
|
||||
pub(crate) fn check_logical_lines(
|
||||
tokens: &[LexResult],
|
||||
locator: &Locator,
|
||||
indexer: &Indexer,
|
||||
stylist: &Stylist,
|
||||
settings: &LinterSettings,
|
||||
) -> Vec<Diagnostic> {
|
||||
@@ -73,6 +75,7 @@ pub(crate) fn check_logical_lines(
|
||||
|
||||
if line.flags().contains(TokenFlags::BRACKET) {
|
||||
whitespace_before_parameters(&line, &mut context);
|
||||
redundant_backslash(&line, locator, indexer, &mut context);
|
||||
}
|
||||
|
||||
// Extract the indentation level.
|
||||
|
||||
@@ -203,6 +203,10 @@ pub(crate) fn check_tokens(
|
||||
flake8_fixme::rules::todos(&mut diagnostics, &todo_comments);
|
||||
}
|
||||
|
||||
if settings.rules.enabled(Rule::TooManyNewlinesAtEndOfFile) {
|
||||
pycodestyle::rules::too_many_newlines_at_end_of_file(&mut diagnostics, tokens);
|
||||
}
|
||||
|
||||
diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.kind.rule()));
|
||||
|
||||
diagnostics
|
||||
|
||||
@@ -146,6 +146,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pycodestyle, "E401") => (RuleGroup::Stable, rules::pycodestyle::rules::MultipleImportsOnOneLine),
|
||||
(Pycodestyle, "E402") => (RuleGroup::Stable, rules::pycodestyle::rules::ModuleImportNotAtTopOfFile),
|
||||
(Pycodestyle, "E501") => (RuleGroup::Stable, rules::pycodestyle::rules::LineTooLong),
|
||||
(Pycodestyle, "E502") => (RuleGroup::Preview, rules::pycodestyle::rules::logical_lines::RedundantBackslash),
|
||||
(Pycodestyle, "E701") => (RuleGroup::Stable, rules::pycodestyle::rules::MultipleStatementsOnOneLineColon),
|
||||
(Pycodestyle, "E702") => (RuleGroup::Stable, rules::pycodestyle::rules::MultipleStatementsOnOneLineSemicolon),
|
||||
(Pycodestyle, "E703") => (RuleGroup::Stable, rules::pycodestyle::rules::UselessSemicolon),
|
||||
@@ -167,6 +168,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pycodestyle, "W291") => (RuleGroup::Stable, rules::pycodestyle::rules::TrailingWhitespace),
|
||||
(Pycodestyle, "W292") => (RuleGroup::Stable, rules::pycodestyle::rules::MissingNewlineAtEndOfFile),
|
||||
(Pycodestyle, "W293") => (RuleGroup::Stable, rules::pycodestyle::rules::BlankLineWithWhitespace),
|
||||
(Pycodestyle, "W391") => (RuleGroup::Preview, rules::pycodestyle::rules::TooManyNewlinesAtEndOfFile),
|
||||
(Pycodestyle, "W505") => (RuleGroup::Stable, rules::pycodestyle::rules::DocLineTooLong),
|
||||
(Pycodestyle, "W605") => (RuleGroup::Stable, rules::pycodestyle::rules::InvalidEscapeSequence),
|
||||
|
||||
@@ -680,6 +682,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Bandit, "607") => (RuleGroup::Stable, rules::flake8_bandit::rules::StartProcessWithPartialPath),
|
||||
(Flake8Bandit, "608") => (RuleGroup::Stable, rules::flake8_bandit::rules::HardcodedSQLExpression),
|
||||
(Flake8Bandit, "609") => (RuleGroup::Stable, rules::flake8_bandit::rules::UnixCommandWildcardInjection),
|
||||
(Flake8Bandit, "610") => (RuleGroup::Preview, rules::flake8_bandit::rules::DjangoExtra),
|
||||
(Flake8Bandit, "611") => (RuleGroup::Stable, rules::flake8_bandit::rules::DjangoRawSql),
|
||||
(Flake8Bandit, "612") => (RuleGroup::Stable, rules::flake8_bandit::rules::LoggingConfigInsecureListen),
|
||||
(Flake8Bandit, "701") => (RuleGroup::Stable, rules::flake8_bandit::rules::Jinja2AutoescapeFalse),
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use libcst_native::{
|
||||
Expression, Name, ParenthesizableWhitespace, SimpleWhitespace, UnaryOperation,
|
||||
Expression, LeftParen, Name, ParenthesizableWhitespace, ParenthesizedNode, RightParen,
|
||||
SimpleWhitespace, UnaryOperation,
|
||||
};
|
||||
|
||||
/// Return a [`ParenthesizableWhitespace`] containing a single space.
|
||||
@@ -24,6 +25,7 @@ pub(crate) fn negate<'a>(expression: &Expression<'a>) -> Expression<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
// If the expression is `True` or `False`, return the opposite.
|
||||
if let Expression::Name(ref expression) = expression {
|
||||
match expression.value {
|
||||
"True" => {
|
||||
@@ -44,11 +46,32 @@ pub(crate) fn negate<'a>(expression: &Expression<'a>) -> Expression<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
// If the expression is higher precedence than the unary `not`, we need to wrap it in
|
||||
// parentheses.
|
||||
//
|
||||
// For example: given `a and b`, we need to return `not (a and b)`, rather than `not a and b`.
|
||||
//
|
||||
// See: <https://docs.python.org/3/reference/expressions.html#operator-precedence>
|
||||
let needs_parens = matches!(
|
||||
expression,
|
||||
Expression::BooleanOperation(_)
|
||||
| Expression::IfExp(_)
|
||||
| Expression::Lambda(_)
|
||||
| Expression::NamedExpr(_)
|
||||
);
|
||||
let has_parens = !expression.lpar().is_empty() && !expression.rpar().is_empty();
|
||||
// Otherwise, wrap in a `not` operator.
|
||||
Expression::UnaryOperation(Box::new(UnaryOperation {
|
||||
operator: libcst_native::UnaryOp::Not {
|
||||
whitespace_after: space(),
|
||||
},
|
||||
expression: Box::new(expression.clone()),
|
||||
expression: Box::new(if needs_parens && !has_parens {
|
||||
expression
|
||||
.clone()
|
||||
.with_parens(LeftParen::default(), RightParen::default())
|
||||
} else {
|
||||
expression.clone()
|
||||
}),
|
||||
lpar: vec![],
|
||||
rpar: vec![],
|
||||
}))
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
//! and subject to change drastically.
|
||||
//!
|
||||
//! [Ruff]: https://github.com/astral-sh/ruff
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
#[cfg(feature = "clap")]
|
||||
pub use registry::clap_completion::RuleParser;
|
||||
|
||||
@@ -132,7 +132,7 @@ pub fn check_path(
|
||||
.any(|rule_code| rule_code.lint_source().is_logical_lines())
|
||||
{
|
||||
diagnostics.extend(crate::checkers::logical_lines::check_logical_lines(
|
||||
&tokens, locator, stylist, settings,
|
||||
&tokens, locator, indexer, stylist, settings,
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
@@ -300,6 +300,7 @@ impl Rule {
|
||||
| Rule::SingleLineImplicitStringConcatenation
|
||||
| Rule::TabIndentation
|
||||
| Rule::TooManyBlankLines
|
||||
| Rule::TooManyNewlinesAtEndOfFile
|
||||
| Rule::TrailingCommaOnBareTuple
|
||||
| Rule::TypeCommentInStub
|
||||
| Rule::UselessSemicolon
|
||||
@@ -327,6 +328,7 @@ impl Rule {
|
||||
| Rule::NoSpaceAfterBlockComment
|
||||
| Rule::NoSpaceAfterInlineComment
|
||||
| Rule::OverIndented
|
||||
| Rule::RedundantBackslash
|
||||
| Rule::TabAfterComma
|
||||
| Rule::TabAfterKeyword
|
||||
| Rule::TabAfterOperator
|
||||
|
||||
@@ -48,6 +48,7 @@ mod tests {
|
||||
#[test_case(Rule::SuspiciousEvalUsage, Path::new("S307.py"))]
|
||||
#[test_case(Rule::SuspiciousMarkSafeUsage, Path::new("S308.py"))]
|
||||
#[test_case(Rule::SuspiciousURLOpenUsage, Path::new("S310.py"))]
|
||||
#[test_case(Rule::SuspiciousNonCryptographicRandomUsage, Path::new("S311.py"))]
|
||||
#[test_case(Rule::SuspiciousTelnetUsage, Path::new("S312.py"))]
|
||||
#[test_case(Rule::SuspiciousTelnetlibImport, Path::new("S401.py"))]
|
||||
#[test_case(Rule::SuspiciousFtplibImport, Path::new("S402.py"))]
|
||||
@@ -68,6 +69,7 @@ mod tests {
|
||||
#[test_case(Rule::UnixCommandWildcardInjection, Path::new("S609.py"))]
|
||||
#[test_case(Rule::UnsafeYAMLLoad, Path::new("S506.py"))]
|
||||
#[test_case(Rule::WeakCryptographicKey, Path::new("S505.py"))]
|
||||
#[test_case(Rule::DjangoExtra, Path::new("S610.py"))]
|
||||
#[test_case(Rule::DjangoRawSql, Path::new("S611.py"))]
|
||||
#[test_case(Rule::TarfileUnsafeMembers, Path::new("S202.py"))]
|
||||
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprAttribute, ExprDict, ExprList};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of Django's `extra` function.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Django's `extra` function can be used to execute arbitrary SQL queries,
|
||||
/// which can in turn lead to SQL injection vulnerabilities.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from django.contrib.auth.models import User
|
||||
///
|
||||
/// User.objects.all().extra(select={"test": "%secure" % "nos"})
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Django documentation: SQL injection protection](https://docs.djangoproject.com/en/dev/topics/security/#sql-injection-protection)
|
||||
/// - [Common Weakness Enumeration: CWE-89](https://cwe.mitre.org/data/definitions/89.html)
|
||||
#[violation]
|
||||
pub struct DjangoExtra;
|
||||
|
||||
impl Violation for DjangoExtra {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Use of Django `extra` can lead to SQL injection vulnerabilities")
|
||||
}
|
||||
}
|
||||
|
||||
/// S610
|
||||
pub(crate) fn django_extra(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
let Expr::Attribute(ExprAttribute { attr, .. }) = call.func.as_ref() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if attr.as_str() != "extra" {
|
||||
return;
|
||||
}
|
||||
|
||||
if is_call_insecure(call) {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(DjangoExtra, call.arguments.range()));
|
||||
}
|
||||
}
|
||||
|
||||
fn is_call_insecure(call: &ast::ExprCall) -> bool {
|
||||
for (argument_name, position) in [("select", 0), ("where", 1), ("tables", 3)] {
|
||||
if let Some(argument) = call.arguments.find_argument(argument_name, position) {
|
||||
match argument_name {
|
||||
"select" => match argument {
|
||||
Expr::Dict(ExprDict { keys, values, .. }) => {
|
||||
if !keys.iter().flatten().all(Expr::is_string_literal_expr) {
|
||||
return true;
|
||||
}
|
||||
if !values.iter().all(Expr::is_string_literal_expr) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
_ => return true,
|
||||
},
|
||||
"where" | "tables" => match argument {
|
||||
Expr::List(ExprList { elts, .. }) => {
|
||||
if !elts.iter().all(Expr::is_string_literal_expr) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
_ => return true,
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
@@ -9,7 +9,8 @@ use crate::checkers::ast::Checker;
|
||||
use super::super::helpers::string_literal;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of weak or broken cryptographic hash functions.
|
||||
/// Checks for uses of weak or broken cryptographic hash functions in
|
||||
/// `hashlib` and `crypt` libraries.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Weak or broken cryptographic hash functions may be susceptible to
|
||||
@@ -43,68 +44,134 @@ use super::super::helpers::string_literal;
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `hashlib` — Secure hashes and message digests](https://docs.python.org/3/library/hashlib.html)
|
||||
/// - [Python documentation: `crypt` — Function to check Unix passwords](https://docs.python.org/3/library/crypt.html)
|
||||
/// - [Common Weakness Enumeration: CWE-327](https://cwe.mitre.org/data/definitions/327.html)
|
||||
/// - [Common Weakness Enumeration: CWE-328](https://cwe.mitre.org/data/definitions/328.html)
|
||||
/// - [Common Weakness Enumeration: CWE-916](https://cwe.mitre.org/data/definitions/916.html)
|
||||
#[violation]
|
||||
pub struct HashlibInsecureHashFunction {
|
||||
library: String,
|
||||
string: String,
|
||||
}
|
||||
|
||||
impl Violation for HashlibInsecureHashFunction {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let HashlibInsecureHashFunction { string } = self;
|
||||
format!("Probable use of insecure hash functions in `hashlib`: `{string}`")
|
||||
let HashlibInsecureHashFunction { library, string } = self;
|
||||
format!("Probable use of insecure hash functions in `{library}`: `{string}`")
|
||||
}
|
||||
}
|
||||
|
||||
/// S324
|
||||
pub(crate) fn hashlib_insecure_hash_functions(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
if let Some(hashlib_call) = checker
|
||||
if let Some(weak_hash_call) = checker
|
||||
.semantic()
|
||||
.resolve_qualified_name(&call.func)
|
||||
.and_then(|qualified_name| match qualified_name.segments() {
|
||||
["hashlib", "new"] => Some(HashlibCall::New),
|
||||
["hashlib", "md4"] => Some(HashlibCall::WeakHash("md4")),
|
||||
["hashlib", "md5"] => Some(HashlibCall::WeakHash("md5")),
|
||||
["hashlib", "sha"] => Some(HashlibCall::WeakHash("sha")),
|
||||
["hashlib", "sha1"] => Some(HashlibCall::WeakHash("sha1")),
|
||||
["hashlib", "new"] => Some(WeakHashCall::Hashlib {
|
||||
call: HashlibCall::New,
|
||||
}),
|
||||
["hashlib", "md4"] => Some(WeakHashCall::Hashlib {
|
||||
call: HashlibCall::WeakHash("md4"),
|
||||
}),
|
||||
["hashlib", "md5"] => Some(WeakHashCall::Hashlib {
|
||||
call: HashlibCall::WeakHash("md5"),
|
||||
}),
|
||||
["hashlib", "sha"] => Some(WeakHashCall::Hashlib {
|
||||
call: HashlibCall::WeakHash("sha"),
|
||||
}),
|
||||
["hashlib", "sha1"] => Some(WeakHashCall::Hashlib {
|
||||
call: HashlibCall::WeakHash("sha1"),
|
||||
}),
|
||||
["crypt", "crypt" | "mksalt"] => Some(WeakHashCall::Crypt),
|
||||
_ => None,
|
||||
})
|
||||
{
|
||||
if !is_used_for_security(&call.arguments) {
|
||||
return;
|
||||
}
|
||||
match hashlib_call {
|
||||
HashlibCall::New => {
|
||||
if let Some(name_arg) = call.arguments.find_argument("name", 0) {
|
||||
if let Some(hash_func_name) = string_literal(name_arg) {
|
||||
// `hashlib.new` accepts both lowercase and uppercase names for hash
|
||||
// functions.
|
||||
if matches!(
|
||||
hash_func_name,
|
||||
"md4" | "md5" | "sha" | "sha1" | "MD4" | "MD5" | "SHA" | "SHA1"
|
||||
) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HashlibInsecureHashFunction {
|
||||
string: hash_func_name.to_string(),
|
||||
},
|
||||
name_arg.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
match weak_hash_call {
|
||||
WeakHashCall::Hashlib { call: hashlib_call } => {
|
||||
detect_insecure_hashlib_calls(checker, call, hashlib_call);
|
||||
}
|
||||
HashlibCall::WeakHash(func_name) => {
|
||||
WeakHashCall::Crypt => detect_insecure_crypt_calls(checker, call),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_insecure_hashlib_calls(
|
||||
checker: &mut Checker,
|
||||
call: &ast::ExprCall,
|
||||
hashlib_call: HashlibCall,
|
||||
) {
|
||||
if !is_used_for_security(&call.arguments) {
|
||||
return;
|
||||
}
|
||||
|
||||
match hashlib_call {
|
||||
HashlibCall::New => {
|
||||
let Some(name_arg) = call.arguments.find_argument("name", 0) else {
|
||||
return;
|
||||
};
|
||||
let Some(hash_func_name) = string_literal(name_arg) else {
|
||||
return;
|
||||
};
|
||||
|
||||
// `hashlib.new` accepts both lowercase and uppercase names for hash
|
||||
// functions.
|
||||
if matches!(
|
||||
hash_func_name,
|
||||
"md4" | "md5" | "sha" | "sha1" | "MD4" | "MD5" | "SHA" | "SHA1"
|
||||
) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HashlibInsecureHashFunction {
|
||||
string: (*func_name).to_string(),
|
||||
library: "hashlib".to_string(),
|
||||
string: hash_func_name.to_string(),
|
||||
},
|
||||
call.func.range(),
|
||||
name_arg.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
HashlibCall::WeakHash(func_name) => {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HashlibInsecureHashFunction {
|
||||
library: "hashlib".to_string(),
|
||||
string: (*func_name).to_string(),
|
||||
},
|
||||
call.func.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_insecure_crypt_calls(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
let Some(method) = checker
|
||||
.semantic()
|
||||
.resolve_qualified_name(&call.func)
|
||||
.and_then(|qualified_name| match qualified_name.segments() {
|
||||
["crypt", "crypt"] => Some(("salt", 1)),
|
||||
["crypt", "mksalt"] => Some(("method", 0)),
|
||||
_ => None,
|
||||
})
|
||||
.and_then(|(argument_name, position)| {
|
||||
call.arguments.find_argument(argument_name, position)
|
||||
})
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(qualified_name) = checker.semantic().resolve_qualified_name(method) else {
|
||||
return;
|
||||
};
|
||||
|
||||
if matches!(
|
||||
qualified_name.segments(),
|
||||
["crypt", "METHOD_CRYPT" | "METHOD_MD5" | "METHOD_BLOWFISH"]
|
||||
) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HashlibInsecureHashFunction {
|
||||
library: "crypt".to_string(),
|
||||
string: qualified_name.to_string(),
|
||||
},
|
||||
method.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,7 +181,13 @@ fn is_used_for_security(arguments: &Arguments) -> bool {
|
||||
.map_or(true, |keyword| !is_const_false(&keyword.value))
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
enum WeakHashCall {
|
||||
Hashlib { call: HashlibCall },
|
||||
Crypt,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
enum HashlibCall {
|
||||
New,
|
||||
WeakHash(&'static str),
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
pub(crate) use assert_used::*;
|
||||
pub(crate) use bad_file_permissions::*;
|
||||
pub(crate) use django_extra::*;
|
||||
pub(crate) use django_raw_sql::*;
|
||||
pub(crate) use exec_used::*;
|
||||
pub(crate) use flask_debug_true::*;
|
||||
@@ -33,6 +34,7 @@ pub(crate) use weak_cryptographic_key::*;
|
||||
|
||||
mod assert_used;
|
||||
mod bad_file_permissions;
|
||||
mod django_extra;
|
||||
mod django_raw_sql;
|
||||
mod exec_used;
|
||||
mod flask_debug_true;
|
||||
|
||||
@@ -433,6 +433,7 @@ fn get_call_kind(func: &Expr, semantic: &SemanticModel) -> Option<CallKind> {
|
||||
"Popen" | "call" | "check_call" | "check_output" | "run" => {
|
||||
Some(CallKind::Subprocess)
|
||||
}
|
||||
"getoutput" | "getstatusoutput" => Some(CallKind::Shell),
|
||||
_ => None,
|
||||
},
|
||||
"popen2" => match submodule {
|
||||
|
||||
@@ -867,7 +867,7 @@ pub(crate) fn suspicious_function_call(checker: &mut Checker, call: &ExprCall) {
|
||||
["urllib", "request", "URLopener" | "FancyURLopener"] |
|
||||
["six", "moves", "urllib", "request", "URLopener" | "FancyURLopener"] => Some(SuspiciousURLOpenUsage.into()),
|
||||
// NonCryptographicRandom
|
||||
["random", "random" | "randrange" | "randint" | "choice" | "choices" | "uniform" | "triangular"] => Some(SuspiciousNonCryptographicRandomUsage.into()),
|
||||
["random", "Random" | "random" | "randrange" | "randint" | "choice" | "choices" | "uniform" | "triangular" | "randbytes"] => Some(SuspiciousNonCryptographicRandomUsage.into()),
|
||||
// UnverifiedContext
|
||||
["ssl", "_create_unverified_context"] => Some(SuspiciousUnverifiedContextUsage.into()),
|
||||
// XMLCElementTree
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S311.py:10:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
9 | # Errors
|
||||
10 | random.Random()
|
||||
| ^^^^^^^^^^^^^^^ S311
|
||||
11 | random.random()
|
||||
12 | random.randrange()
|
||||
|
|
||||
|
||||
S311.py:11:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
9 | # Errors
|
||||
10 | random.Random()
|
||||
11 | random.random()
|
||||
| ^^^^^^^^^^^^^^^ S311
|
||||
12 | random.randrange()
|
||||
13 | random.randint()
|
||||
|
|
||||
|
||||
S311.py:12:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
10 | random.Random()
|
||||
11 | random.random()
|
||||
12 | random.randrange()
|
||||
| ^^^^^^^^^^^^^^^^^^ S311
|
||||
13 | random.randint()
|
||||
14 | random.choice()
|
||||
|
|
||||
|
||||
S311.py:13:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
11 | random.random()
|
||||
12 | random.randrange()
|
||||
13 | random.randint()
|
||||
| ^^^^^^^^^^^^^^^^ S311
|
||||
14 | random.choice()
|
||||
15 | random.choices()
|
||||
|
|
||||
|
||||
S311.py:14:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
12 | random.randrange()
|
||||
13 | random.randint()
|
||||
14 | random.choice()
|
||||
| ^^^^^^^^^^^^^^^ S311
|
||||
15 | random.choices()
|
||||
16 | random.uniform()
|
||||
|
|
||||
|
||||
S311.py:15:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
13 | random.randint()
|
||||
14 | random.choice()
|
||||
15 | random.choices()
|
||||
| ^^^^^^^^^^^^^^^^ S311
|
||||
16 | random.uniform()
|
||||
17 | random.triangular()
|
||||
|
|
||||
|
||||
S311.py:16:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
14 | random.choice()
|
||||
15 | random.choices()
|
||||
16 | random.uniform()
|
||||
| ^^^^^^^^^^^^^^^^ S311
|
||||
17 | random.triangular()
|
||||
18 | random.randbytes()
|
||||
|
|
||||
|
||||
S311.py:17:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
15 | random.choices()
|
||||
16 | random.uniform()
|
||||
17 | random.triangular()
|
||||
| ^^^^^^^^^^^^^^^^^^^ S311
|
||||
18 | random.randbytes()
|
||||
|
|
||||
|
||||
S311.py:18:1: S311 Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
|
|
||||
16 | random.uniform()
|
||||
17 | random.triangular()
|
||||
18 | random.randbytes()
|
||||
| ^^^^^^^^^^^^^^^^^^ S311
|
||||
19 |
|
||||
20 | # Unrelated
|
||||
|
|
||||
@@ -3,131 +3,195 @@ source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S324.py:7:13: S324 Probable use of insecure hash functions in `hashlib`: `md5`
|
||||
|
|
||||
5 | # Invalid
|
||||
6 |
|
||||
6 | # Errors
|
||||
7 | hashlib.new('md5')
|
||||
| ^^^^^ S324
|
||||
8 |
|
||||
9 | hashlib.new('md4', b'test')
|
||||
8 | hashlib.new('md4', b'test')
|
||||
9 | hashlib.new(name='md5', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:9:13: S324 Probable use of insecure hash functions in `hashlib`: `md4`
|
||||
S324.py:8:13: S324 Probable use of insecure hash functions in `hashlib`: `md4`
|
||||
|
|
||||
6 | # Errors
|
||||
7 | hashlib.new('md5')
|
||||
8 | hashlib.new('md4', b'test')
|
||||
| ^^^^^ S324
|
||||
9 | hashlib.new(name='md5', data=b'test')
|
||||
10 | hashlib.new('MD4', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:9:18: S324 Probable use of insecure hash functions in `hashlib`: `md5`
|
||||
|
|
||||
7 | hashlib.new('md5')
|
||||
8 |
|
||||
9 | hashlib.new('md4', b'test')
|
||||
| ^^^^^ S324
|
||||
10 |
|
||||
11 | hashlib.new(name='md5', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:11:18: S324 Probable use of insecure hash functions in `hashlib`: `md5`
|
||||
|
|
||||
9 | hashlib.new('md4', b'test')
|
||||
10 |
|
||||
11 | hashlib.new(name='md5', data=b'test')
|
||||
8 | hashlib.new('md4', b'test')
|
||||
9 | hashlib.new(name='md5', data=b'test')
|
||||
| ^^^^^ S324
|
||||
12 |
|
||||
13 | hashlib.new('MD4', data=b'test')
|
||||
10 | hashlib.new('MD4', data=b'test')
|
||||
11 | hashlib.new('sha1')
|
||||
|
|
||||
|
||||
S324.py:13:13: S324 Probable use of insecure hash functions in `hashlib`: `MD4`
|
||||
S324.py:10:13: S324 Probable use of insecure hash functions in `hashlib`: `MD4`
|
||||
|
|
||||
11 | hashlib.new(name='md5', data=b'test')
|
||||
12 |
|
||||
13 | hashlib.new('MD4', data=b'test')
|
||||
8 | hashlib.new('md4', b'test')
|
||||
9 | hashlib.new(name='md5', data=b'test')
|
||||
10 | hashlib.new('MD4', data=b'test')
|
||||
| ^^^^^ S324
|
||||
14 |
|
||||
15 | hashlib.new('sha1')
|
||||
11 | hashlib.new('sha1')
|
||||
12 | hashlib.new('sha1', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:15:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
S324.py:11:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
13 | hashlib.new('MD4', data=b'test')
|
||||
14 |
|
||||
15 | hashlib.new('sha1')
|
||||
9 | hashlib.new(name='md5', data=b'test')
|
||||
10 | hashlib.new('MD4', data=b'test')
|
||||
11 | hashlib.new('sha1')
|
||||
| ^^^^^^ S324
|
||||
16 |
|
||||
17 | hashlib.new('sha1', data=b'test')
|
||||
12 | hashlib.new('sha1', data=b'test')
|
||||
13 | hashlib.new('sha', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:12:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
10 | hashlib.new('MD4', data=b'test')
|
||||
11 | hashlib.new('sha1')
|
||||
12 | hashlib.new('sha1', data=b'test')
|
||||
| ^^^^^^ S324
|
||||
13 | hashlib.new('sha', data=b'test')
|
||||
14 | hashlib.new(name='SHA', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:13:13: S324 Probable use of insecure hash functions in `hashlib`: `sha`
|
||||
|
|
||||
11 | hashlib.new('sha1')
|
||||
12 | hashlib.new('sha1', data=b'test')
|
||||
13 | hashlib.new('sha', data=b'test')
|
||||
| ^^^^^ S324
|
||||
14 | hashlib.new(name='SHA', data=b'test')
|
||||
15 | hashlib.sha(data=b'test')
|
||||
|
|
||||
|
||||
S324.py:14:18: S324 Probable use of insecure hash functions in `hashlib`: `SHA`
|
||||
|
|
||||
12 | hashlib.new('sha1', data=b'test')
|
||||
13 | hashlib.new('sha', data=b'test')
|
||||
14 | hashlib.new(name='SHA', data=b'test')
|
||||
| ^^^^^ S324
|
||||
15 | hashlib.sha(data=b'test')
|
||||
16 | hashlib.md5()
|
||||
|
|
||||
|
||||
S324.py:15:1: S324 Probable use of insecure hash functions in `hashlib`: `sha`
|
||||
|
|
||||
13 | hashlib.new('sha', data=b'test')
|
||||
14 | hashlib.new(name='SHA', data=b'test')
|
||||
15 | hashlib.sha(data=b'test')
|
||||
| ^^^^^^^^^^^ S324
|
||||
16 | hashlib.md5()
|
||||
17 | hashlib_new('sha1')
|
||||
|
|
||||
|
||||
S324.py:16:1: S324 Probable use of insecure hash functions in `hashlib`: `md5`
|
||||
|
|
||||
14 | hashlib.new(name='SHA', data=b'test')
|
||||
15 | hashlib.sha(data=b'test')
|
||||
16 | hashlib.md5()
|
||||
| ^^^^^^^^^^^ S324
|
||||
17 | hashlib_new('sha1')
|
||||
18 | hashlib_sha1('sha1')
|
||||
|
|
||||
|
||||
S324.py:17:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
15 | hashlib.new('sha1')
|
||||
16 |
|
||||
17 | hashlib.new('sha1', data=b'test')
|
||||
15 | hashlib.sha(data=b'test')
|
||||
16 | hashlib.md5()
|
||||
17 | hashlib_new('sha1')
|
||||
| ^^^^^^ S324
|
||||
18 |
|
||||
19 | hashlib.new('sha', data=b'test')
|
||||
18 | hashlib_sha1('sha1')
|
||||
19 | # usedforsecurity arg only available in Python 3.9+
|
||||
|
|
||||
|
||||
S324.py:19:13: S324 Probable use of insecure hash functions in `hashlib`: `sha`
|
||||
S324.py:18:1: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
17 | hashlib.new('sha1', data=b'test')
|
||||
18 |
|
||||
19 | hashlib.new('sha', data=b'test')
|
||||
| ^^^^^ S324
|
||||
20 |
|
||||
21 | hashlib.new(name='SHA', data=b'test')
|
||||
|
|
||||
|
||||
S324.py:21:18: S324 Probable use of insecure hash functions in `hashlib`: `SHA`
|
||||
|
|
||||
19 | hashlib.new('sha', data=b'test')
|
||||
20 |
|
||||
21 | hashlib.new(name='SHA', data=b'test')
|
||||
| ^^^^^ S324
|
||||
22 |
|
||||
23 | hashlib.sha(data=b'test')
|
||||
|
|
||||
|
||||
S324.py:23:1: S324 Probable use of insecure hash functions in `hashlib`: `sha`
|
||||
|
|
||||
21 | hashlib.new(name='SHA', data=b'test')
|
||||
22 |
|
||||
23 | hashlib.sha(data=b'test')
|
||||
| ^^^^^^^^^^^ S324
|
||||
24 |
|
||||
25 | hashlib.md5()
|
||||
|
|
||||
|
||||
S324.py:25:1: S324 Probable use of insecure hash functions in `hashlib`: `md5`
|
||||
|
|
||||
23 | hashlib.sha(data=b'test')
|
||||
24 |
|
||||
25 | hashlib.md5()
|
||||
| ^^^^^^^^^^^ S324
|
||||
26 |
|
||||
27 | hashlib_new('sha1')
|
||||
|
|
||||
|
||||
S324.py:27:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
25 | hashlib.md5()
|
||||
26 |
|
||||
27 | hashlib_new('sha1')
|
||||
| ^^^^^^ S324
|
||||
28 |
|
||||
29 | hashlib_sha1('sha1')
|
||||
|
|
||||
|
||||
S324.py:29:1: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
27 | hashlib_new('sha1')
|
||||
28 |
|
||||
29 | hashlib_sha1('sha1')
|
||||
16 | hashlib.md5()
|
||||
17 | hashlib_new('sha1')
|
||||
18 | hashlib_sha1('sha1')
|
||||
| ^^^^^^^^^^^^ S324
|
||||
30 |
|
||||
31 | # usedforsecurity arg only available in Python 3.9+
|
||||
19 | # usedforsecurity arg only available in Python 3.9+
|
||||
20 | hashlib.new('sha1', usedforsecurity=True)
|
||||
|
|
||||
|
||||
S324.py:32:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
S324.py:20:13: S324 Probable use of insecure hash functions in `hashlib`: `sha1`
|
||||
|
|
||||
31 | # usedforsecurity arg only available in Python 3.9+
|
||||
32 | hashlib.new('sha1', usedforsecurity=True)
|
||||
18 | hashlib_sha1('sha1')
|
||||
19 | # usedforsecurity arg only available in Python 3.9+
|
||||
20 | hashlib.new('sha1', usedforsecurity=True)
|
||||
| ^^^^^^ S324
|
||||
33 |
|
||||
34 | # Valid
|
||||
21 |
|
||||
22 | crypt.crypt("test", salt=crypt.METHOD_CRYPT)
|
||||
|
|
||||
|
||||
S324.py:22:26: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_CRYPT`
|
||||
|
|
||||
20 | hashlib.new('sha1', usedforsecurity=True)
|
||||
21 |
|
||||
22 | crypt.crypt("test", salt=crypt.METHOD_CRYPT)
|
||||
| ^^^^^^^^^^^^^^^^^^ S324
|
||||
23 | crypt.crypt("test", salt=crypt.METHOD_MD5)
|
||||
24 | crypt.crypt("test", salt=crypt.METHOD_BLOWFISH)
|
||||
|
|
||||
|
||||
S324.py:23:26: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_MD5`
|
||||
|
|
||||
22 | crypt.crypt("test", salt=crypt.METHOD_CRYPT)
|
||||
23 | crypt.crypt("test", salt=crypt.METHOD_MD5)
|
||||
| ^^^^^^^^^^^^^^^^ S324
|
||||
24 | crypt.crypt("test", salt=crypt.METHOD_BLOWFISH)
|
||||
25 | crypt.crypt("test", crypt.METHOD_BLOWFISH)
|
||||
|
|
||||
|
||||
S324.py:24:26: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_BLOWFISH`
|
||||
|
|
||||
22 | crypt.crypt("test", salt=crypt.METHOD_CRYPT)
|
||||
23 | crypt.crypt("test", salt=crypt.METHOD_MD5)
|
||||
24 | crypt.crypt("test", salt=crypt.METHOD_BLOWFISH)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ S324
|
||||
25 | crypt.crypt("test", crypt.METHOD_BLOWFISH)
|
||||
|
|
||||
|
||||
S324.py:25:21: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_BLOWFISH`
|
||||
|
|
||||
23 | crypt.crypt("test", salt=crypt.METHOD_MD5)
|
||||
24 | crypt.crypt("test", salt=crypt.METHOD_BLOWFISH)
|
||||
25 | crypt.crypt("test", crypt.METHOD_BLOWFISH)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ S324
|
||||
26 |
|
||||
27 | crypt.mksalt(crypt.METHOD_CRYPT)
|
||||
|
|
||||
|
||||
S324.py:27:14: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_CRYPT`
|
||||
|
|
||||
25 | crypt.crypt("test", crypt.METHOD_BLOWFISH)
|
||||
26 |
|
||||
27 | crypt.mksalt(crypt.METHOD_CRYPT)
|
||||
| ^^^^^^^^^^^^^^^^^^ S324
|
||||
28 | crypt.mksalt(crypt.METHOD_MD5)
|
||||
29 | crypt.mksalt(crypt.METHOD_BLOWFISH)
|
||||
|
|
||||
|
||||
S324.py:28:14: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_MD5`
|
||||
|
|
||||
27 | crypt.mksalt(crypt.METHOD_CRYPT)
|
||||
28 | crypt.mksalt(crypt.METHOD_MD5)
|
||||
| ^^^^^^^^^^^^^^^^ S324
|
||||
29 | crypt.mksalt(crypt.METHOD_BLOWFISH)
|
||||
|
|
||||
|
||||
S324.py:29:14: S324 Probable use of insecure hash functions in `crypt`: `crypt.METHOD_BLOWFISH`
|
||||
|
|
||||
27 | crypt.mksalt(crypt.METHOD_CRYPT)
|
||||
28 | crypt.mksalt(crypt.METHOD_MD5)
|
||||
29 | crypt.mksalt(crypt.METHOD_BLOWFISH)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ S324
|
||||
30 |
|
||||
31 | # OK
|
||||
|
|
||||
|
||||
@@ -1,147 +1,165 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S605.py:7:11: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
6 | # Check all shell functions.
|
||||
7 | os.system("true")
|
||||
| ^^^^^^ S605
|
||||
8 | os.popen("true")
|
||||
9 | os.popen2("true")
|
||||
|
|
||||
|
||||
S605.py:8:10: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
S605.py:8:11: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
6 | # Check all shell functions.
|
||||
7 | os.system("true")
|
||||
8 | os.popen("true")
|
||||
| ^^^^^^ S605
|
||||
9 | os.popen2("true")
|
||||
10 | os.popen3("true")
|
||||
|
|
||||
|
||||
S605.py:9:11: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
7 | os.system("true")
|
||||
8 | os.popen("true")
|
||||
9 | os.popen2("true")
|
||||
7 | # Check all shell functions.
|
||||
8 | os.system("true")
|
||||
| ^^^^^^ S605
|
||||
10 | os.popen3("true")
|
||||
11 | os.popen4("true")
|
||||
9 | os.popen("true")
|
||||
10 | os.popen2("true")
|
||||
|
|
||||
|
||||
S605.py:9:10: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
7 | # Check all shell functions.
|
||||
8 | os.system("true")
|
||||
9 | os.popen("true")
|
||||
| ^^^^^^ S605
|
||||
10 | os.popen2("true")
|
||||
11 | os.popen3("true")
|
||||
|
|
||||
|
||||
S605.py:10:11: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
8 | os.popen("true")
|
||||
9 | os.popen2("true")
|
||||
10 | os.popen3("true")
|
||||
8 | os.system("true")
|
||||
9 | os.popen("true")
|
||||
10 | os.popen2("true")
|
||||
| ^^^^^^ S605
|
||||
11 | os.popen4("true")
|
||||
12 | popen2.popen2("true")
|
||||
11 | os.popen3("true")
|
||||
12 | os.popen4("true")
|
||||
|
|
||||
|
||||
S605.py:11:11: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
9 | os.popen2("true")
|
||||
10 | os.popen3("true")
|
||||
11 | os.popen4("true")
|
||||
9 | os.popen("true")
|
||||
10 | os.popen2("true")
|
||||
11 | os.popen3("true")
|
||||
| ^^^^^^ S605
|
||||
12 | popen2.popen2("true")
|
||||
13 | popen2.popen3("true")
|
||||
12 | os.popen4("true")
|
||||
13 | popen2.popen2("true")
|
||||
|
|
||||
|
||||
S605.py:12:15: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
S605.py:12:11: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
10 | os.popen3("true")
|
||||
11 | os.popen4("true")
|
||||
12 | popen2.popen2("true")
|
||||
| ^^^^^^ S605
|
||||
13 | popen2.popen3("true")
|
||||
14 | popen2.popen4("true")
|
||||
10 | os.popen2("true")
|
||||
11 | os.popen3("true")
|
||||
12 | os.popen4("true")
|
||||
| ^^^^^^ S605
|
||||
13 | popen2.popen2("true")
|
||||
14 | popen2.popen3("true")
|
||||
|
|
||||
|
||||
S605.py:13:15: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
11 | os.popen4("true")
|
||||
12 | popen2.popen2("true")
|
||||
13 | popen2.popen3("true")
|
||||
11 | os.popen3("true")
|
||||
12 | os.popen4("true")
|
||||
13 | popen2.popen2("true")
|
||||
| ^^^^^^ S605
|
||||
14 | popen2.popen4("true")
|
||||
15 | popen2.Popen3("true")
|
||||
14 | popen2.popen3("true")
|
||||
15 | popen2.popen4("true")
|
||||
|
|
||||
|
||||
S605.py:14:15: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
12 | popen2.popen2("true")
|
||||
13 | popen2.popen3("true")
|
||||
14 | popen2.popen4("true")
|
||||
12 | os.popen4("true")
|
||||
13 | popen2.popen2("true")
|
||||
14 | popen2.popen3("true")
|
||||
| ^^^^^^ S605
|
||||
15 | popen2.Popen3("true")
|
||||
16 | popen2.Popen4("true")
|
||||
15 | popen2.popen4("true")
|
||||
16 | popen2.Popen3("true")
|
||||
|
|
||||
|
||||
S605.py:15:15: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
13 | popen2.popen3("true")
|
||||
14 | popen2.popen4("true")
|
||||
15 | popen2.Popen3("true")
|
||||
13 | popen2.popen2("true")
|
||||
14 | popen2.popen3("true")
|
||||
15 | popen2.popen4("true")
|
||||
| ^^^^^^ S605
|
||||
16 | popen2.Popen4("true")
|
||||
17 | commands.getoutput("true")
|
||||
16 | popen2.Popen3("true")
|
||||
17 | popen2.Popen4("true")
|
||||
|
|
||||
|
||||
S605.py:16:15: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
14 | popen2.popen4("true")
|
||||
15 | popen2.Popen3("true")
|
||||
16 | popen2.Popen4("true")
|
||||
14 | popen2.popen3("true")
|
||||
15 | popen2.popen4("true")
|
||||
16 | popen2.Popen3("true")
|
||||
| ^^^^^^ S605
|
||||
17 | commands.getoutput("true")
|
||||
18 | commands.getstatusoutput("true")
|
||||
17 | popen2.Popen4("true")
|
||||
18 | commands.getoutput("true")
|
||||
|
|
||||
|
||||
S605.py:17:20: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
S605.py:17:15: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
15 | popen2.Popen3("true")
|
||||
16 | popen2.Popen4("true")
|
||||
17 | commands.getoutput("true")
|
||||
15 | popen2.popen4("true")
|
||||
16 | popen2.Popen3("true")
|
||||
17 | popen2.Popen4("true")
|
||||
| ^^^^^^ S605
|
||||
18 | commands.getoutput("true")
|
||||
19 | commands.getstatusoutput("true")
|
||||
|
|
||||
|
||||
S605.py:18:20: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
16 | popen2.Popen3("true")
|
||||
17 | popen2.Popen4("true")
|
||||
18 | commands.getoutput("true")
|
||||
| ^^^^^^ S605
|
||||
18 | commands.getstatusoutput("true")
|
||||
19 | commands.getstatusoutput("true")
|
||||
20 | subprocess.getoutput("true")
|
||||
|
|
||||
|
||||
S605.py:18:26: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
S605.py:19:26: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
16 | popen2.Popen4("true")
|
||||
17 | commands.getoutput("true")
|
||||
18 | commands.getstatusoutput("true")
|
||||
17 | popen2.Popen4("true")
|
||||
18 | commands.getoutput("true")
|
||||
19 | commands.getstatusoutput("true")
|
||||
| ^^^^^^ S605
|
||||
20 | subprocess.getoutput("true")
|
||||
21 | subprocess.getstatusoutput("true")
|
||||
|
|
||||
|
||||
S605.py:23:11: S605 Starting a process with a shell, possible injection detected
|
||||
S605.py:20:22: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
21 | # Check command argument looks unsafe.
|
||||
22 | var_string = "true"
|
||||
23 | os.system(var_string)
|
||||
18 | commands.getoutput("true")
|
||||
19 | commands.getstatusoutput("true")
|
||||
20 | subprocess.getoutput("true")
|
||||
| ^^^^^^ S605
|
||||
21 | subprocess.getstatusoutput("true")
|
||||
|
|
||||
|
||||
S605.py:21:28: S605 Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
|
|
||||
19 | commands.getstatusoutput("true")
|
||||
20 | subprocess.getoutput("true")
|
||||
21 | subprocess.getstatusoutput("true")
|
||||
| ^^^^^^ S605
|
||||
|
|
||||
|
||||
S605.py:26:11: S605 Starting a process with a shell, possible injection detected
|
||||
|
|
||||
24 | # Check command argument looks unsafe.
|
||||
25 | var_string = "true"
|
||||
26 | os.system(var_string)
|
||||
| ^^^^^^^^^^ S605
|
||||
24 | os.system([var_string])
|
||||
25 | os.system([var_string, ""])
|
||||
27 | os.system([var_string])
|
||||
28 | os.system([var_string, ""])
|
||||
|
|
||||
|
||||
S605.py:24:11: S605 Starting a process with a shell, possible injection detected
|
||||
S605.py:27:11: S605 Starting a process with a shell, possible injection detected
|
||||
|
|
||||
22 | var_string = "true"
|
||||
23 | os.system(var_string)
|
||||
24 | os.system([var_string])
|
||||
25 | var_string = "true"
|
||||
26 | os.system(var_string)
|
||||
27 | os.system([var_string])
|
||||
| ^^^^^^^^^^^^ S605
|
||||
25 | os.system([var_string, ""])
|
||||
28 | os.system([var_string, ""])
|
||||
|
|
||||
|
||||
S605.py:25:11: S605 Starting a process with a shell, possible injection detected
|
||||
S605.py:28:11: S605 Starting a process with a shell, possible injection detected
|
||||
|
|
||||
23 | os.system(var_string)
|
||||
24 | os.system([var_string])
|
||||
25 | os.system([var_string, ""])
|
||||
26 | os.system(var_string)
|
||||
27 | os.system([var_string])
|
||||
28 | os.system([var_string, ""])
|
||||
| ^^^^^^^^^^^^^^^^ S605
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S610.py:4:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
3 | # Errors
|
||||
4 | User.objects.filter(username='admin').extra(dict(could_be='insecure'))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
5 | User.objects.filter(username='admin').extra(select=dict(could_be='insecure'))
|
||||
6 | User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
|
|
||||
|
||||
S610.py:5:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
3 | # Errors
|
||||
4 | User.objects.filter(username='admin').extra(dict(could_be='insecure'))
|
||||
5 | User.objects.filter(username='admin').extra(select=dict(could_be='insecure'))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
6 | User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
7 | User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
|
|
||||
|
||||
S610.py:6:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
4 | User.objects.filter(username='admin').extra(dict(could_be='insecure'))
|
||||
5 | User.objects.filter(username='admin').extra(select=dict(could_be='insecure'))
|
||||
6 | User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
7 | User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
8 | User.objects.filter(username='admin').extra(where=['%secure' % 'nos'])
|
||||
|
|
||||
|
||||
S610.py:7:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
5 | User.objects.filter(username='admin').extra(select=dict(could_be='insecure'))
|
||||
6 | User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
7 | User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
8 | User.objects.filter(username='admin').extra(where=['%secure' % 'nos'])
|
||||
9 | User.objects.filter(username='admin').extra(where=['{}secure'.format('no')])
|
||||
|
|
||||
|
||||
S610.py:8:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
6 | User.objects.filter(username='admin').extra(select={'test': '%secure' % 'nos'})
|
||||
7 | User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
8 | User.objects.filter(username='admin').extra(where=['%secure' % 'nos'])
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
9 | User.objects.filter(username='admin').extra(where=['{}secure'.format('no')])
|
||||
|
|
||||
|
||||
S610.py:9:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
7 | User.objects.filter(username='admin').extra(select={'test': '{}secure'.format('nos')})
|
||||
8 | User.objects.filter(username='admin').extra(where=['%secure' % 'nos'])
|
||||
9 | User.objects.filter(username='admin').extra(where=['{}secure'.format('no')])
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
10 |
|
||||
11 | query = '"username") AS "username", * FROM "auth_user" WHERE 1=1 OR "username"=? --'
|
||||
|
|
||||
|
||||
S610.py:12:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
11 | query = '"username") AS "username", * FROM "auth_user" WHERE 1=1 OR "username"=? --'
|
||||
12 | User.objects.filter(username='admin').extra(select={'test': query})
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
13 |
|
||||
14 | where_var = ['1=1) OR 1=1 AND (1=1']
|
||||
|
|
||||
|
||||
S610.py:15:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
14 | where_var = ['1=1) OR 1=1 AND (1=1']
|
||||
15 | User.objects.filter(username='admin').extra(where=where_var)
|
||||
| ^^^^^^^^^^^^^^^^^ S610
|
||||
16 |
|
||||
17 | where_str = '1=1) OR 1=1 AND (1=1'
|
||||
|
|
||||
|
||||
S610.py:18:44: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
17 | where_str = '1=1) OR 1=1 AND (1=1'
|
||||
18 | User.objects.filter(username='admin').extra(where=[where_str])
|
||||
| ^^^^^^^^^^^^^^^^^^^ S610
|
||||
19 |
|
||||
20 | tables_var = ['django_content_type" WHERE "auth_user"."username"="admin']
|
||||
|
|
||||
|
||||
S610.py:21:25: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
20 | tables_var = ['django_content_type" WHERE "auth_user"."username"="admin']
|
||||
21 | User.objects.all().extra(tables=tables_var).distinct()
|
||||
| ^^^^^^^^^^^^^^^^^^^ S610
|
||||
22 |
|
||||
23 | tables_str = 'django_content_type" WHERE "auth_user"."username"="admin'
|
||||
|
|
||||
|
||||
S610.py:24:25: S610 Use of Django `extra` can lead to SQL injection vulnerabilities
|
||||
|
|
||||
23 | tables_str = 'django_content_type" WHERE "auth_user"."username"="admin'
|
||||
24 | User.objects.all().extra(tables=[tables_str]).distinct()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ S610
|
||||
25 |
|
||||
26 | # OK
|
||||
|
|
||||
@@ -205,7 +205,7 @@ C413.py:14:1: C413 [*] Unnecessary `reversed` call around `sorted()`
|
||||
14 |+sorted((i for i in range(42)), reverse=True)
|
||||
15 15 | reversed(sorted((i for i in range(42)), reverse=True))
|
||||
16 16 |
|
||||
17 17 |
|
||||
17 17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
|
||||
C413.py:15:1: C413 [*] Unnecessary `reversed` call around `sorted()`
|
||||
|
|
||||
@@ -213,6 +213,8 @@ C413.py:15:1: C413 [*] Unnecessary `reversed` call around `sorted()`
|
||||
14 | reversed(sorted(i for i in range(42)))
|
||||
15 | reversed(sorted((i for i in range(42)), reverse=True))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C413
|
||||
16 |
|
||||
17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
|
|
||||
= help: Remove unnecessary `reversed` call
|
||||
|
||||
@@ -223,7 +225,38 @@ C413.py:15:1: C413 [*] Unnecessary `reversed` call around `sorted()`
|
||||
15 |-reversed(sorted((i for i in range(42)), reverse=True))
|
||||
15 |+sorted((i for i in range(42)), reverse=False)
|
||||
16 16 |
|
||||
17 17 |
|
||||
18 18 | def reversed(*args, **kwargs):
|
||||
17 17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
18 18 | reversed(sorted([1, 2, 3], reverse=False or True))
|
||||
|
||||
C413.py:18:1: C413 [*] Unnecessary `reversed` call around `sorted()`
|
||||
|
|
||||
17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
18 | reversed(sorted([1, 2, 3], reverse=False or True))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C413
|
||||
19 | reversed(sorted([1, 2, 3], reverse=(False or True)))
|
||||
|
|
||||
= help: Remove unnecessary `reversed` call
|
||||
|
||||
ℹ Unsafe fix
|
||||
15 15 | reversed(sorted((i for i in range(42)), reverse=True))
|
||||
16 16 |
|
||||
17 17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
18 |-reversed(sorted([1, 2, 3], reverse=False or True))
|
||||
18 |+sorted([1, 2, 3], reverse=not (False or True))
|
||||
19 19 | reversed(sorted([1, 2, 3], reverse=(False or True)))
|
||||
|
||||
C413.py:19:1: C413 [*] Unnecessary `reversed` call around `sorted()`
|
||||
|
|
||||
17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
18 | reversed(sorted([1, 2, 3], reverse=False or True))
|
||||
19 | reversed(sorted([1, 2, 3], reverse=(False or True)))
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C413
|
||||
|
|
||||
= help: Remove unnecessary `reversed` call
|
||||
|
||||
ℹ Unsafe fix
|
||||
16 16 |
|
||||
17 17 | # Regression test for: https://github.com/astral-sh/ruff/issues/10335
|
||||
18 18 | reversed(sorted([1, 2, 3], reverse=False or True))
|
||||
19 |-reversed(sorted([1, 2, 3], reverse=(False or True)))
|
||||
19 |+sorted([1, 2, 3], reverse=not (False or True))
|
||||
|
||||
@@ -11,7 +11,7 @@ mod tests {
|
||||
|
||||
use crate::assert_messages;
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::flake8_import_conventions::settings::default_aliases;
|
||||
use crate::rules::flake8_import_conventions::settings::{default_aliases, BannedAliases};
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::test::test_path;
|
||||
|
||||
@@ -57,17 +57,20 @@ mod tests {
|
||||
banned_aliases: FxHashMap::from_iter([
|
||||
(
|
||||
"typing".to_string(),
|
||||
vec!["t".to_string(), "ty".to_string()],
|
||||
BannedAliases::from_iter(["t".to_string(), "ty".to_string()]),
|
||||
),
|
||||
(
|
||||
"numpy".to_string(),
|
||||
vec!["nmp".to_string(), "npy".to_string()],
|
||||
BannedAliases::from_iter(["nmp".to_string(), "npy".to_string()]),
|
||||
),
|
||||
(
|
||||
"tensorflow.keras.backend".to_string(),
|
||||
vec!["K".to_string()],
|
||||
BannedAliases::from_iter(["K".to_string()]),
|
||||
),
|
||||
(
|
||||
"torch.nn.functional".to_string(),
|
||||
BannedAliases::from_iter(["F".to_string()]),
|
||||
),
|
||||
("torch.nn.functional".to_string(), vec!["F".to_string()]),
|
||||
]),
|
||||
banned_from: FxHashSet::default(),
|
||||
},
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use ruff_python_ast::Stmt;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::rules::flake8_import_conventions::settings::BannedAliases;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for imports that use non-standard naming conventions, like
|
||||
/// `import tensorflow.keras.backend as K`.
|
||||
@@ -49,7 +51,7 @@ pub(crate) fn banned_import_alias(
|
||||
stmt: &Stmt,
|
||||
name: &str,
|
||||
asname: &str,
|
||||
banned_conventions: &FxHashMap<String, Vec<String>>,
|
||||
banned_conventions: &FxHashMap<String, BannedAliases>,
|
||||
) -> Option<Diagnostic> {
|
||||
if let Some(banned_aliases) = banned_conventions.get(name) {
|
||||
if banned_aliases
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
//! Settings for import conventions.
|
||||
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use crate::display_settings;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
use crate::display_settings;
|
||||
|
||||
const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("altair", "alt"),
|
||||
("matplotlib", "mpl"),
|
||||
@@ -23,10 +26,41 @@ const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("pyarrow", "pa"),
|
||||
];
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub struct BannedAliases(Vec<String>);
|
||||
|
||||
impl Display for BannedAliases {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "[")?;
|
||||
for (i, alias) in self.0.iter().enumerate() {
|
||||
if i > 0 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
write!(f, "{alias}")?;
|
||||
}
|
||||
write!(f, "]")
|
||||
}
|
||||
}
|
||||
|
||||
impl BannedAliases {
|
||||
/// Returns an iterator over the banned aliases.
|
||||
pub fn iter(&self) -> impl Iterator<Item = &str> {
|
||||
self.0.iter().map(String::as_str)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<String> for BannedAliases {
|
||||
fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> Self {
|
||||
Self(iter.into_iter().collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub aliases: FxHashMap<String, String>,
|
||||
pub banned_aliases: FxHashMap<String, Vec<String>>,
|
||||
pub banned_aliases: FxHashMap<String, BannedAliases>,
|
||||
pub banned_from: FxHashSet<String>,
|
||||
}
|
||||
|
||||
@@ -53,9 +87,9 @@ impl Display for Settings {
|
||||
formatter = f,
|
||||
namespace = "linter.flake8_import_conventions",
|
||||
fields = [
|
||||
self.aliases | debug,
|
||||
self.banned_aliases | debug,
|
||||
self.banned_from | array,
|
||||
self.aliases | map,
|
||||
self.banned_aliases | map,
|
||||
self.banned_from | set,
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -413,5 +413,3 @@ PT018.py:65:5: PT018 [*] Assertion should be broken down into multiple parts
|
||||
70 72 |
|
||||
71 73 | assert (not self.find_graph_output(node.output[0]) or
|
||||
72 74 | self.find_graph_input(node.input[0]))
|
||||
|
||||
|
||||
|
||||
@@ -13,6 +13,12 @@ pub struct ApiBan {
|
||||
pub msg: String,
|
||||
}
|
||||
|
||||
impl Display for ApiBan {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.msg)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, Default)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
@@ -47,7 +53,7 @@ impl Display for Settings {
|
||||
namespace = "linter.flake8_tidy_imports",
|
||||
fields = [
|
||||
self.ban_relative_imports,
|
||||
self.banned_api | debug,
|
||||
self.banned_api | map,
|
||||
self.banned_module_level_imports | array,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -278,7 +278,7 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_text_size::Ranged;
|
||||
@@ -495,7 +495,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_to_top: BTreeSet::from([
|
||||
force_to_top: FxHashSet::from_iter([
|
||||
"z".to_string(),
|
||||
"lib1".to_string(),
|
||||
"lib3".to_string(),
|
||||
@@ -575,9 +575,10 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_single_line: true,
|
||||
single_line_exclusions: vec!["os".to_string(), "logging.handlers".to_string()]
|
||||
.into_iter()
|
||||
.collect::<BTreeSet<_>>(),
|
||||
single_line_exclusions: FxHashSet::from_iter([
|
||||
"os".to_string(),
|
||||
"logging.handlers".to_string(),
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -636,7 +637,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
classes: BTreeSet::from([
|
||||
classes: FxHashSet::from_iter([
|
||||
"SVC".to_string(),
|
||||
"SELU".to_string(),
|
||||
"N_CLASS".to_string(),
|
||||
@@ -664,7 +665,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
constants: BTreeSet::from([
|
||||
constants: FxHashSet::from_iter([
|
||||
"Const".to_string(),
|
||||
"constant".to_string(),
|
||||
"First".to_string(),
|
||||
@@ -694,7 +695,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
variables: BTreeSet::from([
|
||||
variables: FxHashSet::from_iter([
|
||||
"VAR".to_string(),
|
||||
"Variable".to_string(),
|
||||
"MyVar".to_string(),
|
||||
@@ -721,7 +722,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_sort_within_sections: true,
|
||||
force_to_top: BTreeSet::from(["z".to_string()]),
|
||||
force_to_top: FxHashSet::from_iter(["z".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -771,7 +772,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
required_imports: BTreeSet::from_iter([
|
||||
"from __future__ import annotations".to_string()
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -801,7 +802,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
required_imports: BTreeSet::from_iter([
|
||||
"from __future__ import annotations as _annotations".to_string(),
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -824,7 +825,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
required_imports: BTreeSet::from_iter([
|
||||
"from __future__ import annotations".to_string(),
|
||||
"from __future__ import generator_stop".to_string(),
|
||||
]),
|
||||
@@ -848,7 +849,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from(["from __future__ import annotations, \
|
||||
required_imports: BTreeSet::from_iter(["from __future__ import annotations, \
|
||||
generator_stop"
|
||||
.to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -871,7 +872,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from(["import os".to_string()]),
|
||||
required_imports: BTreeSet::from_iter(["import os".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::MissingRequiredImport)
|
||||
@@ -1002,7 +1003,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: BTreeSet::from([
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
ImportSection::Known(ImportType::Future),
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::ThirdParty),
|
||||
@@ -1030,7 +1031,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: BTreeSet::from([
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::LocalFolder),
|
||||
]),
|
||||
|
||||
@@ -5,12 +5,13 @@ use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use crate::display_settings;
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
use crate::display_settings;
|
||||
use crate::rules::isort::categorize::KnownModules;
|
||||
use crate::rules::isort::ImportType;
|
||||
|
||||
@@ -52,17 +53,17 @@ pub struct Settings {
|
||||
pub force_sort_within_sections: bool,
|
||||
pub case_sensitive: bool,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub force_to_top: BTreeSet<String>,
|
||||
pub force_to_top: FxHashSet<String>,
|
||||
pub known_modules: KnownModules,
|
||||
pub detect_same_package: bool,
|
||||
pub order_by_type: bool,
|
||||
pub relative_imports_order: RelativeImportsOrder,
|
||||
pub single_line_exclusions: BTreeSet<String>,
|
||||
pub single_line_exclusions: FxHashSet<String>,
|
||||
pub split_on_trailing_comma: bool,
|
||||
pub classes: BTreeSet<String>,
|
||||
pub constants: BTreeSet<String>,
|
||||
pub variables: BTreeSet<String>,
|
||||
pub no_lines_before: BTreeSet<ImportSection>,
|
||||
pub classes: FxHashSet<String>,
|
||||
pub constants: FxHashSet<String>,
|
||||
pub variables: FxHashSet<String>,
|
||||
pub no_lines_before: FxHashSet<ImportSection>,
|
||||
pub lines_after_imports: isize,
|
||||
pub lines_between_types: usize,
|
||||
pub forced_separate: Vec<String>,
|
||||
@@ -77,23 +78,23 @@ pub struct Settings {
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
required_imports: BTreeSet::new(),
|
||||
required_imports: BTreeSet::default(),
|
||||
combine_as_imports: false,
|
||||
force_single_line: false,
|
||||
force_sort_within_sections: false,
|
||||
detect_same_package: true,
|
||||
case_sensitive: false,
|
||||
force_wrap_aliases: false,
|
||||
force_to_top: BTreeSet::new(),
|
||||
force_to_top: FxHashSet::default(),
|
||||
known_modules: KnownModules::default(),
|
||||
order_by_type: true,
|
||||
relative_imports_order: RelativeImportsOrder::default(),
|
||||
single_line_exclusions: BTreeSet::new(),
|
||||
single_line_exclusions: FxHashSet::default(),
|
||||
split_on_trailing_comma: true,
|
||||
classes: BTreeSet::new(),
|
||||
constants: BTreeSet::new(),
|
||||
variables: BTreeSet::new(),
|
||||
no_lines_before: BTreeSet::new(),
|
||||
classes: FxHashSet::default(),
|
||||
constants: FxHashSet::default(),
|
||||
variables: FxHashSet::default(),
|
||||
no_lines_before: FxHashSet::default(),
|
||||
lines_after_imports: -1,
|
||||
lines_between_types: 0,
|
||||
forced_separate: Vec::new(),
|
||||
@@ -113,23 +114,23 @@ impl Display for Settings {
|
||||
formatter = f,
|
||||
namespace = "linter.isort",
|
||||
fields = [
|
||||
self.required_imports | array,
|
||||
self.required_imports | set,
|
||||
self.combine_as_imports,
|
||||
self.force_single_line,
|
||||
self.force_sort_within_sections,
|
||||
self.detect_same_package,
|
||||
self.case_sensitive,
|
||||
self.force_wrap_aliases,
|
||||
self.force_to_top | array,
|
||||
self.force_to_top | set,
|
||||
self.known_modules,
|
||||
self.order_by_type,
|
||||
self.relative_imports_order,
|
||||
self.single_line_exclusions | array,
|
||||
self.single_line_exclusions | set,
|
||||
self.split_on_trailing_comma,
|
||||
self.classes | array,
|
||||
self.constants | array,
|
||||
self.variables | array,
|
||||
self.no_lines_before | array,
|
||||
self.classes | set,
|
||||
self.constants | set,
|
||||
self.variables | set,
|
||||
self.no_lines_before | set,
|
||||
self.lines_after_imports,
|
||||
self.lines_between_types,
|
||||
self.forced_separate | array,
|
||||
@@ -155,7 +156,7 @@ pub enum SettingsError {
|
||||
InvalidUserDefinedSection(glob::PatternError),
|
||||
}
|
||||
|
||||
impl fmt::Display for SettingsError {
|
||||
impl Display for SettingsError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
SettingsError::InvalidKnownThirdParty(err) => {
|
||||
|
||||
@@ -71,6 +71,12 @@ mod tests {
|
||||
#[test_case(Rule::IsLiteral, Path::new("constant_literals.py"))]
|
||||
#[test_case(Rule::TypeComparison, Path::new("E721.py"))]
|
||||
#[test_case(Rule::ModuleImportNotAtTopOfFile, Path::new("E402_2.py"))]
|
||||
#[test_case(Rule::RedundantBackslash, Path::new("E502.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_0.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_1.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_2.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_3.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_4.py"))]
|
||||
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__{}_{}",
|
||||
@@ -148,6 +154,23 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Tests the compatibility of E2 rules (E202, E225 and E275) on syntactically incorrect code.
|
||||
#[test]
|
||||
fn white_space_syntax_error_compatibility() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pycodestyle").join("E2_syntax_error.py"),
|
||||
&settings::LinterSettings {
|
||||
..settings::LinterSettings::for_rules([
|
||||
Rule::MissingWhitespaceAroundOperator,
|
||||
Rule::MissingWhitespaceAfterKeyword,
|
||||
Rule::WhitespaceBeforeCloseBracket,
|
||||
])
|
||||
},
|
||||
)?;
|
||||
assert_messages!(diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::BlankLineBetweenMethods, Path::new("E30.py"))]
|
||||
#[test_case(Rule::BlankLinesTopLevel, Path::new("E30.py"))]
|
||||
#[test_case(Rule::TooManyBlankLines, Path::new("E30.py"))]
|
||||
|
||||
@@ -59,7 +59,13 @@ pub(crate) fn missing_whitespace_after_keyword(
|
||||
|| tok0_kind == TokenKind::Yield && tok1_kind == TokenKind::Rpar
|
||||
|| matches!(
|
||||
tok1_kind,
|
||||
TokenKind::Colon | TokenKind::Newline | TokenKind::NonLogicalNewline
|
||||
TokenKind::Colon
|
||||
| TokenKind::Newline
|
||||
| TokenKind::NonLogicalNewline
|
||||
// In the event of a syntax error, do not attempt to add a whitespace.
|
||||
| TokenKind::Rpar
|
||||
| TokenKind::Rsqb
|
||||
| TokenKind::Rbrace
|
||||
))
|
||||
&& tok0.end() == tok1.start()
|
||||
{
|
||||
|
||||
@@ -211,6 +211,21 @@ pub(crate) fn missing_whitespace_around_operator(
|
||||
} else {
|
||||
NeedsSpace::No
|
||||
}
|
||||
} else if tokens.peek().is_some_and(|token| {
|
||||
matches!(
|
||||
token.kind(),
|
||||
TokenKind::Rpar | TokenKind::Rsqb | TokenKind::Rbrace
|
||||
)
|
||||
}) {
|
||||
// There should not be a closing bracket directly after a token, as it is a syntax
|
||||
// error. For example:
|
||||
// ```
|
||||
// 1+)
|
||||
// ```
|
||||
//
|
||||
// However, allow it in order to prevent entering an infinite loop in which E225 adds a
|
||||
// space only for E202 to remove it.
|
||||
NeedsSpace::No
|
||||
} else if is_whitespace_needed(kind) {
|
||||
NeedsSpace::Yes
|
||||
} else {
|
||||
|
||||
@@ -3,6 +3,7 @@ pub(crate) use indentation::*;
|
||||
pub(crate) use missing_whitespace::*;
|
||||
pub(crate) use missing_whitespace_after_keyword::*;
|
||||
pub(crate) use missing_whitespace_around_operator::*;
|
||||
pub(crate) use redundant_backslash::*;
|
||||
pub(crate) use space_around_operator::*;
|
||||
pub(crate) use whitespace_around_keywords::*;
|
||||
pub(crate) use whitespace_around_named_parameter_equals::*;
|
||||
@@ -25,6 +26,7 @@ mod indentation;
|
||||
mod missing_whitespace;
|
||||
mod missing_whitespace_after_keyword;
|
||||
mod missing_whitespace_around_operator;
|
||||
mod redundant_backslash;
|
||||
mod space_around_operator;
|
||||
mod whitespace_around_keywords;
|
||||
mod whitespace_around_named_parameter_equals;
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::TokenKind;
|
||||
use ruff_source_file::Locator;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
use crate::checkers::logical_lines::LogicalLinesContext;
|
||||
|
||||
use super::LogicalLine;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for redundant backslashes between brackets.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Explicit line joins using a backslash are redundant between brackets.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// x = (2 + \
|
||||
/// 2)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// x = (2 +
|
||||
/// 2)
|
||||
/// ```
|
||||
///
|
||||
/// [PEP 8]: https://peps.python.org/pep-0008/#maximum-line-length
|
||||
#[violation]
|
||||
pub struct RedundantBackslash;
|
||||
|
||||
impl AlwaysFixableViolation for RedundantBackslash {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Redundant backslash")
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
"Remove redundant backslash".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// E502
|
||||
pub(crate) fn redundant_backslash(
|
||||
line: &LogicalLine,
|
||||
locator: &Locator,
|
||||
indexer: &Indexer,
|
||||
context: &mut LogicalLinesContext,
|
||||
) {
|
||||
let mut parens = 0;
|
||||
let continuation_lines = indexer.continuation_line_starts();
|
||||
let mut start_index = 0;
|
||||
|
||||
for token in line.tokens() {
|
||||
match token.kind() {
|
||||
TokenKind::Lpar | TokenKind::Lsqb | TokenKind::Lbrace => {
|
||||
if parens == 0 {
|
||||
let start = locator.line_start(token.start());
|
||||
start_index = continuation_lines
|
||||
.binary_search(&start)
|
||||
.map_or_else(|err_index| err_index, |ok_index| ok_index);
|
||||
}
|
||||
parens += 1;
|
||||
}
|
||||
TokenKind::Rpar | TokenKind::Rsqb | TokenKind::Rbrace => {
|
||||
parens -= 1;
|
||||
if parens == 0 {
|
||||
let end = locator.line_start(token.start());
|
||||
let end_index = continuation_lines
|
||||
.binary_search(&end)
|
||||
.map_or_else(|err_index| err_index, |ok_index| ok_index);
|
||||
for continuation_line in &continuation_lines[start_index..end_index] {
|
||||
let backslash_end = locator.line_end(*continuation_line);
|
||||
let backslash_start = backslash_end - TextSize::new(1);
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
RedundantBackslash,
|
||||
TextRange::new(backslash_start, backslash_end),
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(
|
||||
backslash_start,
|
||||
backslash_end,
|
||||
)));
|
||||
context.push_diagnostic(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -42,7 +42,7 @@ pub(crate) fn no_newline_at_end_of_file(
|
||||
) -> Option<Diagnostic> {
|
||||
let source = locator.contents();
|
||||
|
||||
// Ignore empty and BOM only files
|
||||
// Ignore empty and BOM only files.
|
||||
if source.is_empty() || source == "\u{feff}" {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ pub(crate) use module_import_not_at_top_of_file::*;
|
||||
pub(crate) use multiple_imports_on_one_line::*;
|
||||
pub(crate) use not_tests::*;
|
||||
pub(crate) use tab_indentation::*;
|
||||
pub(crate) use too_many_newlines_at_end_of_file::*;
|
||||
pub(crate) use trailing_whitespace::*;
|
||||
pub(crate) use type_comparison::*;
|
||||
|
||||
@@ -39,5 +40,6 @@ mod module_import_not_at_top_of_file;
|
||||
mod multiple_imports_on_one_line;
|
||||
mod not_tests;
|
||||
mod tab_indentation;
|
||||
mod too_many_newlines_at_end_of_file;
|
||||
mod trailing_whitespace;
|
||||
mod type_comparison;
|
||||
|
||||
@@ -0,0 +1,99 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_python_parser::Tok;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for files with multiple trailing blank lines.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Trailing blank lines in a file are superfluous.
|
||||
///
|
||||
/// However, the last line of the file should end with a newline.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// spam(1)\n\n\n
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// spam(1)\n
|
||||
/// ```
|
||||
#[violation]
|
||||
pub struct TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines: u32,
|
||||
}
|
||||
|
||||
impl AlwaysFixableViolation for TooManyNewlinesAtEndOfFile {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines,
|
||||
} = self;
|
||||
|
||||
// We expect a single trailing newline; so two trailing newlines is one too many, three
|
||||
// trailing newlines is two too many, etc.
|
||||
if *num_trailing_newlines > 2 {
|
||||
format!("Too many newlines at end of file")
|
||||
} else {
|
||||
format!("Extra newline at end of file")
|
||||
}
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
let TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines,
|
||||
} = self;
|
||||
if *num_trailing_newlines > 2 {
|
||||
"Remove trailing newlines".to_string()
|
||||
} else {
|
||||
"Remove trailing newline".to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// W391
|
||||
pub(crate) fn too_many_newlines_at_end_of_file(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
lxr: &[LexResult],
|
||||
) {
|
||||
let mut num_trailing_newlines = 0u32;
|
||||
let mut start: Option<TextSize> = None;
|
||||
let mut end: Option<TextSize> = None;
|
||||
|
||||
// Count the number of trailing newlines.
|
||||
for (tok, range) in lxr.iter().rev().flatten() {
|
||||
match tok {
|
||||
Tok::NonLogicalNewline | Tok::Newline => {
|
||||
if num_trailing_newlines == 0 {
|
||||
end = Some(range.end());
|
||||
}
|
||||
start = Some(range.end());
|
||||
num_trailing_newlines += 1;
|
||||
}
|
||||
Tok::Dedent => continue,
|
||||
_ => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if num_trailing_newlines == 0 || num_trailing_newlines == 1 {
|
||||
return;
|
||||
}
|
||||
|
||||
let range = match (start, end) {
|
||||
(Some(start), Some(end)) => TextRange::new(start, end),
|
||||
_ => return,
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines,
|
||||
},
|
||||
range,
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
@@ -0,0 +1,281 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E502.py:9:9: E502 [*] Redundant backslash
|
||||
|
|
||||
7 | + 4
|
||||
8 |
|
||||
9 | a = (3 -\
|
||||
| ^ E502
|
||||
10 | 2 + \
|
||||
11 | 7)
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
6 6 | 3 \
|
||||
7 7 | + 4
|
||||
8 8 |
|
||||
9 |-a = (3 -\
|
||||
9 |+a = (3 -
|
||||
10 10 | 2 + \
|
||||
11 11 | 7)
|
||||
12 12 |
|
||||
|
||||
E502.py:10:11: E502 [*] Redundant backslash
|
||||
|
|
||||
9 | a = (3 -\
|
||||
10 | 2 + \
|
||||
| ^ E502
|
||||
11 | 7)
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
7 7 | + 4
|
||||
8 8 |
|
||||
9 9 | a = (3 -\
|
||||
10 |- 2 + \
|
||||
10 |+ 2 +
|
||||
11 11 | 7)
|
||||
12 12 |
|
||||
13 13 | z = 5 + \
|
||||
|
||||
E502.py:14:9: E502 [*] Redundant backslash
|
||||
|
|
||||
13 | z = 5 + \
|
||||
14 | (3 -\
|
||||
| ^ E502
|
||||
15 | 2 + \
|
||||
16 | 7) + \
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
11 11 | 7)
|
||||
12 12 |
|
||||
13 13 | z = 5 + \
|
||||
14 |- (3 -\
|
||||
14 |+ (3 -
|
||||
15 15 | 2 + \
|
||||
16 16 | 7) + \
|
||||
17 17 | 4
|
||||
|
||||
E502.py:15:11: E502 [*] Redundant backslash
|
||||
|
|
||||
13 | z = 5 + \
|
||||
14 | (3 -\
|
||||
15 | 2 + \
|
||||
| ^ E502
|
||||
16 | 7) + \
|
||||
17 | 4
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
12 12 |
|
||||
13 13 | z = 5 + \
|
||||
14 14 | (3 -\
|
||||
15 |- 2 + \
|
||||
15 |+ 2 +
|
||||
16 16 | 7) + \
|
||||
17 17 | 4
|
||||
18 18 |
|
||||
|
||||
E502.py:23:17: E502 [*] Redundant backslash
|
||||
|
|
||||
22 | b = [
|
||||
23 | 2 + 4 + 5 + \
|
||||
| ^ E502
|
||||
24 | 44 \
|
||||
25 | - 5
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 | 2]
|
||||
21 21 |
|
||||
22 22 | b = [
|
||||
23 |- 2 + 4 + 5 + \
|
||||
23 |+ 2 + 4 + 5 +
|
||||
24 24 | 44 \
|
||||
25 25 | - 5
|
||||
26 26 | ]
|
||||
|
||||
E502.py:24:8: E502 [*] Redundant backslash
|
||||
|
|
||||
22 | b = [
|
||||
23 | 2 + 4 + 5 + \
|
||||
24 | 44 \
|
||||
| ^ E502
|
||||
25 | - 5
|
||||
26 | ]
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
21 21 |
|
||||
22 22 | b = [
|
||||
23 23 | 2 + 4 + 5 + \
|
||||
24 |- 44 \
|
||||
24 |+ 44
|
||||
25 25 | - 5
|
||||
26 26 | ]
|
||||
27 27 |
|
||||
|
||||
E502.py:29:11: E502 [*] Redundant backslash
|
||||
|
|
||||
28 | c = (True and
|
||||
29 | False \
|
||||
| ^ E502
|
||||
30 | or False \
|
||||
31 | and True \
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 | ]
|
||||
27 27 |
|
||||
28 28 | c = (True and
|
||||
29 |- False \
|
||||
29 |+ False
|
||||
30 30 | or False \
|
||||
31 31 | and True \
|
||||
32 32 | )
|
||||
|
||||
E502.py:30:14: E502 [*] Redundant backslash
|
||||
|
|
||||
28 | c = (True and
|
||||
29 | False \
|
||||
30 | or False \
|
||||
| ^ E502
|
||||
31 | and True \
|
||||
32 | )
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
27 27 |
|
||||
28 28 | c = (True and
|
||||
29 29 | False \
|
||||
30 |- or False \
|
||||
30 |+ or False
|
||||
31 31 | and True \
|
||||
32 32 | )
|
||||
33 33 |
|
||||
|
||||
E502.py:31:14: E502 [*] Redundant backslash
|
||||
|
|
||||
29 | False \
|
||||
30 | or False \
|
||||
31 | and True \
|
||||
| ^ E502
|
||||
32 | )
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
28 28 | c = (True and
|
||||
29 29 | False \
|
||||
30 30 | or False \
|
||||
31 |- and True \
|
||||
31 |+ and True
|
||||
32 32 | )
|
||||
33 33 |
|
||||
34 34 | c = (True and
|
||||
|
||||
E502.py:44:14: E502 [*] Redundant backslash
|
||||
|
|
||||
43 | s = {
|
||||
44 | 'x': 2 + \
|
||||
| ^ E502
|
||||
45 | 2
|
||||
46 | }
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
41 41 |
|
||||
42 42 |
|
||||
43 43 | s = {
|
||||
44 |- 'x': 2 + \
|
||||
44 |+ 'x': 2 +
|
||||
45 45 | 2
|
||||
46 46 | }
|
||||
47 47 |
|
||||
|
||||
E502.py:55:12: E502 [*] Redundant backslash
|
||||
|
|
||||
55 | x = {2 + 4 \
|
||||
| ^ E502
|
||||
56 | + 3}
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
52 52 | }
|
||||
53 53 |
|
||||
54 54 |
|
||||
55 |-x = {2 + 4 \
|
||||
55 |+x = {2 + 4
|
||||
56 56 | + 3}
|
||||
57 57 |
|
||||
58 58 | y = (
|
||||
|
||||
E502.py:61:9: E502 [*] Redundant backslash
|
||||
|
|
||||
59 | 2 + 2 # \
|
||||
60 | + 3 # \
|
||||
61 | + 4 \
|
||||
| ^ E502
|
||||
62 | + 3
|
||||
63 | )
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
58 58 | y = (
|
||||
59 59 | 2 + 2 # \
|
||||
60 60 | + 3 # \
|
||||
61 |- + 4 \
|
||||
61 |+ + 4
|
||||
62 62 | + 3
|
||||
63 63 | )
|
||||
64 64 |
|
||||
|
||||
E502.py:82:12: E502 [*] Redundant backslash
|
||||
|
|
||||
80 | "xyz"
|
||||
81 |
|
||||
82 | x = ("abc" \
|
||||
| ^ E502
|
||||
83 | "xyz")
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
79 79 | x = "abc" \
|
||||
80 80 | "xyz"
|
||||
81 81 |
|
||||
82 |-x = ("abc" \
|
||||
82 |+x = ("abc"
|
||||
83 83 | "xyz")
|
||||
84 84 |
|
||||
85 85 |
|
||||
|
||||
E502.py:87:14: E502 [*] Redundant backslash
|
||||
|
|
||||
86 | def foo():
|
||||
87 | x = (a + \
|
||||
| ^ E502
|
||||
88 | 2)
|
||||
|
|
||||
= help: Remove redundant backslash
|
||||
|
||||
ℹ Safe fix
|
||||
84 84 |
|
||||
85 85 |
|
||||
86 86 | def foo():
|
||||
87 |- x = (a + \
|
||||
87 |+ x = (a +
|
||||
88 88 | 2)
|
||||
@@ -0,0 +1,17 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
W391_0.py:14:1: W391 [*] Extra newline at end of file
|
||||
|
|
||||
12 | foo()
|
||||
13 | bar()
|
||||
14 |
|
||||
| ^ W391
|
||||
|
|
||||
= help: Remove trailing newline
|
||||
|
||||
ℹ Safe fix
|
||||
11 11 | if __name__ == '__main__':
|
||||
12 12 | foo()
|
||||
13 13 | bar()
|
||||
14 |-
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
W391_2.py:14:1: W391 [*] Too many newlines at end of file
|
||||
|
|
||||
12 | foo()
|
||||
13 | bar()
|
||||
14 | /
|
||||
15 | |
|
||||
16 | |
|
||||
17 | |
|
||||
|
|
||||
= help: Remove trailing newlines
|
||||
|
||||
ℹ Safe fix
|
||||
11 11 | if __name__ == '__main__':
|
||||
12 12 | foo()
|
||||
13 13 | bar()
|
||||
14 |-
|
||||
15 |-
|
||||
16 |-
|
||||
17 |-
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
|
||||
@@ -97,8 +97,8 @@ impl fmt::Display for Settings {
|
||||
namespace = "linter.pydocstyle",
|
||||
fields = [
|
||||
self.convention | optional,
|
||||
self.ignore_decorators | debug,
|
||||
self.property_decorators | debug
|
||||
self.ignore_decorators | set,
|
||||
self.property_decorators | set
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -130,12 +130,14 @@ mod tests {
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_3.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_4.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_5.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_5.pyi"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_6.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_7.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_8.pyi"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_9.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_10.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_11.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_11.pyi"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_12.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_13.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_14.py"))]
|
||||
@@ -150,7 +152,11 @@ mod tests {
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_23.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_24.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_25.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_26.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_26.pyi"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_27.py"))]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_0.py"))]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_0.pyi"))]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_1.py"))]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_2.py"))]
|
||||
#[test_case(Rule::UndefinedLocal, Path::new("F823.py"))]
|
||||
@@ -206,7 +212,11 @@ mod tests {
|
||||
fn init() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pyflakes/__init__.py"),
|
||||
&LinterSettings::for_rules(vec![Rule::UndefinedName, Rule::UndefinedExport]),
|
||||
&LinterSettings::for_rules(vec![
|
||||
Rule::UndefinedName,
|
||||
Rule::UndefinedExport,
|
||||
Rule::UnusedImport,
|
||||
]),
|
||||
)?;
|
||||
assert_messages!(diagnostics);
|
||||
Ok(())
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F821_11.pyi:15:28: F821 Undefined name `os`
|
||||
|
|
||||
15 | def f(x: Callable[[VarArg("os")], None]): # F821
|
||||
| ^^ F821
|
||||
16 | pass
|
||||
|
|
||||
@@ -0,0 +1,83 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F821_26.py:9:33: F821 Undefined name `CStr`
|
||||
|
|
||||
8 | # Forward references:
|
||||
9 | MaybeCStr: TypeAlias = Optional[CStr] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^^^^ F821
|
||||
10 | MaybeCStr2: TypeAlias = Optional["CStr"] # always okay
|
||||
11 | CStr: TypeAlias = Union[C, str] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
|
|
||||
|
||||
F821_26.py:11:25: F821 Undefined name `C`
|
||||
|
|
||||
9 | MaybeCStr: TypeAlias = Optional[CStr] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
10 | MaybeCStr2: TypeAlias = Optional["CStr"] # always okay
|
||||
11 | CStr: TypeAlias = Union[C, str] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^ F821
|
||||
12 | CStr2: TypeAlias = Union["C", str] # always okay
|
||||
|
|
||||
|
||||
F821_26.py:16:12: F821 Undefined name `C`
|
||||
|
|
||||
14 | # References to a class from inside the class:
|
||||
15 | class C:
|
||||
16 | other: C = ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^ F821
|
||||
17 | other2: "C" = ... # always okay
|
||||
18 | def from_str(self, s: str) -> C: ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
|
|
||||
|
||||
F821_26.py:18:35: F821 Undefined name `C`
|
||||
|
|
||||
16 | other: C = ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
17 | other2: "C" = ... # always okay
|
||||
18 | def from_str(self, s: str) -> C: ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^ F821
|
||||
19 | def from_str2(self, s: str) -> "C": ... # always okay
|
||||
|
|
||||
|
||||
F821_26.py:23:10: F821 Undefined name `B`
|
||||
|
|
||||
21 | # Circular references:
|
||||
22 | class A:
|
||||
23 | foo: B # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^ F821
|
||||
24 | foo2: "B" # always okay
|
||||
25 | bar: dict[str, B] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
|
|
||||
|
||||
F821_26.py:25:20: F821 Undefined name `B`
|
||||
|
|
||||
23 | foo: B # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
24 | foo2: "B" # always okay
|
||||
25 | bar: dict[str, B] # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^ F821
|
||||
26 | bar2: dict[str, "A"] # always okay
|
||||
|
|
||||
|
||||
F821_26.py:33:17: F821 Undefined name `Tree`
|
||||
|
|
||||
32 | class Leaf: ...
|
||||
33 | class Tree(list[Tree | Leaf]): ... # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^^^^ F821
|
||||
34 | class Tree2(list["Tree | Leaf"]): ... # always okay
|
||||
|
|
||||
|
||||
F821_26.py:39:11: F821 Undefined name `foo`
|
||||
|
|
||||
37 | class MyClass:
|
||||
38 | foo: int
|
||||
39 | bar = foo # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^^^ F821
|
||||
40 | bar = "foo" # always okay
|
||||
|
|
||||
|
||||
F821_26.py:43:8: F821 Undefined name `baz`
|
||||
|
|
||||
42 | baz: MyClass
|
||||
43 | eggs = baz # valid in a `.pyi` stub file, not in a `.py` runtime file
|
||||
| ^^^ F821
|
||||
44 | eggs = "baz" # always okay
|
||||
|
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F821_27.py:30:11: F821 Undefined name `foo`
|
||||
|
|
||||
28 | class MyClass:
|
||||
29 | foo: int
|
||||
30 | bar = foo # Still invalid even when `__future__.annotations` are enabled
|
||||
| ^^^ F821
|
||||
31 | bar = "foo" # always okay
|
||||
|
|
||||
|
||||
F821_27.py:34:8: F821 Undefined name `baz`
|
||||
|
|
||||
33 | baz: MyClass
|
||||
34 | eggs = baz # Still invalid even when `__future__.annotations` are enabled
|
||||
| ^^^ F821
|
||||
35 | eggs = "baz" # always okay
|
||||
|
|
||||
|
||||
F821_27.py:38:33: F821 Undefined name `DStr`
|
||||
|
|
||||
37 | # Forward references:
|
||||
38 | MaybeDStr: TypeAlias = Optional[DStr] # Still invalid even when `__future__.annotations` are enabled
|
||||
| ^^^^ F821
|
||||
39 | MaybeDStr2: TypeAlias = Optional["DStr"] # always okay
|
||||
40 | DStr: TypeAlias = Union[D, str] # Still invalid even when `__future__.annotations` are enabled
|
||||
|
|
||||
|
||||
F821_27.py:40:25: F821 Undefined name `D`
|
||||
|
|
||||
38 | MaybeDStr: TypeAlias = Optional[DStr] # Still invalid even when `__future__.annotations` are enabled
|
||||
39 | MaybeDStr2: TypeAlias = Optional["DStr"] # always okay
|
||||
40 | DStr: TypeAlias = Union[D, str] # Still invalid even when `__future__.annotations` are enabled
|
||||
| ^ F821
|
||||
41 | DStr2: TypeAlias = Union["D", str] # always okay
|
||||
|
|
||||
|
||||
F821_27.py:47:17: F821 Undefined name `Tree`
|
||||
|
|
||||
45 | # More circular references
|
||||
46 | class Leaf: ...
|
||||
47 | class Tree(list[Tree | Leaf]): ... # Still invalid even when `__future__.annotations` are enabled
|
||||
| ^^^^ F821
|
||||
48 | class Tree2(list["Tree | Leaf"]): ... # always okay
|
||||
|
|
||||
@@ -0,0 +1,10 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F821_5.pyi:4:27: F821 Undefined name `InnerClass`
|
||||
|
|
||||
3 | class RandomClass:
|
||||
4 | def bad_func(self) -> InnerClass: ... # F821
|
||||
| ^^^^^^^^^^ F821
|
||||
5 | def good_func(self) -> OuterClass.InnerClass: ... # Okay
|
||||
|
|
||||
@@ -0,0 +1,10 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F822_0.pyi:4:1: F822 Undefined name `c` in `__all__`
|
||||
|
|
||||
2 | b: int # Considered a binding in a `.pyi` stub file, not in a `.py` runtime file
|
||||
3 |
|
||||
4 | __all__ = ["a", "b", "c"] # c is flagged as missing; b is not
|
||||
| ^^^^^^^ F822
|
||||
|
|
||||
@@ -1,4 +1,17 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^ F401
|
||||
2 |
|
||||
3 | print(__path__)
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-import os
|
||||
2 1 |
|
||||
3 2 | print(__path__)
|
||||
4 3 |
|
||||
|
||||
@@ -142,7 +142,7 @@ fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
|
||||
}
|
||||
}
|
||||
|
||||
if slots.is_empty() {
|
||||
if slots.is_empty() || slots.contains("__dict__") {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
|
||||
@@ -88,7 +88,7 @@ impl fmt::Display for Settings {
|
||||
namespace = "linter.pylint",
|
||||
fields = [
|
||||
self.allow_magic_value_types | array,
|
||||
self.allow_dunder_method_names | array,
|
||||
self.allow_dunder_method_names | set,
|
||||
self.max_args,
|
||||
self.max_positional_args,
|
||||
self.max_returns,
|
||||
|
||||
@@ -155,6 +155,38 @@ macro_rules! display_settings {
|
||||
}
|
||||
}
|
||||
};
|
||||
(@field $fmt:ident, $prefix:ident, $settings:ident.$field:ident | map) => {
|
||||
{
|
||||
use itertools::Itertools;
|
||||
|
||||
write!($fmt, "{}{} = ", $prefix, stringify!($field))?;
|
||||
if $settings.$field.is_empty() {
|
||||
writeln!($fmt, "{{}}")?;
|
||||
} else {
|
||||
writeln!($fmt, "{{")?;
|
||||
for (key, value) in $settings.$field.iter().sorted_by(|(left, _), (right, _)| left.cmp(right)) {
|
||||
writeln!($fmt, "\t{key} = {value},")?;
|
||||
}
|
||||
writeln!($fmt, "}}")?;
|
||||
}
|
||||
}
|
||||
};
|
||||
(@field $fmt:ident, $prefix:ident, $settings:ident.$field:ident | set) => {
|
||||
{
|
||||
use itertools::Itertools;
|
||||
|
||||
write!($fmt, "{}{} = ", $prefix, stringify!($field))?;
|
||||
if $settings.$field.is_empty() {
|
||||
writeln!($fmt, "[]")?;
|
||||
} else {
|
||||
writeln!($fmt, "[")?;
|
||||
for elem in $settings.$field.iter().sorted_by(|left, right| left.cmp(right)) {
|
||||
writeln!($fmt, "\t{elem},")?;
|
||||
}
|
||||
writeln!($fmt, "]")?;
|
||||
}
|
||||
}
|
||||
};
|
||||
(@field $fmt:ident, $prefix:ident, $settings:ident.$field:ident | paths) => {
|
||||
{
|
||||
write!($fmt, "{}{} = ", $prefix, stringify!($field))?;
|
||||
|
||||
@@ -4148,7 +4148,8 @@ mod tests {
|
||||
assert_eq!(std::mem::size_of::<ExprDict>(), 56);
|
||||
assert_eq!(std::mem::size_of::<ExprDictComp>(), 48);
|
||||
assert_eq!(std::mem::size_of::<ExprEllipsisLiteral>(), 8);
|
||||
assert_eq!(std::mem::size_of::<ExprFString>(), 48);
|
||||
// 56 for Rustc < 1.76
|
||||
assert!(matches!(std::mem::size_of::<ExprFString>(), 48 | 56));
|
||||
assert_eq!(std::mem::size_of::<ExprGenerator>(), 48);
|
||||
assert_eq!(std::mem::size_of::<ExprIf>(), 32);
|
||||
assert_eq!(std::mem::size_of::<ExprIpyEscapeCommand>(), 32);
|
||||
|
||||
@@ -37,7 +37,6 @@ impl Indexer {
|
||||
let mut continuation_lines = Vec::new();
|
||||
// Token, end
|
||||
let mut prev_end = TextSize::default();
|
||||
let mut prev_token: Option<&Tok> = None;
|
||||
let mut line_start = TextSize::default();
|
||||
|
||||
for (tok, range) in tokens.iter().flatten() {
|
||||
@@ -51,11 +50,7 @@ impl Indexer {
|
||||
if text == "\r" && trivia.as_bytes().get(index + 1) == Some(&b'\n') {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Newlines after a newline never form a continuation.
|
||||
if !matches!(prev_token, Some(Tok::Newline | Tok::NonLogicalNewline)) {
|
||||
continuation_lines.push(line_start);
|
||||
}
|
||||
continuation_lines.push(line_start);
|
||||
|
||||
// SAFETY: Safe because of the len assertion at the top of the function.
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
@@ -80,7 +75,6 @@ impl Indexer {
|
||||
_ => {}
|
||||
}
|
||||
|
||||
prev_token = Some(tok);
|
||||
prev_end = range.end();
|
||||
}
|
||||
|
||||
@@ -361,6 +355,33 @@ f'foo { 'str1' \
|
||||
TextSize::new(63),
|
||||
]
|
||||
);
|
||||
|
||||
let contents = r"
|
||||
x = (
|
||||
1
|
||||
\
|
||||
\
|
||||
\
|
||||
|
||||
\
|
||||
+ 2)
|
||||
"
|
||||
.trim();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let indexer = Indexer::from_tokens(lxr.as_slice(), &Locator::new(contents));
|
||||
assert_eq!(
|
||||
indexer.continuation_line_starts(),
|
||||
[
|
||||
// row 3
|
||||
TextSize::new(12),
|
||||
// row 4
|
||||
TextSize::new(18),
|
||||
// row 5
|
||||
TextSize::new(24),
|
||||
// row 7
|
||||
TextSize::new(31),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
44
crates/ruff_server/Cargo.toml
Normal file
44
crates/ruff_server/Cargo.toml
Normal file
@@ -0,0 +1,44 @@
|
||||
[package]
|
||||
name = "ruff_server"
|
||||
version = "0.2.2"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
|
||||
[dependencies]
|
||||
ruff_diagnostics = { path = "../ruff_diagnostics" }
|
||||
ruff_formatter = { path = "../ruff_formatter" }
|
||||
ruff_linter = { path = "../ruff_linter" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||
ruff_python_codegen = { path = "../ruff_python_codegen" }
|
||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||
ruff_python_index = { path = "../ruff_python_index" }
|
||||
ruff_python_parser = { path = "../ruff_python_parser" }
|
||||
ruff_source_file = { path = "../ruff_source_file" }
|
||||
ruff_text_size = { path = "../ruff_text_size" }
|
||||
ruff_workspace = { path = "../ruff_workspace" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
crossbeam = { workspace = true }
|
||||
jod-thread = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
lsp-server = { workspace = true }
|
||||
lsp-types = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
similar = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
insta = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
1
crates/ruff_server/README.md
Normal file
1
crates/ruff_server/README.md
Normal file
@@ -0,0 +1 @@
|
||||
## The Ruff Language Server
|
||||
1240
crates/ruff_server/resources/test/fixtures/pandas_html.py
vendored
Normal file
1240
crates/ruff_server/resources/test/fixtures/pandas_html.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
50
crates/ruff_server/src/edit.rs
Normal file
50
crates/ruff_server/src/edit.rs
Normal file
@@ -0,0 +1,50 @@
|
||||
//! Types and utilities for working with text, modifying source files, and `Ruff <-> LSP` type conversion.
|
||||
|
||||
mod document;
|
||||
mod range;
|
||||
|
||||
pub use document::Document;
|
||||
pub(crate) use document::DocumentVersion;
|
||||
use lsp_types::PositionEncodingKind;
|
||||
pub(crate) use range::{RangeExt, ToRangeExt};
|
||||
|
||||
/// A convenient enumeration for supported text encodings. Can be converted to [`lsp_types::PositionEncodingKind`].
|
||||
// Please maintain the order from least to greatest priority for the derived `Ord` impl.
|
||||
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum PositionEncoding {
|
||||
/// UTF 16 is the encoding supported by all LSP clients.
|
||||
#[default]
|
||||
UTF16,
|
||||
|
||||
/// Second choice because UTF32 uses a fixed 4 byte encoding for each character (makes conversion relatively easy)
|
||||
UTF32,
|
||||
|
||||
/// Ruff's preferred encoding
|
||||
UTF8,
|
||||
}
|
||||
|
||||
impl From<PositionEncoding> for lsp_types::PositionEncodingKind {
|
||||
fn from(value: PositionEncoding) -> Self {
|
||||
match value {
|
||||
PositionEncoding::UTF8 => lsp_types::PositionEncodingKind::UTF8,
|
||||
PositionEncoding::UTF16 => lsp_types::PositionEncodingKind::UTF16,
|
||||
PositionEncoding::UTF32 => lsp_types::PositionEncodingKind::UTF32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&lsp_types::PositionEncodingKind> for PositionEncoding {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: &PositionEncodingKind) -> Result<Self, Self::Error> {
|
||||
Ok(if value == &PositionEncodingKind::UTF8 {
|
||||
PositionEncoding::UTF8
|
||||
} else if value == &PositionEncodingKind::UTF16 {
|
||||
PositionEncoding::UTF16
|
||||
} else if value == &PositionEncodingKind::UTF32 {
|
||||
PositionEncoding::UTF32
|
||||
} else {
|
||||
return Err(());
|
||||
})
|
||||
}
|
||||
}
|
||||
123
crates/ruff_server/src/edit/document.rs
Normal file
123
crates/ruff_server/src/edit/document.rs
Normal file
@@ -0,0 +1,123 @@
|
||||
use lsp_types::TextDocumentContentChangeEvent;
|
||||
use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::PositionEncoding;
|
||||
|
||||
use super::RangeExt;
|
||||
|
||||
pub(crate) type DocumentVersion = i32;
|
||||
|
||||
/// The state for an individual document in the server. Stays up-to-date
|
||||
/// with changes made by the user, including unsaved changes.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Document {
|
||||
/// The string contents of the document.
|
||||
contents: String,
|
||||
/// A computed line index for the document. This should always reflect
|
||||
/// the current version of `contents`. Using a function like [`Self::modify`]
|
||||
/// will re-calculate the line index automatically when the `contents` value is updated.
|
||||
index: LineIndex,
|
||||
/// The latest version of the document, set by the LSP client. The server will panic in
|
||||
/// debug mode if we attempt to update the document with an 'older' version.
|
||||
version: DocumentVersion,
|
||||
}
|
||||
|
||||
impl Document {
|
||||
pub fn new(contents: String, version: DocumentVersion) -> Self {
|
||||
let index = LineIndex::from_source_text(&contents);
|
||||
Self {
|
||||
contents,
|
||||
index,
|
||||
version,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn contents(&self) -> &str {
|
||||
&self.contents
|
||||
}
|
||||
|
||||
pub fn index(&self) -> &LineIndex {
|
||||
&self.index
|
||||
}
|
||||
|
||||
pub fn version(&self) -> DocumentVersion {
|
||||
self.version
|
||||
}
|
||||
|
||||
pub fn apply_changes(
|
||||
&mut self,
|
||||
changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
|
||||
new_version: DocumentVersion,
|
||||
encoding: PositionEncoding,
|
||||
) {
|
||||
if let [lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None, text, ..
|
||||
}] = changes.as_slice()
|
||||
{
|
||||
tracing::debug!("Fast path - replacing entire document");
|
||||
self.modify(|contents, version| {
|
||||
*contents = text.clone();
|
||||
*version = new_version;
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
let old_contents = self.contents().to_string();
|
||||
let mut new_contents = self.contents().to_string();
|
||||
let mut active_index = self.index().clone();
|
||||
|
||||
for TextDocumentContentChangeEvent {
|
||||
range,
|
||||
text: change,
|
||||
..
|
||||
} in changes
|
||||
{
|
||||
if let Some(range) = range {
|
||||
let range = range.to_text_range(&new_contents, &active_index, encoding);
|
||||
|
||||
new_contents.replace_range(
|
||||
usize::from(range.start())..usize::from(range.end()),
|
||||
&change,
|
||||
);
|
||||
} else {
|
||||
new_contents = change;
|
||||
}
|
||||
|
||||
if new_contents != old_contents {
|
||||
active_index = LineIndex::from_source_text(&new_contents);
|
||||
}
|
||||
}
|
||||
|
||||
self.modify_with_manual_index(|contents, version, index| {
|
||||
if contents != &new_contents {
|
||||
*index = active_index;
|
||||
}
|
||||
*contents = new_contents;
|
||||
*version = new_version;
|
||||
});
|
||||
}
|
||||
|
||||
pub fn update_version(&mut self, new_version: DocumentVersion) {
|
||||
self.modify_with_manual_index(|_, version, _| {
|
||||
*version = new_version;
|
||||
});
|
||||
}
|
||||
|
||||
// A private function for modifying the document's internal state
|
||||
fn modify(&mut self, func: impl FnOnce(&mut String, &mut DocumentVersion)) {
|
||||
self.modify_with_manual_index(|c, v, i| {
|
||||
func(c, v);
|
||||
*i = LineIndex::from_source_text(c);
|
||||
});
|
||||
}
|
||||
|
||||
// A private function for overriding how we update the line index by default.
|
||||
fn modify_with_manual_index(
|
||||
&mut self,
|
||||
func: impl FnOnce(&mut String, &mut DocumentVersion, &mut LineIndex),
|
||||
) {
|
||||
let old_version = self.version;
|
||||
func(&mut self.contents, &mut self.version, &mut self.index);
|
||||
debug_assert!(self.version >= old_version);
|
||||
}
|
||||
}
|
||||
153
crates/ruff_server/src/edit/range.rs
Normal file
153
crates/ruff_server/src/edit/range.rs
Normal file
@@ -0,0 +1,153 @@
|
||||
use super::PositionEncoding;
|
||||
use lsp_types as types;
|
||||
use ruff_source_file::OneIndexed;
|
||||
use ruff_source_file::{LineIndex, SourceLocation};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
pub(crate) trait RangeExt {
|
||||
fn to_text_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding)
|
||||
-> TextRange;
|
||||
}
|
||||
|
||||
pub(crate) trait ToRangeExt {
|
||||
fn to_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> types::Range;
|
||||
}
|
||||
|
||||
fn u32_index_to_usize(index: u32) -> usize {
|
||||
usize::try_from(index).expect("u32 fits in usize")
|
||||
}
|
||||
|
||||
impl RangeExt for lsp_types::Range {
|
||||
fn to_text_range(
|
||||
&self,
|
||||
text: &str,
|
||||
index: &LineIndex,
|
||||
encoding: PositionEncoding,
|
||||
) -> TextRange {
|
||||
let start_line = index.line_range(
|
||||
OneIndexed::from_zero_indexed(u32_index_to_usize(self.start.line)),
|
||||
text,
|
||||
);
|
||||
let end_line = index.line_range(
|
||||
OneIndexed::from_zero_indexed(u32_index_to_usize(self.end.line)),
|
||||
text,
|
||||
);
|
||||
|
||||
let (start_column_offset, end_column_offset) = match encoding {
|
||||
PositionEncoding::UTF8 => (
|
||||
TextSize::new(self.start.character),
|
||||
TextSize::new(self.end.character),
|
||||
),
|
||||
|
||||
PositionEncoding::UTF16 => {
|
||||
// Fast path for ASCII only documents
|
||||
if index.is_ascii() {
|
||||
(
|
||||
TextSize::new(self.start.character),
|
||||
TextSize::new(self.end.character),
|
||||
)
|
||||
} else {
|
||||
// UTF16 encodes characters either as one or two 16 bit words.
|
||||
// The position in `range` is the 16-bit word offset from the start of the line (and not the character offset)
|
||||
// UTF-16 with a text that may use variable-length characters.
|
||||
(
|
||||
utf8_column_offset(self.start.character, &text[start_line]),
|
||||
utf8_column_offset(self.end.character, &text[end_line]),
|
||||
)
|
||||
}
|
||||
}
|
||||
PositionEncoding::UTF32 => {
|
||||
// UTF-32 uses 4 bytes for each character. Meaning, the position in range is a character offset.
|
||||
return TextRange::new(
|
||||
index.offset(
|
||||
OneIndexed::from_zero_indexed(u32_index_to_usize(self.start.line)),
|
||||
OneIndexed::from_zero_indexed(u32_index_to_usize(self.start.character)),
|
||||
text,
|
||||
),
|
||||
index.offset(
|
||||
OneIndexed::from_zero_indexed(u32_index_to_usize(self.end.line)),
|
||||
OneIndexed::from_zero_indexed(u32_index_to_usize(self.end.character)),
|
||||
text,
|
||||
),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
TextRange::new(
|
||||
start_line.start() + start_column_offset.clamp(TextSize::new(0), start_line.end()),
|
||||
end_line.start() + end_column_offset.clamp(TextSize::new(0), end_line.end()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToRangeExt for TextRange {
|
||||
fn to_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> types::Range {
|
||||
types::Range {
|
||||
start: offset_to_position(self.start(), text, index, encoding),
|
||||
end: offset_to_position(self.end(), text, index, encoding),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a UTF-16 code unit offset for a given line into a UTF-8 column number.
|
||||
fn utf8_column_offset(utf16_code_unit_offset: u32, line: &str) -> TextSize {
|
||||
let mut utf8_code_unit_offset = TextSize::new(0);
|
||||
|
||||
let mut i = 0u32;
|
||||
|
||||
for c in line.chars() {
|
||||
if i >= utf16_code_unit_offset {
|
||||
break;
|
||||
}
|
||||
|
||||
// Count characters encoded as two 16 bit words as 2 characters.
|
||||
{
|
||||
utf8_code_unit_offset +=
|
||||
TextSize::new(u32::try_from(c.len_utf8()).expect("utf8 len always <=4"));
|
||||
i += u32::try_from(c.len_utf16()).expect("utf16 len always <=2");
|
||||
}
|
||||
}
|
||||
|
||||
utf8_code_unit_offset
|
||||
}
|
||||
|
||||
fn offset_to_position(
|
||||
offset: TextSize,
|
||||
text: &str,
|
||||
index: &LineIndex,
|
||||
encoding: PositionEncoding,
|
||||
) -> types::Position {
|
||||
let location = match encoding {
|
||||
PositionEncoding::UTF8 => {
|
||||
let row = index.line_index(offset);
|
||||
let column = offset - index.line_start(row, text);
|
||||
|
||||
SourceLocation {
|
||||
column: OneIndexed::from_zero_indexed(column.to_usize()),
|
||||
row,
|
||||
}
|
||||
}
|
||||
PositionEncoding::UTF16 => {
|
||||
let row = index.line_index(offset);
|
||||
|
||||
let column = if index.is_ascii() {
|
||||
(offset - index.line_start(row, text)).to_usize()
|
||||
} else {
|
||||
let up_to_line = &text[TextRange::new(index.line_start(row, text), offset)];
|
||||
up_to_line.encode_utf16().count()
|
||||
};
|
||||
|
||||
SourceLocation {
|
||||
column: OneIndexed::from_zero_indexed(column),
|
||||
row,
|
||||
}
|
||||
}
|
||||
PositionEncoding::UTF32 => index.source_location(offset, text),
|
||||
};
|
||||
|
||||
types::Position {
|
||||
line: u32::try_from(location.row.to_zero_indexed()).expect("row usize fits in u32"),
|
||||
character: u32::try_from(location.column.to_zero_indexed())
|
||||
.expect("character usize fits in u32"),
|
||||
}
|
||||
}
|
||||
33
crates/ruff_server/src/format.rs
Normal file
33
crates/ruff_server/src/format.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use ruff_formatter::PrintedRange;
|
||||
use ruff_python_formatter::format_module_source;
|
||||
use ruff_text_size::TextRange;
|
||||
use ruff_workspace::FormatterSettings;
|
||||
|
||||
use crate::edit::Document;
|
||||
|
||||
pub(crate) fn format(
|
||||
document: &Document,
|
||||
formatter_settings: &FormatterSettings,
|
||||
) -> crate::Result<String> {
|
||||
// TODO(jane): support Jupyter Notebook
|
||||
let format_options = formatter_settings
|
||||
.to_format_options(ruff_python_ast::PySourceType::Python, document.contents());
|
||||
let formatted = format_module_source(document.contents(), format_options)?;
|
||||
Ok(formatted.into_code())
|
||||
}
|
||||
|
||||
pub(crate) fn format_range(
|
||||
document: &Document,
|
||||
formatter_settings: &FormatterSettings,
|
||||
range: TextRange,
|
||||
) -> crate::Result<PrintedRange> {
|
||||
// TODO(jane): support Jupyter Notebook
|
||||
let format_options = formatter_settings
|
||||
.to_format_options(ruff_python_ast::PySourceType::Python, document.contents());
|
||||
|
||||
Ok(ruff_python_formatter::format_range(
|
||||
document.contents(),
|
||||
range,
|
||||
format_options,
|
||||
)?)
|
||||
}
|
||||
21
crates/ruff_server/src/lib.rs
Normal file
21
crates/ruff_server/src/lib.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! ## The Ruff Language Server
|
||||
|
||||
pub use edit::{Document, PositionEncoding};
|
||||
pub use server::Server;
|
||||
|
||||
mod edit;
|
||||
mod format;
|
||||
mod lint;
|
||||
mod server;
|
||||
mod session;
|
||||
|
||||
pub(crate) const SERVER_NAME: &str = "ruff";
|
||||
pub(crate) const DIAGNOSTIC_NAME: &str = "Ruff";
|
||||
|
||||
/// A common result type used in most cases where a
|
||||
/// result type is needed.
|
||||
pub(crate) type Result<T> = anyhow::Result<T>;
|
||||
|
||||
pub(crate) fn version() -> &'static str {
|
||||
ruff_linter::VERSION
|
||||
}
|
||||
120
crates/ruff_server/src/lint.rs
Normal file
120
crates/ruff_server/src/lint.rs
Normal file
@@ -0,0 +1,120 @@
|
||||
//! Access to the Ruff linting API for the LSP
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use ruff_diagnostics::{Applicability, Diagnostic, DiagnosticKind, Fix};
|
||||
use ruff_linter::{
|
||||
directives::{extract_directives, Flags},
|
||||
linter::{check_path, LinterResult, TokenSource},
|
||||
registry::AsRule,
|
||||
settings::{flags, LinterSettings},
|
||||
source_kind::SourceKind,
|
||||
};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_python_parser::AsMode;
|
||||
use ruff_source_file::Locator;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{edit::ToRangeExt, PositionEncoding, DIAGNOSTIC_NAME};
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub(crate) struct DiagnosticFix {
|
||||
pub(crate) kind: DiagnosticKind,
|
||||
pub(crate) fix: Fix,
|
||||
}
|
||||
|
||||
pub(crate) fn check(
|
||||
document: &crate::edit::Document,
|
||||
linter_settings: &LinterSettings,
|
||||
encoding: PositionEncoding,
|
||||
) -> Vec<lsp_types::Diagnostic> {
|
||||
let contents = document.contents();
|
||||
let index = document.index().clone();
|
||||
|
||||
let source_type = PySourceType::default();
|
||||
|
||||
// TODO(jane): Support Jupyter Notebooks
|
||||
let source_kind = SourceKind::Python(contents.to_string());
|
||||
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::with_index(contents, index);
|
||||
|
||||
// Detect the current code style (lazily).
|
||||
let stylist = Stylist::from_tokens(&tokens, &locator);
|
||||
|
||||
// Extra indices from the code.
|
||||
let indexer = Indexer::from_tokens(&tokens, &locator);
|
||||
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = extract_directives(&tokens, Flags::empty(), &locator, &indexer);
|
||||
|
||||
// Generate checks.
|
||||
let LinterResult {
|
||||
data: (diagnostics, _imports),
|
||||
..
|
||||
} = check_path(
|
||||
Path::new("<filename>"),
|
||||
None,
|
||||
&locator,
|
||||
&stylist,
|
||||
&indexer,
|
||||
&directives,
|
||||
linter_settings,
|
||||
flags::Noqa::Enabled,
|
||||
&source_kind,
|
||||
source_type,
|
||||
TokenSource::Tokens(tokens),
|
||||
);
|
||||
|
||||
diagnostics
|
||||
.into_iter()
|
||||
.map(|diagnostic| to_lsp_diagnostic(diagnostic, document, encoding))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn to_lsp_diagnostic(
|
||||
diagnostic: Diagnostic,
|
||||
document: &crate::edit::Document,
|
||||
encoding: PositionEncoding,
|
||||
) -> lsp_types::Diagnostic {
|
||||
let Diagnostic {
|
||||
kind, range, fix, ..
|
||||
} = diagnostic;
|
||||
|
||||
let rule = kind.rule();
|
||||
|
||||
let data = fix.and_then(|fix| {
|
||||
fix.applies(Applicability::Unsafe)
|
||||
.then(|| {
|
||||
serde_json::to_value(&DiagnosticFix {
|
||||
kind: kind.clone(),
|
||||
fix,
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
.flatten()
|
||||
});
|
||||
lsp_types::Diagnostic {
|
||||
range: range.to_range(document.contents(), document.index(), encoding),
|
||||
severity: Some(lsp_types::DiagnosticSeverity::ERROR),
|
||||
code: Some(lsp_types::NumberOrString::String(
|
||||
rule.noqa_code().to_string(),
|
||||
)),
|
||||
code_description: rule.url().and_then(|url| {
|
||||
Some(lsp_types::CodeDescription {
|
||||
href: lsp_types::Url::parse(&url).ok()?,
|
||||
})
|
||||
}),
|
||||
source: Some(DIAGNOSTIC_NAME.into()),
|
||||
message: kind.body,
|
||||
related_information: None,
|
||||
tags: None,
|
||||
data,
|
||||
}
|
||||
}
|
||||
158
crates/ruff_server/src/server.rs
Normal file
158
crates/ruff_server/src/server.rs
Normal file
@@ -0,0 +1,158 @@
|
||||
//! Scheduling, I/O, and API endpoints.
|
||||
|
||||
use anyhow::anyhow;
|
||||
use lsp::Connection;
|
||||
use lsp_server as lsp;
|
||||
use lsp_types as types;
|
||||
use types::ClientCapabilities;
|
||||
use types::CodeActionKind;
|
||||
use types::CodeActionOptions;
|
||||
use types::DiagnosticOptions;
|
||||
use types::OneOf;
|
||||
use types::TextDocumentSyncCapability;
|
||||
use types::TextDocumentSyncKind;
|
||||
use types::TextDocumentSyncOptions;
|
||||
use types::WorkDoneProgressOptions;
|
||||
use types::WorkspaceFoldersServerCapabilities;
|
||||
|
||||
use self::schedule::event_loop_thread;
|
||||
use crate::session::Session;
|
||||
use crate::PositionEncoding;
|
||||
|
||||
mod api;
|
||||
mod client;
|
||||
mod schedule;
|
||||
|
||||
pub(crate) type Result<T> = std::result::Result<T, api::Error>;
|
||||
|
||||
pub struct Server {
|
||||
conn: lsp::Connection,
|
||||
threads: lsp::IoThreads,
|
||||
session: Session,
|
||||
}
|
||||
|
||||
impl Server {
    /// Performs the LSP initialization handshake over stdio and builds the server.
    ///
    /// Reads the client's `initialize` request, computes the capabilities we
    /// advertise, replies via `initialize_finish`, and constructs the
    /// [`Session`] from the workspace folders (falling back to `root_uri`).
    ///
    /// # Errors
    /// Fails if the handshake or JSON decoding fails, if the client provided
    /// neither workspace folders nor a root URI, or if session construction fails.
    pub fn new() -> crate::Result<Self> {
        let (conn, threads) = lsp::Connection::stdio();

        let (id, params) = conn.initialize_start()?;

        let init_params: types::InitializeParams = serde_json::from_value(params)?;

        let client_capabilities = init_params.capabilities;
        let server_capabilities = Self::server_capabilities(&client_capabilities);

        // Prefer the `workspace_folders` list; fall back to the single
        // `root_uri` field for clients that still send only that.
        let workspaces = init_params
            .workspace_folders
            .map(|folders| folders.into_iter().map(|folder| folder.uri).collect())
            .or_else(|| init_params.root_uri.map(|u| vec![u]))
            .ok_or_else(|| {
                anyhow!("No workspace or root URI was given in the LSP initialization parameters. The server cannot start.")
            })?;

        let initialize_data = serde_json::json!({
            "capabilities": server_capabilities,
            "serverInfo": {
                "name": crate::SERVER_NAME,
                "version": crate::version()
            }
        });

        conn.initialize_finish(id, initialize_data)?;

        Ok(Self {
            conn,
            threads,
            session: Session::new(&server_capabilities, &workspaces)?,
        })
    }

    /// Runs the event loop on a dedicated thread, blocks until it finishes,
    /// then joins the stdio I/O threads and returns the loop's result.
    pub fn run(self) -> crate::Result<()> {
        let result = event_loop_thread(move || Self::event_loop(&self.conn, self.session))?.join();
        self.threads.join()?;
        result
    }

    /// The main message loop: receives client messages and dispatches each as
    /// a task on the scheduler. Returns `Ok(())` once the client requests a
    /// shutdown (handled by `handle_shutdown`) or the channel closes.
    fn event_loop(connection: &Connection, session: Session) -> crate::Result<()> {
        // TODO(jane): Make thread count configurable
        let mut scheduler = schedule::Scheduler::new(session, 4, &connection.sender);
        for msg in &connection.receiver {
            let task = match msg {
                lsp::Message::Request(req) => {
                    if connection.handle_shutdown(&req)? {
                        return Ok(());
                    }
                    api::request(req)
                }
                lsp::Message::Notification(notification) => api::notification(notification),
                // We never send requests to the client here, so a response is
                // unexpected: log it and keep looping.
                lsp::Message::Response(response) => {
                    tracing::error!(
                        "Expected request or notification, got response instead: {response:?}"
                    );
                    continue;
                }
            };
            scheduler.dispatch(task);
        }
        Ok(())
    }

    /// Builds the capability set advertised to the client during
    /// initialization, honoring the client's preferred position encoding.
    fn server_capabilities(client_capabilities: &ClientCapabilities) -> types::ServerCapabilities {
        // Choose the best position encoding both sides support; fall back to
        // our default when the client advertises none we understand.
        let position_encoding = client_capabilities
            .general
            .as_ref()
            .and_then(|general_capabilities| general_capabilities.position_encodings.as_ref())
            .and_then(|encodings| {
                encodings
                    .iter()
                    .filter_map(|encoding| PositionEncoding::try_from(encoding).ok())
                    .max() // this selects the highest priority position encoding
            })
            .unwrap_or_default();
        types::ServerCapabilities {
            position_encoding: Some(position_encoding.into()),
            code_action_provider: Some(types::CodeActionProviderCapability::Options(
                CodeActionOptions {
                    code_action_kinds: Some(vec![
                        CodeActionKind::QUICKFIX,
                        CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
                    ]),
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: Some(true),
                    },
                    resolve_provider: Some(false),
                },
            )),
            workspace: Some(types::WorkspaceServerCapabilities {
                workspace_folders: Some(WorkspaceFoldersServerCapabilities {
                    supported: Some(true),
                    change_notifications: Some(OneOf::Left(true)),
                }),
                file_operations: None,
            }),
            document_formatting_provider: Some(OneOf::Left(true)),
            document_range_formatting_provider: Some(OneOf::Left(true)),
            diagnostic_provider: Some(types::DiagnosticServerCapabilities::Options(
                DiagnosticOptions {
                    identifier: Some(crate::DIAGNOSTIC_NAME.into()),
                    // multi-file analysis could change this
                    inter_file_dependencies: false,
                    workspace_diagnostics: false,
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: Some(true),
                    },
                },
            )),
            text_document_sync: Some(TextDocumentSyncCapability::Options(
                TextDocumentSyncOptions {
                    open_close: Some(true),
                    change: Some(TextDocumentSyncKind::INCREMENTAL),
                    will_save: Some(false),
                    will_save_wait_until: Some(false),
                    ..Default::default()
                },
            )),
            ..Default::default()
        }
    }
}
|
||||
244
crates/ruff_server/src/server/api.rs
Normal file
244
crates/ruff_server/src/server/api.rs
Normal file
@@ -0,0 +1,244 @@
|
||||
use crate::{server::schedule::Task, session::Session};
|
||||
use lsp_server as server;
|
||||
|
||||
mod notifications;
|
||||
mod requests;
|
||||
mod traits;
|
||||
|
||||
use notifications as notification;
|
||||
use requests as request;
|
||||
|
||||
use self::traits::{NotificationHandler, RequestHandler};
|
||||
|
||||
use super::{client::Responder, schedule::BackgroundSchedule, Result};
|
||||
|
||||
/// Defines the `document_url` method for implementors of [`traits::Notification`] and [`traits::Request`],
/// given the parameter type used by the implementor.
///
/// Invoked as `define_document_url!(params: &SomeParams)`; the generated
/// method returns `&params.text_document.uri`, so the parameter type must
/// have a `text_document.uri` field.
macro_rules! define_document_url {
    ($params:ident: &$p:ty) => {
        fn document_url($params: &$p) -> &lsp_types::Url {
            &$params.text_document.uri
        }
    };
}

// `macro_rules!` items are only visible textually below their definition;
// this `use` makes the macro importable by path from handler submodules.
use define_document_url;
|
||||
|
||||
pub(super) fn request<'a>(req: server::Request) -> Task<'a> {
|
||||
let id = req.id.clone();
|
||||
|
||||
match req.method.as_str() {
|
||||
request::CodeAction::METHOD => background_request_task::<request::CodeAction>(
|
||||
req,
|
||||
BackgroundSchedule::LatencySensitive,
|
||||
),
|
||||
request::DocumentDiagnostic::METHOD => {
|
||||
background_request_task::<request::DocumentDiagnostic>(
|
||||
req,
|
||||
BackgroundSchedule::LatencySensitive,
|
||||
)
|
||||
}
|
||||
request::Format::METHOD => {
|
||||
background_request_task::<request::Format>(req, BackgroundSchedule::Fmt)
|
||||
}
|
||||
request::FormatRange::METHOD => {
|
||||
background_request_task::<request::FormatRange>(req, BackgroundSchedule::Fmt)
|
||||
}
|
||||
method => {
|
||||
tracing::warn!("Received request {method} which does not have a handler");
|
||||
return Task::nothing();
|
||||
}
|
||||
}
|
||||
.unwrap_or_else(|err| {
|
||||
tracing::error!("Encountered error when routing request with ID {id}: {err}");
|
||||
let result: Result<()> = Err(err);
|
||||
Task::immediate(id, result)
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn notification<'a>(notif: server::Notification) -> Task<'a> {
|
||||
match notif.method.as_str() {
|
||||
notification::Cancel::METHOD => local_notification_task::<notification::Cancel>(notif),
|
||||
notification::DidChange::METHOD => {
|
||||
local_notification_task::<notification::DidChange>(notif)
|
||||
}
|
||||
notification::DidChangeConfiguration::METHOD => {
|
||||
local_notification_task::<notification::DidChangeConfiguration>(notif)
|
||||
}
|
||||
notification::DidChangeWorkspace::METHOD => {
|
||||
local_notification_task::<notification::DidChangeWorkspace>(notif)
|
||||
}
|
||||
notification::DidClose::METHOD => local_notification_task::<notification::DidClose>(notif),
|
||||
notification::DidOpen::METHOD => local_notification_task::<notification::DidOpen>(notif),
|
||||
method => {
|
||||
tracing::warn!("Received notification {method} which does not have a handler.");
|
||||
return Task::nothing();
|
||||
}
|
||||
}
|
||||
.unwrap_or_else(|err| {
|
||||
tracing::error!("Encountered error when routing notification: {err}");
|
||||
Task::nothing()
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn local_request_task<'a, R: traits::SyncRequestHandler>(
|
||||
req: server::Request,
|
||||
) -> super::Result<Task<'a>> {
|
||||
let (id, params) = cast_request::<R>(req)?;
|
||||
Ok(Task::local(|session, notifier, responder| {
|
||||
let result = R::run(session, notifier, params);
|
||||
respond::<R>(id, result, &responder);
|
||||
}))
|
||||
}
|
||||
|
||||
/// Builds a task that runs request handler `R` on a background thread pool
/// (selected by `schedule`), operating on a snapshot of the session taken
/// for the request's document.
fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
    req: server::Request,
    schedule: BackgroundSchedule,
) -> super::Result<Task<'a>> {
    let (id, params) = cast_request::<R>(req)?;
    Ok(Task::background(schedule, move |session: &Session| {
        // TODO(jane): we should log an error if we can't take a snapshot.
        // No snapshot available for this document: return a no-op closure,
        // silently dropping the request.
        let Some(snapshot) = session.take_snapshot(R::document_url(&params)) else {
            return Box::new(|_, _| {});
        };
        // The returned closure is what actually executes on the background
        // thread; it owns the snapshot and the request parameters.
        Box::new(move |notifier, responder| {
            let result = R::run_with_snapshot(snapshot, notifier, params);
            respond::<R>(id, result, &responder);
        })
    }))
}
|
||||
|
||||
fn local_notification_task<'a, N: traits::SyncNotificationHandler>(
|
||||
notif: server::Notification,
|
||||
) -> super::Result<Task<'a>> {
|
||||
let (id, params) = cast_notification::<N>(notif)?;
|
||||
Ok(Task::local(move |session, notifier, _| {
|
||||
if let Err(err) = N::run(session, notifier, params) {
|
||||
tracing::error!("An error occurred while running {id}: {err}");
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
/// Builds a task that runs notification handler `N` on a background thread
/// pool (selected by `schedule`), operating on a snapshot of the session
/// taken for the notification's document. Handler errors are logged.
fn background_notification_thread<'a, N: traits::BackgroundDocumentNotificationHandler>(
    req: server::Notification,
    schedule: BackgroundSchedule,
) -> super::Result<Task<'a>> {
    let (id, params) = cast_notification::<N>(req)?;
    Ok(Task::background(schedule, move |session: &Session| {
        // TODO(jane): we should log an error if we can't take a snapshot.
        // No snapshot available for this document: return a no-op closure,
        // silently dropping the notification.
        let Some(snapshot) = session.take_snapshot(N::document_url(&params)) else {
            return Box::new(|_, _| {});
        };
        // The returned closure executes on the background thread and owns
        // the snapshot and parameters.
        Box::new(move |notifier, _| {
            if let Err(err) = N::run_with_snapshot(snapshot, notifier, params) {
                tracing::error!("An error occurred while running {id}: {err}");
            }
        })
    }))
}
|
||||
|
||||
/// Tries to cast a serialized request from the server into
|
||||
/// a parameter type for a specific request handler.
|
||||
/// It is *highly* recommended to not override this function in your
|
||||
/// implementation.
|
||||
fn cast_request<Req>(
|
||||
request: server::Request,
|
||||
) -> super::Result<(
|
||||
server::RequestId,
|
||||
<<Req as RequestHandler>::RequestType as lsp_types::request::Request>::Params,
|
||||
)>
|
||||
where
|
||||
Req: traits::RequestHandler,
|
||||
{
|
||||
request
|
||||
.extract(Req::METHOD)
|
||||
.map_err(|err| match err {
|
||||
json_err @ server::ExtractError::JsonError { .. } => {
|
||||
anyhow::anyhow!("JSON parsing failure:\n{json_err}")
|
||||
}
|
||||
server::ExtractError::MethodMismatch(_) => {
|
||||
unreachable!("A method mismatch should not be possible here unless you've used a different handler (`Req`) \
|
||||
than the one whose method name was matched against earlier.")
|
||||
}
|
||||
})
|
||||
.with_failure_code(server::ErrorCode::InternalError)
|
||||
}
|
||||
|
||||
/// Sends back a response to the server using a [`Responder`].
|
||||
fn respond<Req>(
|
||||
id: server::RequestId,
|
||||
result: crate::server::Result<
|
||||
<<Req as traits::RequestHandler>::RequestType as lsp_types::request::Request>::Result,
|
||||
>,
|
||||
responder: &Responder,
|
||||
) where
|
||||
Req: traits::RequestHandler,
|
||||
{
|
||||
if let Err(err) = responder.respond(id, result) {
|
||||
tracing::error!("Failed to send response: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
/// Tries to cast a serialized request from the server into
|
||||
/// a parameter type for a specific request handler.
|
||||
fn cast_notification<N>(
|
||||
notification: server::Notification,
|
||||
) -> super::Result<
|
||||
(
|
||||
&'static str,
|
||||
<<N as traits::NotificationHandler>::NotificationType as lsp_types::notification::Notification>::Params,
|
||||
)> where N: traits::NotificationHandler{
|
||||
Ok((
|
||||
N::METHOD,
|
||||
notification
|
||||
.extract(N::METHOD)
|
||||
.map_err(|err| match err {
|
||||
json_err @ server::ExtractError::JsonError { .. } => {
|
||||
anyhow::anyhow!("JSON parsing failure:\n{json_err}")
|
||||
}
|
||||
server::ExtractError::MethodMismatch(_) => {
|
||||
unreachable!("A method mismatch should not be possible here unless you've used a different handler (`N`) \
|
||||
than the one whose method name was matched against earlier.")
|
||||
}
|
||||
})
|
||||
.with_failure_code(server::ErrorCode::InternalError)?,
|
||||
))
|
||||
}
|
||||
|
||||
/// An error that carries an LSP error code alongside the underlying cause,
/// so failures can be reported to the client with the proper code.
pub(crate) struct Error {
    // The JSON-RPC error code sent back to the client.
    pub(crate) code: server::ErrorCode,
    // The underlying cause; this is what `Debug`/`Display` print.
    pub(crate) error: anyhow::Error,
}
|
||||
|
||||
/// A trait to convert result types into the server result type, [`super::Result`].
trait LSPResult<T> {
    /// Attaches the given LSP error code to the error side of `self`.
    fn with_failure_code(self, code: server::ErrorCode) -> super::Result<T>;
}
|
||||
|
||||
impl<T, E: Into<anyhow::Error>> LSPResult<T> for core::result::Result<T, E> {
|
||||
fn with_failure_code(self, code: server::ErrorCode) -> super::Result<T> {
|
||||
self.map_err(|err| Error::new(err.into(), code))
|
||||
}
|
||||
}
|
||||
|
||||
impl Error {
|
||||
pub(crate) fn new(err: anyhow::Error, code: server::ErrorCode) -> Self {
|
||||
Self { code, error: err }
|
||||
}
|
||||
}
|
||||
|
||||
// Right now, we treat the error code as invisible data that won't
|
||||
// be printed.
|
||||
impl std::fmt::Debug for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.error.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.error.fmt(f)
|
||||
}
|
||||
}
|
||||
14
crates/ruff_server/src/server/api/notifications.rs
Normal file
14
crates/ruff_server/src/server/api/notifications.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
mod cancel;
|
||||
mod did_change;
|
||||
mod did_change_configuration;
|
||||
mod did_change_workspace;
|
||||
mod did_close;
|
||||
mod did_open;
|
||||
|
||||
use super::traits::{NotificationHandler, SyncNotificationHandler};
|
||||
pub(super) use cancel::Cancel;
|
||||
pub(super) use did_change::DidChange;
|
||||
pub(super) use did_change_configuration::DidChangeConfiguration;
|
||||
pub(super) use did_change_workspace::DidChangeWorkspace;
|
||||
pub(super) use did_close::DidClose;
|
||||
pub(super) use did_open::DidOpen;
|
||||
23
crates/ruff_server/src/server/api/notifications/cancel.rs
Normal file
23
crates/ruff_server/src/server/api/notifications/cancel.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use crate::server::client::Notifier;
|
||||
use crate::server::Result;
|
||||
use crate::session::Session;
|
||||
use lsp_types as types;
|
||||
use lsp_types::notification as notif;
|
||||
|
||||
/// Handler for the `$/cancelRequest` notification. Currently a no-op.
pub(crate) struct Cancel;

impl super::NotificationHandler for Cancel {
    type NotificationType = notif::Cancel;
}

impl super::SyncNotificationHandler for Cancel {
    #[tracing::instrument(skip_all)]
    fn run(
        _session: &mut Session,
        _notifier: Notifier,
        _params: types::CancelParams,
    ) -> Result<()> {
        // TODO(jane): Handle this once we have task cancellation in the scheduler.
        Ok(())
    }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user