Compare commits

..

1 Commits

Author SHA1 Message Date
David Peter
e935bc5578 [ty] enum.Flag 2025-07-29 16:52:42 +02:00
153 changed files with 1915 additions and 6697 deletions

View File

@@ -1,35 +1,5 @@
# Changelog
## 0.12.7
This is a follow-up release to 0.12.6. Because of an issue in the package metadata, 0.12.6 failed to publish fully to PyPI and has been yanked. Similarly, there is no GitHub release or Git tag for 0.12.6. The contents of the 0.12.7 release are identical to 0.12.6, except for the updated metadata.
## 0.12.6
### Preview features
- \[`flake8-commas`\] Add support for trailing comma checks in type parameter lists (`COM812`, `COM819`) ([#19390](https://github.com/astral-sh/ruff/pull/19390))
- \[`pylint`\] Implement auto-fix for `missing-maxsplit-arg` (`PLC0207`) ([#19387](https://github.com/astral-sh/ruff/pull/19387))
- \[`ruff`\] Offer fixes for `RUF039` in more cases ([#19065](https://github.com/astral-sh/ruff/pull/19065))
### Bug fixes
- Support `.pyi` files in ruff analyze graph ([#19611](https://github.com/astral-sh/ruff/pull/19611))
- \[`flake8-pyi`\] Preserve inline comment in ellipsis removal (`PYI013`) ([#19399](https://github.com/astral-sh/ruff/pull/19399))
- \[`perflint`\] Ignore rule if target is `global` or `nonlocal` (`PERF401`) ([#19539](https://github.com/astral-sh/ruff/pull/19539))
- \[`pyupgrade`\] Fix `UP030` to avoid modifying double curly braces in format strings ([#19378](https://github.com/astral-sh/ruff/pull/19378))
- \[`refurb`\] Ignore decorated functions for `FURB118` ([#19339](https://github.com/astral-sh/ruff/pull/19339))
- \[`refurb`\] Mark `int` and `bool` cases for `Decimal.from_float` as safe fixes (`FURB164`) ([#19468](https://github.com/astral-sh/ruff/pull/19468))
- \[`ruff`\] Fix `RUF033` for named default expressions ([#19115](https://github.com/astral-sh/ruff/pull/19115))
### Rule changes
- \[`flake8-blind-except`\] Change `BLE001` to permit `logging.critical(..., exc_info=True)` ([#19520](https://github.com/astral-sh/ruff/pull/19520))
### Performance
- Add support for specifying minimum dots in detected string imports ([#19538](https://github.com/astral-sh/ruff/pull/19538))
## 0.12.5
### Preview features

333
Cargo.lock generated
View File

@@ -4,9 +4,9 @@ version = 4
[[package]]
name = "adler2"
version = "2.0.1"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
name = "aho-corasick"
@@ -77,52 +77,52 @@ checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
[[package]]
name = "anstyle-lossy"
version = "1.1.4"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04d3a5dc826f84d0ea11882bb8054ff7f3d482602e11bb181101303a279ea01f"
checksum = "934ff8719effd2023a48cf63e69536c1c3ced9d3895068f6f5cc9a4ff845e59b"
dependencies = [
"anstyle",
]
[[package]]
name = "anstyle-parse"
version = "0.2.7"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.3"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9"
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "anstyle-svg"
version = "0.1.9"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a43964079ef399480603125d5afae2b219aceffb77478956e25f17b9bc3435c"
checksum = "d3607949e9f6de49ea4bafe12f5e4fd73613ebf24795e48587302a8cc0e4bb35"
dependencies = [
"anstream",
"anstyle",
"anstyle-lossy",
"anstyle-parse",
"html-escape",
"unicode-width 0.2.1",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.9"
version = "3.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882"
checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e"
dependencies = [
"anstyle",
"once_cell_polyfill",
"once_cell",
"windows-sys 0.59.0",
]
@@ -210,9 +210,9 @@ dependencies = [
[[package]]
name = "autocfg"
version = "1.5.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "base64"
@@ -301,9 +301,9 @@ dependencies = [
[[package]]
name = "bumpalo"
version = "3.19.0"
version = "3.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
[[package]]
name = "byteorder"
@@ -337,18 +337,18 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "castaway"
version = "0.2.4"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
checksum = "0abae9be0aaf9ea96a3b1b8b1b55c602ca751eba1b1500220cea4ecbafe7c0d5"
dependencies = [
"rustversion",
]
[[package]]
name = "cc"
version = "1.2.30"
version = "1.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7"
checksum = "5f4ac86a9e5bc1e2b3449ab9d7d3a6a405e3d1bb28d7b9be8614f55846ae3766"
dependencies = [
"jobserver",
"libc",
@@ -357,9 +357,9 @@ dependencies = [
[[package]]
name = "cfg-if"
version = "1.0.1"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cfg_aliases"
@@ -408,9 +408,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.42"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed87a9d530bb41a67537289bafcac159cb3ee28460e0a4571123d2a778a6a882"
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [
"clap_builder",
"clap_derive",
@@ -418,9 +418,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.42"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64f4f3f3c77c94aff3c7e9aac9a2ca1974a5adf392a8bb751e827d6d127ab966"
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [
"anstream",
"anstyle",
@@ -431,9 +431,9 @@ dependencies = [
[[package]]
name = "clap_complete"
version = "4.5.55"
version = "4.5.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5abde44486daf70c5be8b8f8f1b66c49f86236edf6fa2abadb4d961c4c6229a"
checksum = "c91d3baa3bcd889d60e6ef28874126a0b384fd225ab83aa6d8a801c519194ce1"
dependencies = [
"clap",
]
@@ -451,9 +451,9 @@ dependencies = [
[[package]]
name = "clap_complete_nushell"
version = "4.5.8"
version = "4.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a0c951694691e65bf9d421d597d68416c22de9632e884c28412cb8cd8b73dce"
checksum = "c6a8b1593457dfc2fe539002b795710d022dc62a65bf15023f039f9760c7b18a"
dependencies = [
"clap",
"clap_complete",
@@ -473,9 +473,9 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.5"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "clearscreen"
@@ -492,9 +492,9 @@ dependencies = [
[[package]]
name = "codspeed"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29180405ab3b37bb020246ea66bf8ae233708766fd59581ae929feaef10ce91"
checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf"
dependencies = [
"anyhow",
"bincode 1.3.3",
@@ -510,9 +510,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2454d874ca820ffd71273565530ad318f413195bbc99dce6c958ca07db362c63"
checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a"
dependencies = [
"codspeed",
"codspeed-criterion-compat-walltime",
@@ -521,9 +521,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat-walltime"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093a9383cdd1a5a0bd1a47cdafb49ae0c6dcd0793c8fb8f79768bab423128c9c"
checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64"
dependencies = [
"anes",
"cast",
@@ -546,9 +546,9 @@ dependencies = [
[[package]]
name = "codspeed-divan-compat"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c73bce1e3f47738bf74a6b58b72a49b4f40c837ce420d8d65a270298592aac"
checksum = "3acf1d6fe367c2ff5ff136ca723f678490c3691d59d7f2b83d5e53b7b25ac91e"
dependencies = [
"codspeed",
"codspeed-divan-compat-macros",
@@ -557,9 +557,9 @@ dependencies = [
[[package]]
name = "codspeed-divan-compat-macros"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea51dd8add7eba774cc24b4a98324252ac3ec092ccb5f07e52bbe1cb72a6d373"
checksum = "bcfa2013d7bee54a497d0e1410751d5de690fd67a3e9eb728ca049b6a3d16d0b"
dependencies = [
"divan-macros",
"itertools 0.14.0",
@@ -571,9 +571,9 @@ dependencies = [
[[package]]
name = "codspeed-divan-compat-walltime"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "417e9edfc4b0289d4b9b48e62f98c6168d5e30c0e612b2935e394b0dd930fe83"
checksum = "e513100fb0e7ba02fb3824546ecd2abfb8f334262f0972225b463aad07f99ff0"
dependencies = [
"cfg-if",
"clap",
@@ -586,15 +586,15 @@ dependencies = [
[[package]]
name = "collection_literals"
version = "1.0.2"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b3f65b8fb8e88ba339f7d23a390fe1b0896217da05e2a66c584c9b29a91df8"
checksum = "186dce98367766de751c42c4f03970fc60fc012296e706ccbb9d5df9b6c1e271"
[[package]]
name = "colorchoice"
version = "1.0.4"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "colored"
@@ -704,9 +704,9 @@ dependencies = [
[[package]]
name = "crc32fast"
version = "1.5.0"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
dependencies = [
"cfg-if",
]
@@ -810,9 +810,9 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crunchy"
version = "0.2.4"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929"
[[package]]
name = "crypto-common"
@@ -1000,9 +1000,9 @@ checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
[[package]]
name = "dyn-clone"
version = "1.0.20"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005"
[[package]]
name = "either"
@@ -1030,12 +1030,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
version = "0.3.13"
version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18"
dependencies = [
"libc",
"windows-sys 0.60.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -1096,9 +1096,9 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.1.2"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -1184,11 +1184,11 @@ dependencies = [
[[package]]
name = "getopts"
version = "0.2.23"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cba6ae63eb948698e300f645f87c70f76630d505f23b8907cf1e193ee85048c1"
checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
dependencies = [
"unicode-width 0.2.1",
"unicode-width 0.1.14",
]
[[package]]
@@ -1199,7 +1199,7 @@ checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"libc",
"wasi 0.11.1+wasi-snapshot-preview1",
"wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
@@ -1290,9 +1290,15 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.5.2"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
[[package]]
name = "hermit-abi"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08"
[[package]]
name = "home"
@@ -1385,9 +1391,9 @@ checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
[[package]]
name = "icu_properties"
version = "2.0.1"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
checksum = "2549ca8c7241c82f59c80ba2a6f415d931c5b58d24fb8412caa1a1f02c49139a"
dependencies = [
"displaydoc",
"icu_collections",
@@ -1401,9 +1407,9 @@ dependencies = [
[[package]]
name = "icu_properties_data"
version = "2.0.1"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
checksum = "8197e866e47b68f8f7d95249e172903bec06004b18b2937f1095d40a0c57de04"
[[package]]
name = "icu_provider"
@@ -1615,7 +1621,7 @@ version = "0.4.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi",
"hermit-abi 0.5.1",
"libc",
"windows-sys 0.59.0",
]
@@ -1805,9 +1811,9 @@ dependencies = [
[[package]]
name = "libredox"
version = "0.1.8"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "360e552c93fa0e8152ab463bc4c4837fce76a225df11dfaeea66c313de5e61f7"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.9.1",
"libc",
@@ -1974,23 +1980,23 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.9"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
dependencies = [
"adler2",
]
[[package]]
name = "mio"
version = "1.0.4"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
dependencies = [
"libc",
"log",
"wasi 0.11.1+wasi-snapshot-preview1",
"windows-sys 0.59.0",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.52.0",
]
[[package]]
@@ -2001,9 +2007,9 @@ checksum = "308d96db8debc727c3fd9744aac51751243420e46edf401010908da7f8d5e57c"
[[package]]
name = "newtype-uuid"
version = "1.2.4"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a17d82edb1c8a6c20c238747ae7aae9181133e766bc92cd2556fdd764407d0d1"
checksum = "ee3224f0e8be7c2a1ebc77ef9c3eecb90f55c6594399ee825de964526b3c9056"
dependencies = [
"uuid",
]
@@ -2093,11 +2099,11 @@ dependencies = [
[[package]]
name = "num_cpus"
version = "1.17.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi",
"hermit-abi 0.3.9",
"libc",
]
@@ -2107,12 +2113,6 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "once_cell_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
[[package]]
name = "oorandom"
version = "11.1.5"
@@ -2137,9 +2137,9 @@ dependencies = [
[[package]]
name = "os_pipe"
version = "1.2.2"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db335f4760b14ead6290116f2427bf33a14d4f0617d49f78a246de10c1831224"
checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982"
dependencies = [
"libc",
"windows-sys 0.59.0",
@@ -2147,9 +2147,9 @@ dependencies = [
[[package]]
name = "os_str_bytes"
version = "7.1.1"
version = "7.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63eceb7b5d757011a87d08eb2123db15d87fb0c281f65d101ce30a1e96c3ad5c"
checksum = "c86e2db86dd008b4c88c77a9bb83d9286bf77204e255bb3fda3b2eebcae66b62"
dependencies = [
"memchr",
]
@@ -2162,9 +2162,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "parking_lot"
version = "0.12.4"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
dependencies = [
"lock_api",
"parking_lot_core",
@@ -2172,9 +2172,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.9.11"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [
"cfg-if",
"libc",
@@ -2289,9 +2289,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.8.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323"
checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6"
dependencies = [
"memchr",
"thiserror 2.0.12",
@@ -2300,9 +2300,9 @@ dependencies = [
[[package]]
name = "pest_derive"
version = "2.8.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc"
checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5"
dependencies = [
"pest",
"pest_generator",
@@ -2310,9 +2310,9 @@ dependencies = [
[[package]]
name = "pest_generator"
version = "2.8.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966"
checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841"
dependencies = [
"pest",
"pest_meta",
@@ -2323,10 +2323,11 @@ dependencies = [
[[package]]
name = "pest_meta"
version = "2.8.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5"
checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0"
dependencies = [
"once_cell",
"pest",
"sha2",
]
@@ -2383,9 +2384,9 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "portable-atomic"
version = "1.11.1"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
[[package]]
name = "portable-atomic-util"
@@ -2571,9 +2572,9 @@ dependencies = [
[[package]]
name = "r-efi"
version = "5.3.0"
version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
[[package]]
name = "radium"
@@ -2662,9 +2663,9 @@ dependencies = [
[[package]]
name = "redox_syscall"
version = "0.5.17"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
dependencies = [
"bitflags 2.9.1",
]
@@ -2743,7 +2744,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.12.7"
version = "0.12.5"
dependencies = [
"anyhow",
"argfile",
@@ -2795,7 +2796,7 @@ dependencies = [
"test-case",
"thiserror 2.0.12",
"tikv-jemallocator",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"walkdir",
"wild",
@@ -2811,7 +2812,7 @@ dependencies = [
"ruff_annotate_snippets",
"serde",
"snapbox",
"toml 0.9.4",
"toml 0.9.2",
"tryfn",
"unicode-width 0.2.1",
]
@@ -2890,6 +2891,7 @@ dependencies = [
"tracing",
"tracing-subscriber",
"ty_static",
"unicode-width 0.2.1",
"web-time",
"zip",
]
@@ -2927,7 +2929,7 @@ dependencies = [
"similar",
"strum",
"tempfile",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"tracing-indicatif",
"tracing-subscriber",
@@ -2995,7 +2997,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.12.7"
version = "0.12.5"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3048,7 +3050,7 @@ dependencies = [
"tempfile",
"test-case",
"thiserror 2.0.12",
"toml 0.9.4",
"toml 0.9.2",
"typed-arena",
"unicode-normalization",
"unicode-width 0.2.1",
@@ -3298,7 +3300,7 @@ dependencies = [
"serde_json",
"shellexpand",
"thiserror 2.0.12",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"tracing-log",
"tracing-subscriber",
@@ -3327,7 +3329,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.12.7"
version = "0.12.5"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3388,7 +3390,7 @@ dependencies = [
"shellexpand",
"strum",
"tempfile",
"toml 0.9.4",
"toml 0.9.2",
]
[[package]]
@@ -3415,22 +3417,22 @@ checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08"
[[package]]
name = "rustix"
version = "1.0.8"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
dependencies = [
"bitflags 2.9.1",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.60.2",
"windows-sys 0.59.0",
]
[[package]]
name = "rustversion"
version = "1.0.21"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d"
checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
[[package]]
name = "ryu"
@@ -3441,7 +3443,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=f3dc2f30f9a250618161e35600a00de7fe744953#f3dc2f30f9a250618161e35600a00de7fe744953"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
dependencies = [
"boxcar",
"compact_str",
@@ -3466,12 +3468,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=f3dc2f30f9a250618161e35600a00de7fe744953#f3dc2f30f9a250618161e35600a00de7fe744953"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
[[package]]
name = "salsa-macros"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=f3dc2f30f9a250618161e35600a00de7fe744953#f3dc2f30f9a250618161e35600a00de7fe744953"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
dependencies = [
"proc-macro2",
"quote",
@@ -3938,11 +3940,12 @@ dependencies = [
[[package]]
name = "thread_local"
version = "1.1.9"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
@@ -4023,9 +4026,9 @@ dependencies = [
[[package]]
name = "toml"
version = "0.9.4"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41ae868b5a0f67631c14589f7e250c1ea2c574ee5ba21c6c8dd4b1485705a5a1"
checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac"
dependencies = [
"indexmap",
"serde",
@@ -4096,9 +4099,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.30"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
dependencies = [
"proc-macro2",
"quote",
@@ -4128,9 +4131,9 @@ dependencies = [
[[package]]
name = "tracing-indicatif"
version = "0.3.12"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1983afead46ff13a3c93581e0cec31d20b29efdd22cbdaa8b9f850eccf2c352"
checksum = "8c714cc8fc46db04fcfddbd274c6ef59bebb1b435155984e7c6e89c3ce66f200"
dependencies = [
"indicatif",
"tracing",
@@ -4203,7 +4206,7 @@ dependencies = [
"ruff_python_trivia",
"salsa",
"tempfile",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"tracing-flame",
"tracing-subscriber",
@@ -4264,7 +4267,7 @@ dependencies = [
"schemars",
"serde",
"thiserror 2.0.12",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"ty_python_semantic",
"ty_vendored",
@@ -4387,7 +4390,7 @@ dependencies = [
"smallvec",
"tempfile",
"thiserror 2.0.12",
"toml 0.9.4",
"toml 0.9.2",
"tracing",
"ty_python_semantic",
"ty_static",
@@ -4711,9 +4714,9 @@ dependencies = [
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasi"
@@ -4892,9 +4895,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.61.2"
version = "0.61.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
checksum = "46ec44dc15085cea82cf9c78f85a9114c463a369786585ad2882d1ff0b0acf40"
dependencies = [
"windows-implement",
"windows-interface",
@@ -4927,28 +4930,37 @@ dependencies = [
[[package]]
name = "windows-link"
version = "0.1.3"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
[[package]]
name = "windows-result"
version = "0.3.4"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
checksum = "4b895b5356fc36103d0f64dd1e94dfa7ac5633f1c9dd6e80fe9ec4adef69e09d"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.4.2"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
checksum = "2a7ab927b2637c19b3dbe0965e75d8f2d30bdd697a1516191cad2ec4df8fb28a"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
@@ -4964,7 +4976,7 @@ version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
dependencies = [
"windows-targets 0.53.3",
"windows-targets 0.53.2",
]
[[package]]
@@ -4985,11 +4997,10 @@ dependencies = [
[[package]]
name = "windows-targets"
version = "0.53.3"
version = "0.53.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
dependencies = [
"windows-link",
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
"windows_i686_gnu 0.53.0",
@@ -5098,9 +5109,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
version = "0.7.12"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec"
dependencies = [
"memchr",
]
@@ -5167,18 +5178,18 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.8.26"
version = "0.8.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.26"
version = "0.8.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef"
dependencies = [
"proc-macro2",
"quote",

View File

@@ -141,7 +141,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "f3dc2f30f9a250618161e35600a00de7fe744953" }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }

View File

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.7/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.7
rev: v0.12.5
hooks:
# Run the linter.
- id: ruff-check

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.7"
version = "0.12.5"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -351,41 +351,6 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
});
}
fn benchmark_tuple_implicit_instance_attributes(criterion: &mut Criterion) {
setup_rayon();
criterion.bench_function("ty_micro[many_tuple_assignments]", |b| {
b.iter_batched_ref(
|| {
// This is a regression benchmark for a case that used to hang:
// https://github.com/astral-sh/ty/issues/765
setup_micro_case(
r#"
from typing import Any
class A:
foo: tuple[Any, ...]
class B(A):
def __init__(self, parent: "C", x: tuple[Any]):
self.foo = parent.foo + x
class C(A):
def __init__(self, parent: B, x: tuple[Any]):
self.foo = parent.foo + x
"#,
)
},
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}
fn benchmark_complex_constrained_attributes_1(criterion: &mut Criterion) {
setup_rayon();
@@ -665,7 +630,6 @@ criterion_group!(
micro,
benchmark_many_string_assignments,
benchmark_many_tuple_assignments,
benchmark_tuple_implicit_instance_attributes,
benchmark_complex_constrained_attributes_1,
benchmark_complex_constrained_attributes_2,
benchmark_many_enum_members,

View File

@@ -42,6 +42,7 @@ serde_json = { workspace = true, optional = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
unicode-width = { workspace = true }
zip = { workspace = true }
[target.'cfg(target_arch="wasm32")'.dependencies]

View File

@@ -21,7 +21,7 @@ mod stylesheet;
/// characteristics in the inputs given to the tool. Typically, but not always,
/// a characteristic is a deficiency. An example of a characteristic that is
/// _not_ a deficiency is the `reveal_type` diagnostic for our type checker.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub struct Diagnostic {
/// The actual diagnostic.
///
@@ -479,7 +479,7 @@ impl Diagnostic {
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct DiagnosticInner {
id: DiagnosticId,
severity: Severity,
@@ -555,7 +555,7 @@ impl Eq for RenderingSortKey<'_> {}
/// Currently, the order in which sub-diagnostics are rendered relative to one
/// another (for a single parent diagnostic) is the order in which they were
/// attached to the diagnostic.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub struct SubDiagnostic {
/// Like with `Diagnostic`, we box the `SubDiagnostic` to make it
/// pointer-sized.
@@ -659,7 +659,7 @@ impl SubDiagnostic {
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct SubDiagnosticInner {
severity: SubDiagnosticSeverity,
message: DiagnosticMessage,
@@ -687,7 +687,7 @@ struct SubDiagnosticInner {
///
/// Messages attached to annotations should also be as brief and specific as
/// possible. Long messages could negative impact the quality of rendering.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub struct Annotation {
/// The span of this annotation, corresponding to some subsequence of the
/// user's input that we want to highlight.
@@ -807,7 +807,7 @@ impl Annotation {
///
/// These tags are used to provide additional information about the annotation.
/// and are passed through to the language server protocol.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub enum DiagnosticTag {
/// Unused or unnecessary code. Used for unused parameters, unreachable code, etc.
Unnecessary,
@@ -1016,7 +1016,7 @@ impl std::fmt::Display for DiagnosticId {
///
/// This enum presents a unified interface to these two types for the sake of creating [`Span`]s and
/// emitting diagnostics from both ty and ruff.
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
pub enum UnifiedFile {
Ty(File),
Ruff(SourceFile),
@@ -1080,7 +1080,7 @@ impl DiagnosticSource {
/// It consists of a `File` and an optional range into that file. When the
/// range isn't present, it semantically implies that the diagnostic refers to
/// the entire file. For example, when the file should be executable but isn't.
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
pub struct Span {
file: UnifiedFile,
range: Option<TextRange>,
@@ -1158,7 +1158,7 @@ impl From<crate::files::FileRange> for Span {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum Severity {
Info,
Warning,
@@ -1193,7 +1193,7 @@ impl Severity {
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
Help,
Info,
@@ -1428,7 +1428,7 @@ impl std::fmt::Display for ConciseMessage<'_> {
/// In most cases, callers shouldn't need to use this. Instead, there is
/// a blanket trait implementation for `IntoDiagnosticMessage` for
/// anything that implements `std::fmt::Display`.
#[derive(Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
pub struct DiagnosticMessage(Box<str>);
impl DiagnosticMessage {

View File

@@ -585,7 +585,8 @@ impl<'r> RenderableSnippet<'r> {
let EscapedSourceCode {
text: snippet,
annotations,
} = replace_unprintable(snippet, annotations).fix_up_empty_spans_after_line_terminator();
} = replace_whitespace_and_unprintable(snippet, annotations)
.fix_up_empty_spans_after_line_terminator();
RenderableSnippet {
snippet,
@@ -827,18 +828,13 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
path
}
/// Given some source code and annotation ranges, this routine replaces
/// unprintable characters with printable representations of them.
/// Given some source code and annotation ranges, this routine replaces tabs
/// with ASCII whitespace, and unprintable characters with printable
/// representations of them.
///
/// The source code and annotations returned are updated to reflect changes made
/// to the source code (if any).
///
/// We don't need to normalize whitespace, such as converting tabs to spaces,
/// because `annotate-snippets` handles that internally. Similarly, it's safe to
/// modify the annotation ranges by inserting 3-byte Unicode replacements
/// because `annotate-snippets` will account for their actual width when
/// rendering and displaying the column to the user.
fn replace_unprintable<'r>(
fn replace_whitespace_and_unprintable<'r>(
source: &'r str,
mut annotations: Vec<RenderableAnnotation<'r>>,
) -> EscapedSourceCode<'r> {
@@ -870,17 +866,48 @@ fn replace_unprintable<'r>(
}
};
const TAB_SIZE: usize = 4;
let mut width = 0;
let mut column = 0;
let mut last_end = 0;
let mut result = String::new();
for (index, c) in source.char_indices() {
if let Some(printable) = unprintable_replacement(c) {
result.push_str(&source[last_end..index]);
let old_width = width;
match c {
'\n' | '\r' => {
width = 0;
column = 0;
}
'\t' => {
let tab_offset = TAB_SIZE - (column % TAB_SIZE);
width += tab_offset;
column += tab_offset;
let len = printable.text_len().to_u32();
update_ranges(result.text_len().to_usize(), len);
let tab_width =
u32::try_from(width - old_width).expect("small width because of tab size");
result.push_str(&source[last_end..index]);
result.push(printable);
last_end = index + 1;
update_ranges(result.text_len().to_usize(), tab_width);
for _ in 0..tab_width {
result.push(' ');
}
last_end = index + 1;
}
_ => {
width += unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
column += 1;
if let Some(printable) = unprintable_replacement(c) {
result.push_str(&source[last_end..index]);
let len = printable.text_len().to_u32();
update_ranges(result.text_len().to_usize(), len);
result.push(printable);
last_end = index + 1;
}
}
}
}

View File

@@ -177,25 +177,4 @@ print()
Ok(())
}
/// Ensure that the header column matches the column in the user's input, even if we've replaced
/// tabs with spaces for rendering purposes.
#[test]
fn tab_replacement() {
let mut env = TestEnvironment::new();
env.add("example.py", "def foo():\n\treturn 1");
env.format(DiagnosticFormat::Full);
let diagnostic = env.err().primary("example.py", "2:1", "2:9", "").build();
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:2:2
|
1 | def foo():
2 | return 1
| ^^^^^^^^
|
");
}
}

View File

@@ -21,7 +21,7 @@ use crate::source::source_text;
/// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged.
#[salsa::tracked(returns(ref), no_eq, heap_size=get_size2::heap_size)]
#[salsa::tracked(returns(ref), no_eq, heap_size=get_size2::GetSize::get_heap_size)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
let _span = tracing::trace_span!("parsed_module", ?file).entered();

View File

@@ -9,7 +9,7 @@ use crate::Db;
use crate::files::{File, FilePath};
/// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked(heap_size=get_size2::heap_size)]
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let path = file.path(db);
let _span = tracing::trace_span!("source_text", file = %path).entered();
@@ -69,21 +69,21 @@ impl SourceText {
pub fn as_str(&self) -> &str {
match &self.inner.kind {
SourceTextKind::Text(source) => source,
SourceTextKind::Notebook { notebook } => notebook.source_code(),
SourceTextKind::Notebook(notebook) => notebook.source_code(),
}
}
/// Returns the underlying notebook if this is a notebook file.
pub fn as_notebook(&self) -> Option<&Notebook> {
match &self.inner.kind {
SourceTextKind::Notebook { notebook } => Some(notebook),
SourceTextKind::Notebook(notebook) => Some(notebook),
SourceTextKind::Text(_) => None,
}
}
/// Returns `true` if this is a notebook source file.
pub fn is_notebook(&self) -> bool {
matches!(&self.inner.kind, SourceTextKind::Notebook { .. })
matches!(&self.inner.kind, SourceTextKind::Notebook(_))
}
/// Returns `true` if there was an error when reading the content of the file.
@@ -108,7 +108,7 @@ impl std::fmt::Debug for SourceText {
SourceTextKind::Text(text) => {
dbg.field(text);
}
SourceTextKind::Notebook { notebook } => {
SourceTextKind::Notebook(notebook) => {
dbg.field(notebook);
}
}
@@ -123,15 +123,23 @@ struct SourceTextInner {
read_error: Option<SourceTextError>,
}
#[derive(Eq, PartialEq, get_size2::GetSize)]
#[derive(Eq, PartialEq)]
enum SourceTextKind {
Text(String),
Notebook {
// Jupyter notebooks are not very relevant for memory profiling, and contain
// arbitrary JSON values that do not implement the `GetSize` trait.
#[get_size(ignore)]
notebook: Box<Notebook>,
},
Notebook(Box<Notebook>),
}
impl get_size2::GetSize for SourceTextKind {
fn get_heap_size(&self) -> usize {
match self {
SourceTextKind::Text(text) => text.get_heap_size(),
// TODO: The `get-size` derive does not support ignoring enum variants.
//
// Jupyter notebooks are not very relevant for memory profiling, and contain
// arbitrary JSON values that do not implement the `GetSize` trait.
SourceTextKind::Notebook(_) => 0,
}
}
}
impl From<String> for SourceTextKind {
@@ -142,9 +150,7 @@ impl From<String> for SourceTextKind {
impl From<Notebook> for SourceTextKind {
fn from(notebook: Notebook) -> Self {
SourceTextKind::Notebook {
notebook: Box::new(notebook),
}
SourceTextKind::Notebook(Box::new(notebook))
}
}
@@ -157,7 +163,7 @@ pub enum SourceTextError {
}
/// Computes the [`LineIndex`] for `file`.
#[salsa::tracked(heap_size=get_size2::heap_size)]
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
let _span = tracing::trace_span!("line_index", ?file).entered();

View File

@@ -43,7 +43,7 @@ pub enum IsolationLevel {
}
/// A collection of [`Edit`] elements to be applied to a source file.
#[derive(Debug, PartialEq, Eq, Clone, Hash, get_size2::GetSize)]
#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Fix {
/// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.7"
version = "0.12.5"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -39,11 +39,6 @@ class NonEmptyWithInit:
pass
class NonEmptyChildWithInlineComment:
value: int
... # preserve me
class EmptyClass:
...

View File

@@ -38,10 +38,6 @@ class NonEmptyWithInit:
def __init__():
pass
class NonEmptyChildWithInlineComment:
value: int
... # preserve me
# Not violations
class EmptyClass: ...

View File

@@ -1,4 +1,4 @@
from pathlib import Path, PurePath, PosixPath, PurePosixPath, WindowsPath, PureWindowsPath
from pathlib import Path, PurePath
from pathlib import Path as pth
@@ -68,11 +68,3 @@ Path(".", "folder")
PurePath(".", "folder")
Path()
from importlib.metadata import PackagePath
_ = PosixPath(".")
_ = PurePosixPath(".")
_ = WindowsPath(".")
_ = PureWindowsPath(".")
_ = PackagePath(".")

View File

@@ -1,3 +0,0 @@
"""Hello, world!"""\
x = 1; y = 2

View File

@@ -59,7 +59,3 @@ kwargs = {x: x for x in range(10)}
"{1}_{0}".format(1, 2, *args)
"{1}_{0}".format(1, 2)
r"\d{{1,2}} {0}".format(42)
"{{{0}}}".format(123)

View File

@@ -52,7 +52,3 @@ f"{repr(lambda: 1)}"
f"{repr(x := 2)}"
f"{str(object=3)}"
f"{str(x for x in [])}"
f"{str((x for x in []))}"

View File

@@ -590,16 +590,6 @@ impl<'a> Checker<'a> {
member,
})
}
/// Return the [`LintContext`] for the current analysis.
///
/// Note that you should always prefer calling methods like `settings`, `report_diagnostic`, or
/// `is_rule_enabled` directly on [`Checker`] when possible. This method exists only for the
/// rare cases where rules or helper functions need to be accessed by both a `Checker` and a
/// `LintContext` in different analysis phases.
pub(crate) const fn context(&self) -> &'a LintContext<'a> {
self.context
}
}
pub(crate) struct TypingImporter<'a, 'b> {

View File

@@ -56,19 +56,13 @@ impl<'a> Insertion<'a> {
stylist: &Stylist,
) -> Insertion<'static> {
// Skip over any docstrings.
let mut location = if let Some(mut location) = match_docstring_end(body) {
let mut location = if let Some(location) = match_docstring_end(body) {
// If the first token after the docstring is a semicolon, insert after the semicolon as
// an inline statement.
if let Some(offset) = match_semicolon(locator.after(location)) {
return Insertion::inline(" ", location.add(offset).add(TextSize::of(';')), ";");
}
// If the first token after the docstring is a continuation character (i.e. "\"), advance
// an additional row to prevent inserting in the same logical line.
if match_continuation(locator.after(location)).is_some() {
location = locator.full_line_end(location);
}
// Otherwise, advance to the next row.
locator.full_line_end(location)
} else {
@@ -369,16 +363,6 @@ mod tests {
Insertion::own_line("", TextSize::from(20), "\n")
);
let contents = r#"
"""Hello, world!"""\
"#
.trim_start();
assert_eq!(
insert(contents)?,
Insertion::own_line("", TextSize::from(22), "\n")
);
let contents = r"
x = 1
"

View File

@@ -13,6 +13,7 @@ use ruff_notebook::NotebookIndex;
use ruff_source_file::OneIndexed;
use ruff_text_size::{TextLen, TextRange, TextSize};
use crate::line_width::{IndentWidth, LineWidthBuilder};
use crate::message::diff::Diff;
use crate::message::{Emitter, EmitterContext};
use crate::settings::types::UnsafeFixes;
@@ -228,7 +229,7 @@ impl Display for MessageCodeFrame<'_> {
let start_offset = source_code.line_start(start_index);
let end_offset = source_code.line_end(end_index);
let source = replace_unprintable(
let source = replace_whitespace_and_unprintable(
source_code.slice(TextRange::new(start_offset, end_offset)),
self.message.expect_range() - start_offset,
)
@@ -271,20 +272,16 @@ impl Display for MessageCodeFrame<'_> {
}
/// Given some source code and an annotation range, this routine replaces
/// unprintable characters with printable representations of them.
/// tabs with ASCII whitespace, and unprintable characters with printable
/// representations of them.
///
/// The source code returned has an annotation that is updated to reflect
/// changes made to the source code (if any).
///
/// We don't need to normalize whitespace, such as converting tabs to spaces,
/// because `annotate-snippets` handles that internally. Similarly, it's safe to
/// modify the annotation ranges by inserting 3-byte Unicode replacements
/// because `annotate-snippets` will account for their actual width when
/// rendering and displaying the column to the user.
fn replace_unprintable(source: &str, annotation_range: TextRange) -> SourceCode {
fn replace_whitespace_and_unprintable(source: &str, annotation_range: TextRange) -> SourceCode {
let mut result = String::new();
let mut last_end = 0;
let mut range = annotation_range;
let mut line_width = LineWidthBuilder::new(IndentWidth::default());
// Updates the range given by the caller whenever a single byte (at
// `index` in `source`) is replaced with `len` bytes.
@@ -313,7 +310,19 @@ fn replace_unprintable(source: &str, annotation_range: TextRange) -> SourceCode
};
for (index, c) in source.char_indices() {
if let Some(printable) = unprintable_replacement(c) {
let old_width = line_width.get();
line_width = line_width.add_char(c);
if matches!(c, '\t') {
let tab_width = u32::try_from(line_width.get() - old_width)
.expect("small width because of tab size");
result.push_str(&source[last_end..index]);
for _ in 0..tab_width {
result.push(' ');
}
last_end = index + 1;
update_range(index, tab_width);
} else if let Some(printable) = unprintable_replacement(c) {
result.push_str(&source[last_end..index]);
result.push(printable);
last_end = index + 1;

View File

@@ -1,11 +1,10 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::whitespace::trailing_comment_start_offset;
use ruff_python_ast::{Stmt, StmtExpr};
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::fix;
use crate::{Edit, Fix, FixAvailability, Violation};
use crate::{Fix, FixAvailability, Violation};
/// ## What it does
/// Removes ellipses (`...`) in otherwise non-empty class bodies.
@@ -51,21 +50,15 @@ pub(crate) fn ellipsis_in_non_empty_class_body(checker: &Checker, body: &[Stmt])
}
for stmt in body {
let Stmt::Expr(StmtExpr { value, .. }) = stmt else {
let Stmt::Expr(StmtExpr { value, .. }) = &stmt else {
continue;
};
if value.is_ellipsis_literal_expr() {
let mut diagnostic =
checker.report_diagnostic(EllipsisInNonEmptyClassBody, stmt.range());
// Try to preserve trailing comment if it exists
let edit = if let Some(index) = trailing_comment_start_offset(stmt, checker.source()) {
Edit::range_deletion(stmt.range().add_end(index))
} else {
fix::edits::delete_stmt(stmt, Some(stmt), checker.locator(), checker.indexer())
};
let edit =
fix::edits::delete_stmt(stmt, Some(stmt), checker.locator(), checker.indexer());
diagnostic.set_fix(Fix::safe_edit(edit).isolate(Checker::isolation(
checker.semantic().current_statement_id(),
)));

View File

@@ -145,22 +145,3 @@ PYI013.py:36:5: PYI013 [*] Non-empty class body must not contain `...`
37 36 |
38 37 | def __init__():
39 38 | pass
PYI013.py:44:5: PYI013 [*] Non-empty class body must not contain `...`
|
42 | class NonEmptyChildWithInlineComment:
43 | value: int
44 | ... # preserve me
| ^^^ PYI013
|
= help: Remove unnecessary `...`
Safe fix
41 41 |
42 42 | class NonEmptyChildWithInlineComment:
43 43 | value: int
44 |- ... # preserve me
44 |+ # preserve me
45 45 |
46 46 |
47 47 | class EmptyClass:

View File

@@ -17,10 +17,9 @@ PYI013.pyi:5:5: PYI013 [*] Non-empty class body must not contain `...`
3 3 | class OneAttributeClass:
4 4 | value: int
5 |- ... # Error
5 |+ # Error
6 6 |
7 7 | class OneAttributeClass2:
8 8 | ... # Error
6 5 |
7 6 | class OneAttributeClass2:
8 7 | ... # Error
PYI013.pyi:8:5: PYI013 [*] Non-empty class body must not contain `...`
|
@@ -36,10 +35,9 @@ PYI013.pyi:8:5: PYI013 [*] Non-empty class body must not contain `...`
6 6 |
7 7 | class OneAttributeClass2:
8 |- ... # Error
8 |+ # Error
9 9 | value: int
10 10 |
11 11 | class MyClass:
9 8 | value: int
10 9 |
11 10 | class MyClass:
PYI013.pyi:12:5: PYI013 [*] Non-empty class body must not contain `...`
|
@@ -93,10 +91,9 @@ PYI013.pyi:17:5: PYI013 [*] Non-empty class body must not contain `...`
15 15 | class TwoEllipsesClass:
16 16 | ...
17 |- ... # Error
17 |+ # Error
18 18 |
19 19 | class DocstringClass:
20 20 | """
18 17 |
19 18 | class DocstringClass:
20 19 | """
PYI013.pyi:24:5: PYI013 [*] Non-empty class body must not contain `...`
|
@@ -114,10 +111,9 @@ PYI013.pyi:24:5: PYI013 [*] Non-empty class body must not contain `...`
22 22 | """
23 23 |
24 |- ... # Error
24 |+ # Error
25 25 |
26 26 | class NonEmptyChild(Exception):
27 27 | value: int
25 24 |
26 25 | class NonEmptyChild(Exception):
27 26 | value: int
PYI013.pyi:28:5: PYI013 [*] Non-empty class body must not contain `...`
|
@@ -135,10 +131,9 @@ PYI013.pyi:28:5: PYI013 [*] Non-empty class body must not contain `...`
26 26 | class NonEmptyChild(Exception):
27 27 | value: int
28 |- ... # Error
28 |+ # Error
29 29 |
30 30 | class NonEmptyChild2(Exception):
31 31 | ... # Error
29 28 |
30 29 | class NonEmptyChild2(Exception):
31 30 | ... # Error
PYI013.pyi:31:5: PYI013 [*] Non-empty class body must not contain `...`
|
@@ -154,10 +149,9 @@ PYI013.pyi:31:5: PYI013 [*] Non-empty class body must not contain `...`
29 29 |
30 30 | class NonEmptyChild2(Exception):
31 |- ... # Error
31 |+ # Error
32 32 | value: int
33 33 |
34 34 | class NonEmptyWithInit:
32 31 | value: int
33 32 |
34 33 | class NonEmptyWithInit:
PYI013.pyi:36:5: PYI013 [*] Non-empty class body must not contain `...`
|
@@ -175,28 +169,6 @@ PYI013.pyi:36:5: PYI013 [*] Non-empty class body must not contain `...`
34 34 | class NonEmptyWithInit:
35 35 | value: int
36 |- ... # Error
36 |+ # Error
37 37 |
38 38 | def __init__():
39 39 | pass
PYI013.pyi:43:5: PYI013 [*] Non-empty class body must not contain `...`
|
41 | class NonEmptyChildWithInlineComment:
42 | value: int
43 | ... # preserve me
| ^^^ PYI013
44 |
45 | # Not violations
|
= help: Remove unnecessary `...`
Safe fix
40 40 |
41 41 | class NonEmptyChildWithInlineComment:
42 42 | value: int
43 |- ... # preserve me
43 |+ # preserve me
44 44 |
45 45 | # Not violations
46 46 |
37 36 |
38 37 | def __init__():
39 38 | pass

View File

@@ -20,35 +20,6 @@ pub(crate) fn is_pathlib_path_call(checker: &Checker, expr: &Expr) -> bool {
})
}
/// Check if the given segments represent a pathlib Path subclass or `PackagePath` with preview mode support.
/// In stable mode, only checks for `Path` and `PurePath`. In preview mode, also checks for
/// `PosixPath`, `PurePosixPath`, `WindowsPath`, `PureWindowsPath`, and `PackagePath`.
pub(crate) fn is_pure_path_subclass_with_preview(
checker: &crate::checkers::ast::Checker,
segments: &[&str],
) -> bool {
let is_core_pathlib = matches!(segments, ["pathlib", "Path" | "PurePath"]);
if is_core_pathlib {
return true;
}
if checker.settings().preview.is_enabled() {
let is_expanded_pathlib = matches!(
segments,
[
"pathlib",
"PosixPath" | "PurePosixPath" | "WindowsPath" | "PureWindowsPath"
]
);
let is_packagepath = matches!(segments, ["importlib", "metadata", "PackagePath"]);
return is_expanded_pathlib || is_packagepath;
}
false
}
/// We check functions that take only 1 argument, this does not apply to functions
/// with `dir_fd` argument, because `dir_fd` is not supported by pathlib,
/// so check if it's set to non-default values

View File

@@ -123,7 +123,6 @@ mod tests {
Ok(())
}
#[test_case(Rule::PathConstructorCurrentDirectory, Path::new("PTH201.py"))]
#[test_case(Rule::OsPathGetsize, Path::new("PTH202.py"))]
#[test_case(Rule::OsPathGetsize, Path::new("PTH202_2.py"))]
#[test_case(Rule::OsPathGetatime, Path::new("PTH203.py"))]

View File

@@ -9,7 +9,6 @@ use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::fix::edits::{Parentheses, remove_argument};
use crate::rules::flake8_use_pathlib::helpers::is_pure_path_subclass_with_preview;
use crate::{AlwaysFixableViolation, Applicability, Edit, Fix};
/// ## What it does
@@ -70,7 +69,7 @@ pub(crate) fn path_constructor_current_directory(
let arguments = &call.arguments;
if !is_pure_path_subclass_with_preview(checker, segments) {
if !matches!(segments, ["pathlib", "Path" | "PurePath"]) {
return;
}

View File

@@ -1,423 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
assertion_line: 144
---
PTH201.py:6:10: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
5 | # match
6 | _ = Path(".")
| ^^^ PTH201
7 | _ = pth(".")
8 | _ = PurePath(".")
|
= help: Remove the current directory argument
Safe fix
3 3 |
4 4 |
5 5 | # match
6 |-_ = Path(".")
6 |+_ = Path()
7 7 | _ = pth(".")
8 8 | _ = PurePath(".")
9 9 | _ = Path("")
PTH201.py:7:9: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
5 | # match
6 | _ = Path(".")
7 | _ = pth(".")
| ^^^ PTH201
8 | _ = PurePath(".")
9 | _ = Path("")
|
= help: Remove the current directory argument
Safe fix
4 4 |
5 5 | # match
6 6 | _ = Path(".")
7 |-_ = pth(".")
7 |+_ = pth()
8 8 | _ = PurePath(".")
9 9 | _ = Path("")
10 10 |
PTH201.py:8:14: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
6 | _ = Path(".")
7 | _ = pth(".")
8 | _ = PurePath(".")
| ^^^ PTH201
9 | _ = Path("")
|
= help: Remove the current directory argument
Safe fix
5 5 | # match
6 6 | _ = Path(".")
7 7 | _ = pth(".")
8 |-_ = PurePath(".")
8 |+_ = PurePath()
9 9 | _ = Path("")
10 10 |
11 11 | Path('', )
PTH201.py:9:10: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
7 | _ = pth(".")
8 | _ = PurePath(".")
9 | _ = Path("")
| ^^ PTH201
10 |
11 | Path('', )
|
= help: Remove the current directory argument
Safe fix
6 6 | _ = Path(".")
7 7 | _ = pth(".")
8 8 | _ = PurePath(".")
9 |-_ = Path("")
9 |+_ = Path()
10 10 |
11 11 | Path('', )
12 12 |
PTH201.py:11:6: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
9 | _ = Path("")
10 |
11 | Path('', )
| ^^ PTH201
12 |
13 | Path(
|
= help: Remove the current directory argument
Safe fix
8 8 | _ = PurePath(".")
9 9 | _ = Path("")
10 10 |
11 |-Path('', )
11 |+Path()
12 12 |
13 13 | Path(
14 14 | '',
PTH201.py:14:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
13 | Path(
14 | '',
| ^^ PTH201
15 | )
|
= help: Remove the current directory argument
Safe fix
10 10 |
11 11 | Path('', )
12 12 |
13 |-Path(
14 |- '',
15 |-)
13 |+Path()
16 14 |
17 15 | Path( # Comment before argument
18 16 | '',
PTH201.py:18:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
17 | Path( # Comment before argument
18 | '',
| ^^ PTH201
19 | )
|
= help: Remove the current directory argument
Unsafe fix
14 14 | '',
15 15 | )
16 16 |
17 |-Path( # Comment before argument
18 |- '',
19 |-)
17 |+Path()
20 18 |
21 19 | Path(
22 20 | '', # EOL comment
PTH201.py:22:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
21 | Path(
22 | '', # EOL comment
| ^^ PTH201
23 | )
|
= help: Remove the current directory argument
Unsafe fix
18 18 | '',
19 19 | )
20 20 |
21 |-Path(
22 |- '', # EOL comment
23 |-)
21 |+Path()
24 22 |
25 23 | Path(
26 24 | '' # Comment in the middle of implicitly concatenated string
PTH201.py:26:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
25 | Path(
26 | / '' # Comment in the middle of implicitly concatenated string
27 | | ".",
| |_______^ PTH201
28 | )
|
= help: Remove the current directory argument
Unsafe fix
22 22 | '', # EOL comment
23 23 | )
24 24 |
25 |-Path(
26 |- '' # Comment in the middle of implicitly concatenated string
27 |- ".",
28 |-)
25 |+Path()
29 26 |
30 27 | Path(
31 28 | '' # Comment before comma
PTH201.py:31:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
30 | Path(
31 | '' # Comment before comma
| ^^ PTH201
32 | ,
33 | )
|
= help: Remove the current directory argument
Unsafe fix
27 27 | ".",
28 28 | )
29 29 |
30 |-Path(
31 |- '' # Comment before comma
32 |- ,
33 |-)
30 |+Path()
34 31 |
35 32 | Path(
36 33 | '',
PTH201.py:36:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
35 | Path(
36 | '',
| ^^ PTH201
37 | ) / "bare"
|
= help: Remove the current directory argument
Safe fix
33 33 | )
34 34 |
35 35 | Path(
36 |- '',
37 |-) / "bare"
36 |+ "bare",
37 |+)
38 38 |
39 39 | Path( # Comment before argument
40 40 | '',
PTH201.py:40:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
39 | Path( # Comment before argument
40 | '',
| ^^ PTH201
41 | ) / ("parenthesized")
|
= help: Remove the current directory argument
Unsafe fix
37 37 | ) / "bare"
38 38 |
39 39 | Path( # Comment before argument
40 |- '',
41 |-) / ("parenthesized")
40 |+ ("parenthesized"),
41 |+)
42 42 |
43 43 | Path(
44 44 | '', # EOL comment
PTH201.py:44:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
43 | Path(
44 | '', # EOL comment
| ^^ PTH201
45 | ) / ( ("double parenthesized" ) )
|
= help: Remove the current directory argument
Unsafe fix
41 41 | ) / ("parenthesized")
42 42 |
43 43 | Path(
44 |- '', # EOL comment
45 |-) / ( ("double parenthesized" ) )
44 |+ ( ("double parenthesized" ) ), # EOL comment
45 |+)
46 46 |
47 47 | ( Path(
48 48 | '' # Comment in the middle of implicitly concatenated string
PTH201.py:48:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
47 | ( Path(
48 | / '' # Comment in the middle of implicitly concatenated string
49 | | ".",
| |_______^ PTH201
50 | ) )/ (("parenthesized path call")
51 | # Comment between closing parentheses
|
= help: Remove the current directory argument
Unsafe fix
44 44 | '', # EOL comment
45 45 | ) / ( ("double parenthesized" ) )
46 46 |
47 |-( Path(
48 |- '' # Comment in the middle of implicitly concatenated string
49 |- ".",
50 |-) )/ (("parenthesized path call")
47 |+Path(
48 |+ (("parenthesized path call")
51 49 | # Comment between closing parentheses
50 |+),
52 51 | )
53 52 |
54 53 | Path(
PTH201.py:55:5: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
54 | Path(
55 | '' # Comment before comma
| ^^ PTH201
56 | ,
57 | ) / "multiple" / (
|
= help: Remove the current directory argument
Unsafe fix
52 52 | )
53 53 |
54 54 | Path(
55 |- '' # Comment before comma
55 |+ "multiple" # Comment before comma
56 56 | ,
57 |-) / "multiple" / (
57 |+) / (
58 58 | "frag" # Comment
59 59 | 'ment'
60 60 | )
PTH201.py:74:15: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
72 | from importlib.metadata import PackagePath
73 |
74 | _ = PosixPath(".")
| ^^^ PTH201
75 | _ = PurePosixPath(".")
76 | _ = WindowsPath(".")
|
= help: Remove the current directory argument
Safe fix
71 71 |
72 72 | from importlib.metadata import PackagePath
73 73 |
74 |-_ = PosixPath(".")
74 |+_ = PosixPath()
75 75 | _ = PurePosixPath(".")
76 76 | _ = WindowsPath(".")
77 77 | _ = PureWindowsPath(".")
PTH201.py:75:19: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
74 | _ = PosixPath(".")
75 | _ = PurePosixPath(".")
| ^^^ PTH201
76 | _ = WindowsPath(".")
77 | _ = PureWindowsPath(".")
|
= help: Remove the current directory argument
Safe fix
72 72 | from importlib.metadata import PackagePath
73 73 |
74 74 | _ = PosixPath(".")
75 |-_ = PurePosixPath(".")
75 |+_ = PurePosixPath()
76 76 | _ = WindowsPath(".")
77 77 | _ = PureWindowsPath(".")
78 78 | _ = PackagePath(".")
PTH201.py:76:17: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
74 | _ = PosixPath(".")
75 | _ = PurePosixPath(".")
76 | _ = WindowsPath(".")
| ^^^ PTH201
77 | _ = PureWindowsPath(".")
78 | _ = PackagePath(".")
|
= help: Remove the current directory argument
Safe fix
73 73 |
74 74 | _ = PosixPath(".")
75 75 | _ = PurePosixPath(".")
76 |-_ = WindowsPath(".")
76 |+_ = WindowsPath()
77 77 | _ = PureWindowsPath(".")
78 78 | _ = PackagePath(".")
PTH201.py:77:21: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
75 | _ = PurePosixPath(".")
76 | _ = WindowsPath(".")
77 | _ = PureWindowsPath(".")
| ^^^ PTH201
78 | _ = PackagePath(".")
|
= help: Remove the current directory argument
Safe fix
74 74 | _ = PosixPath(".")
75 75 | _ = PurePosixPath(".")
76 76 | _ = WindowsPath(".")
77 |-_ = PureWindowsPath(".")
77 |+_ = PureWindowsPath()
78 78 | _ = PackagePath(".")
PTH201.py:78:17: PTH201 [*] Do not pass the current directory explicitly to `Path`
|
76 | _ = WindowsPath(".")
77 | _ = PureWindowsPath(".")
78 | _ = PackagePath(".")
| ^^^ PTH201
|
= help: Remove the current directory argument
Safe fix
75 75 | _ = PurePosixPath(".")
76 76 | _ = WindowsPath(".")
77 77 | _ = PureWindowsPath(".")
78 |-_ = PackagePath(".")
78 |+_ = PackagePath()

View File

@@ -794,7 +794,6 @@ mod tests {
#[test_case(Path::new("comments_and_newlines.py"))]
#[test_case(Path::new("docstring.py"))]
#[test_case(Path::new("docstring.pyi"))]
#[test_case(Path::new("docstring_followed_by_continuation.py"))]
#[test_case(Path::new("docstring_only.py"))]
#[test_case(Path::new("docstring_with_continuation.py"))]
#[test_case(Path::new("docstring_with_semicolon.py"))]
@@ -829,7 +828,6 @@ mod tests {
#[test_case(Path::new("comments_and_newlines.py"))]
#[test_case(Path::new("docstring.py"))]
#[test_case(Path::new("docstring.pyi"))]
#[test_case(Path::new("docstring_followed_by_continuation.py"))]
#[test_case(Path::new("docstring_only.py"))]
#[test_case(Path::new("docstring_with_continuation.py"))]
#[test_case(Path::new("docstring_with_semicolon.py"))]

View File

@@ -1,9 +0,0 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---
docstring_followed_by_continuation.py:1:1: I002 [*] Missing required import: `from __future__ import annotations`
Safe fix
1 1 | """Hello, world!"""\
2 2 |
3 |+from __future__ import annotations
3 4 | x = 1; y = 2

View File

@@ -1,9 +0,0 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---
docstring_followed_by_continuation.py:1:1: I002 [*] Missing required import: `from __future__ import annotations as _annotations`
Safe fix
1 1 | """Hello, world!"""\
2 2 |
3 |+from __future__ import annotations as _annotations
3 4 | x = 1; y = 2

View File

@@ -15,8 +15,8 @@ E101.py:15:1: E101 Indentation contains mixed spaces and tabs
|
13 | def func_mixed_start_with_space():
14 | # E101
15 | print("mixed starts with space")
| ^^^^^^^^^^^^^^^^^^^^ E101
15 | print("mixed starts with space")
| ^^^^^^^^^^^^^^^ E101
16 |
17 | def xyz():
|
@@ -25,6 +25,6 @@ E101.py:19:1: E101 Indentation contains mixed spaces and tabs
|
17 | def xyz():
18 | # E101
19 | print("xyz");
| ^^^^^^^ E101
19 | print("xyz");
| ^^^^ E101
|

View File

@@ -47,7 +47,7 @@ E20.py:6:15: E201 [*] Whitespace after '{'
6 | spam(ham[1], { eggs: 2})
| ^ E201
7 | #: E201:1:6
8 | spam( ham[1], {eggs: 2})
8 | spam( ham[1], {eggs: 2})
|
= help: Remove whitespace before '{'
@@ -65,10 +65,10 @@ E20.py:8:6: E201 [*] Whitespace after '('
|
6 | spam(ham[1], { eggs: 2})
7 | #: E201:1:6
8 | spam( ham[1], {eggs: 2})
| ^^^^ E201
8 | spam( ham[1], {eggs: 2})
| ^^^ E201
9 | #: E201:1:10
10 | spam(ham[ 1], {eggs: 2})
10 | spam(ham[ 1], {eggs: 2})
|
= help: Remove whitespace before '('
@@ -84,12 +84,12 @@ E20.py:8:6: E201 [*] Whitespace after '('
E20.py:10:10: E201 [*] Whitespace after '['
|
8 | spam( ham[1], {eggs: 2})
8 | spam( ham[1], {eggs: 2})
9 | #: E201:1:10
10 | spam(ham[ 1], {eggs: 2})
| ^^^^ E201
10 | spam(ham[ 1], {eggs: 2})
| ^^^ E201
11 | #: E201:1:15
12 | spam(ham[1], { eggs: 2})
12 | spam(ham[1], { eggs: 2})
|
= help: Remove whitespace before '['
@@ -105,10 +105,10 @@ E20.py:10:10: E201 [*] Whitespace after '['
E20.py:12:15: E201 [*] Whitespace after '{'
|
10 | spam(ham[ 1], {eggs: 2})
10 | spam(ham[ 1], {eggs: 2})
11 | #: E201:1:15
12 | spam(ham[1], { eggs: 2})
| ^^^^ E201
12 | spam(ham[1], { eggs: 2})
| ^^ E201
13 | #: Okay
14 | spam(ham[1], {eggs: 2})
|

View File

@@ -49,7 +49,7 @@ E20.py:23:11: E202 [*] Whitespace before ']'
23 | spam(ham[1 ], {eggs: 2})
| ^ E202
24 | #: E202:1:23
25 | spam(ham[1], {eggs: 2} )
25 | spam(ham[1], {eggs: 2} )
|
= help: Remove whitespace before ']'
@@ -67,10 +67,10 @@ E20.py:25:23: E202 [*] Whitespace before ')'
|
23 | spam(ham[1 ], {eggs: 2})
24 | #: E202:1:23
25 | spam(ham[1], {eggs: 2} )
| ^^^^ E202
25 | spam(ham[1], {eggs: 2} )
| ^^ E202
26 | #: E202:1:22
27 | spam(ham[1], {eggs: 2 })
27 | spam(ham[1], {eggs: 2 })
|
= help: Remove whitespace before ')'
@@ -86,12 +86,12 @@ E20.py:25:23: E202 [*] Whitespace before ')'
E20.py:27:22: E202 [*] Whitespace before '}'
|
25 | spam(ham[1], {eggs: 2} )
25 | spam(ham[1], {eggs: 2} )
26 | #: E202:1:22
27 | spam(ham[1], {eggs: 2 })
| ^^^^ E202
27 | spam(ham[1], {eggs: 2 })
| ^^^ E202
28 | #: E202:1:11
29 | spam(ham[1 ], {eggs: 2})
29 | spam(ham[1 ], {eggs: 2})
|
= help: Remove whitespace before '}'
@@ -107,10 +107,10 @@ E20.py:27:22: E202 [*] Whitespace before '}'
E20.py:29:11: E202 [*] Whitespace before ']'
|
27 | spam(ham[1], {eggs: 2 })
27 | spam(ham[1], {eggs: 2 })
28 | #: E202:1:11
29 | spam(ham[1 ], {eggs: 2})
| ^^^^ E202
29 | spam(ham[1 ], {eggs: 2})
| ^^ E202
30 | #: Okay
31 | spam(ham[1], {eggs: 2})
|

View File

@@ -25,8 +25,8 @@ E20.py:55:10: E203 [*] Whitespace before ':'
|
53 | x, y = y, x
54 | #: E203:1:10
55 | if x == 4 :
| ^^^^ E203
55 | if x == 4 :
| ^^^ E203
56 | print(x, y)
57 | x, y = y, x
|
@@ -67,8 +67,8 @@ E20.py:63:16: E203 [*] Whitespace before ';'
|
61 | #: E203:2:15 E702:2:16
62 | if x == 4:
63 | print(x, y) ; x, y = y, x
| ^^^^ E203
63 | print(x, y) ; x, y = y, x
| ^ E203
64 | #: E203:3:13
65 | if x == 4:
|

View File

@@ -1,12 +1,13 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
snapshot_kind: text
---
E22.py:43:2: E223 [*] Tab before operator
|
41 | #: E223
42 | foobart = 4
43 | a = 3 # aligned with tab
| ^^^^ E223
43 | a = 3 # aligned with tab
| ^^^ E223
44 | #:
|
= help: Replace with single space

View File

@@ -1,12 +1,13 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
snapshot_kind: text
---
E24.py:6:8: E242 [*] Tab after comma
|
4 | b = (1, 20)
5 | #: E242
6 | a = (1, 2) # tab before 2
| ^^^^ E242
6 | a = (1, 2) # tab before 2
| ^ E242
7 | #: Okay
8 | b = (1, 20) # space before 20
|

View File

@@ -66,7 +66,7 @@ E27.py:8:3: E271 [*] Multiple spaces after keyword
E27.py:15:6: E271 [*] Multiple spaces after keyword
|
13 | True and False
13 | True and False
14 | #: E271
15 | a and b
| ^^ E271

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
snapshot_kind: text
---
E27.py:21:2: E272 [*] Multiple spaces before keyword
|
@@ -50,7 +51,7 @@ E27.py:25:5: E272 [*] Multiple spaces before keyword
25 | this and False
| ^^ E272
26 | #: E273
27 | a and b
27 | a and b
|
= help: Replace with single space

View File

@@ -8,7 +8,7 @@ E27.py:11:9: E273 [*] Tab after keyword
11 | True and False
| ^^^^^^^^ E273
12 | #: E273 E274
13 | True and False
13 | True and False
|
= help: Replace with single space
@@ -26,7 +26,7 @@ E27.py:13:5: E273 [*] Tab after keyword
|
11 | True and False
12 | #: E273 E274
13 | True and False
13 | True and False
| ^^^^^^^^ E273
14 | #: E271
15 | a and b
@@ -47,8 +47,8 @@ E27.py:13:10: E273 [*] Tab after keyword
|
11 | True and False
12 | #: E273 E274
13 | True and False
| ^^^^ E273
13 | True and False
| ^ E273
14 | #: E271
15 | a and b
|
@@ -68,10 +68,10 @@ E27.py:27:6: E273 [*] Tab after keyword
|
25 | this and False
26 | #: E273
27 | a and b
| ^^^^ E273
27 | a and b
| ^^^ E273
28 | #: E274
29 | a and b
29 | a and b
|
= help: Replace with single space
@@ -87,10 +87,10 @@ E27.py:27:6: E273 [*] Tab after keyword
E27.py:31:10: E273 [*] Tab after keyword
|
29 | a and b
29 | a and b
30 | #: E273 E274
31 | this and False
| ^^^^ E273
31 | this and False
| ^ E273
32 | #: Okay
33 | from u import (a, b)
|

View File

@@ -1,14 +1,15 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
snapshot_kind: text
---
E27.py:29:2: E274 [*] Tab before keyword
|
27 | a and b
27 | a and b
28 | #: E274
29 | a and b
| ^^^^^^^^ E274
29 | a and b
| ^^^^^^^ E274
30 | #: E273 E274
31 | this and False
31 | this and False
|
= help: Replace with single space
@@ -24,9 +25,9 @@ E27.py:29:2: E274 [*] Tab before keyword
E27.py:31:5: E274 [*] Tab before keyword
|
29 | a and b
29 | a and b
30 | #: E273 E274
31 | this and False
31 | this and False
| ^^^^^^^^ E274
32 | #: Okay
33 | from u import (a, b)

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
snapshot_kind: text
---
W19.py:1:1: W191 Indentation contains tabs
|
@@ -370,7 +371,7 @@ W19.py:157:1: W191 Indentation contains tabs
156 | f"test{
157 | tab_indented_should_be_flagged
| ^^^^ W191
158 | } <- this tab is fine"
158 | } <- this tab is fine"
|
W19.py:161:1: W191 Indentation contains tabs
@@ -378,5 +379,5 @@ W19.py:161:1: W191 Indentation contains tabs
160 | f"""test{
161 | tab_indented_should_be_flagged
| ^^^^ W191
162 | } <- this tab is fine"""
162 | } <- this tab is fine"""
|

View File

@@ -7,18 +7,16 @@ pub(crate) mod types;
#[cfg(test)]
mod tests {
use std::collections::BTreeSet;
use std::path::Path;
use anyhow::Result;
use ruff_python_ast::PythonVersion;
use ruff_python_semantic::{MemberNameImport, NameImport};
use test_case::test_case;
use crate::registry::Rule;
use crate::rules::{isort, pyupgrade};
use crate::rules::pyupgrade;
use crate::settings::types::PreviewMode;
use crate::test::{test_path, test_snippet};
use crate::test::test_path;
use crate::{assert_diagnostics, settings};
#[test_case(Rule::ConvertNamedTupleFunctionalToClass, Path::new("UP014.py"))]
@@ -296,63 +294,4 @@ mod tests {
assert_diagnostics!(diagnostics);
Ok(())
}
#[test]
fn i002_conflict() {
let diagnostics = test_snippet(
"from pipes import quote, Template",
&settings::LinterSettings {
isort: isort::settings::Settings {
required_imports: BTreeSet::from_iter([
// https://github.com/astral-sh/ruff/issues/18729
NameImport::ImportFrom(MemberNameImport::member(
"__future__".to_string(),
"generator_stop".to_string(),
)),
// https://github.com/astral-sh/ruff/issues/16802
NameImport::ImportFrom(MemberNameImport::member(
"collections".to_string(),
"Sequence".to_string(),
)),
// Only bail out if _all_ the names in UP035 are required. `pipes.Template`
// isn't flagged by UP035, so requiring it shouldn't prevent `pipes.quote`
// from getting a diagnostic.
NameImport::ImportFrom(MemberNameImport::member(
"pipes".to_string(),
"Template".to_string(),
)),
]),
..Default::default()
},
..settings::LinterSettings::for_rules([
Rule::MissingRequiredImport,
Rule::UnnecessaryFutureImport,
Rule::DeprecatedImport,
])
},
);
assert_diagnostics!(diagnostics, @r"
<filename>:1:1: UP035 [*] Import from `shlex` instead: `quote`
|
1 | from pipes import quote, Template
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP035
|
= help: Import from `shlex`
Safe fix
1 |-from pipes import quote, Template
1 |+from pipes import Template
2 |+from shlex import quote
<filename>:1:1: I002 [*] Missing required import: `from __future__ import generator_stop`
Safe fix
1 |+from __future__ import generator_stop
1 2 | from pipes import quote, Template
<filename>:1:1: I002 [*] Missing required import: `from collections import Sequence`
Safe fix
1 |+from collections import Sequence
1 2 | from pipes import quote, Template
");
}
}

View File

@@ -2,7 +2,7 @@ use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::whitespace::indentation;
use ruff_python_ast::{Alias, StmtImportFrom, StmtRef};
use ruff_python_ast::{Alias, StmtImportFrom};
use ruff_python_codegen::Stylist;
use ruff_python_parser::Tokens;
use ruff_text_size::Ranged;
@@ -10,12 +10,9 @@ use ruff_text_size::Ranged;
use crate::Locator;
use crate::checkers::ast::Checker;
use crate::rules::pyupgrade::fixes;
use crate::rules::pyupgrade::rules::unnecessary_future_import::is_import_required_by_isort;
use crate::{Edit, Fix, FixAvailability, Violation};
use ruff_python_ast::PythonVersion;
use super::RequiredImports;
/// An import was moved and renamed as part of a deprecation.
/// For example, `typing.AbstractSet` was moved to `collections.abc.Set`.
#[derive(Debug, PartialEq, Eq)]
@@ -413,7 +410,6 @@ struct ImportReplacer<'a> {
stylist: &'a Stylist<'a>,
tokens: &'a Tokens,
version: PythonVersion,
required_imports: &'a RequiredImports,
}
impl<'a> ImportReplacer<'a> {
@@ -424,7 +420,6 @@ impl<'a> ImportReplacer<'a> {
stylist: &'a Stylist<'a>,
tokens: &'a Tokens,
version: PythonVersion,
required_imports: &'a RequiredImports,
) -> Self {
Self {
import_from_stmt,
@@ -433,7 +428,6 @@ impl<'a> ImportReplacer<'a> {
stylist,
tokens,
version,
required_imports,
}
}
@@ -443,13 +437,6 @@ impl<'a> ImportReplacer<'a> {
if self.module == "typing" {
if self.version >= PythonVersion::PY39 {
for member in &self.import_from_stmt.names {
if is_import_required_by_isort(
self.required_imports,
StmtRef::ImportFrom(self.import_from_stmt),
member,
) {
continue;
}
if let Some(target) = TYPING_TO_RENAME_PY39.iter().find_map(|(name, target)| {
if &member.name == *name {
Some(*target)
@@ -686,13 +673,7 @@ impl<'a> ImportReplacer<'a> {
let mut matched_names = vec![];
let mut unmatched_names = vec![];
for name in &self.import_from_stmt.names {
if is_import_required_by_isort(
self.required_imports,
StmtRef::ImportFrom(self.import_from_stmt),
name,
) {
unmatched_names.push(name);
} else if candidates.contains(&name.name.as_str()) {
if candidates.contains(&name.name.as_str()) {
matched_names.push(name);
} else {
unmatched_names.push(name);
@@ -745,7 +726,6 @@ pub(crate) fn deprecated_import(checker: &Checker, import_from_stmt: &StmtImport
checker.stylist(),
checker.tokens(),
checker.target_version(),
&checker.settings().isort.required_imports,
);
for (operation, fix) in fixer.without_renames() {

View File

@@ -124,20 +124,10 @@ fn is_sequential(indices: &[usize]) -> bool {
indices.iter().enumerate().all(|(idx, value)| idx == *value)
}
static FORMAT_SPECIFIER: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?x)
(?P<prefix>
^|[^{]|(?:\{{2})+ # preceded by nothing, a non-brace, or an even number of braces
)
\{ # opening curly brace
(?P<int>\d+) # followed by any integer
(?P<fmt>.*?) # followed by any text
} # followed by a closing brace
",
)
.unwrap()
});
// An opening curly brace, followed by any integer, followed by any text,
// followed by a closing brace.
static FORMAT_SPECIFIER: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"\{(?P<int>\d+)(?P<fmt>.*?)}").unwrap());
/// Remove the explicit positional indices from a format string.
fn remove_specifiers<'a>(value: &mut Expression<'a>, arena: &'a typed_arena::Arena<String>) {
@@ -145,7 +135,7 @@ fn remove_specifiers<'a>(value: &mut Expression<'a>, arena: &'a typed_arena::Are
Expression::SimpleString(expr) => {
expr.value = arena.alloc(
FORMAT_SPECIFIER
.replace_all(expr.value, "$prefix{$fmt}")
.replace_all(expr.value, "{$fmt}")
.to_string(),
);
}
@@ -156,7 +146,7 @@ fn remove_specifiers<'a>(value: &mut Expression<'a>, arena: &'a typed_arena::Are
libcst_native::String::Simple(string) => {
string.value = arena.alloc(
FORMAT_SPECIFIER
.replace_all(string.value, "$prefix{$fmt}")
.replace_all(string.value, "{$fmt}")
.to_string(),
);
}

View File

@@ -1,10 +1,7 @@
use std::collections::BTreeSet;
use itertools::Itertools;
use ruff_python_ast::{Alias, Stmt};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::{self as ast, Alias, Stmt, StmtRef};
use ruff_python_semantic::NameImport;
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
@@ -87,29 +84,6 @@ const PY37_PLUS_REMOVE_FUTURES: &[&str] = &[
"generator_stop",
];
pub(crate) type RequiredImports = BTreeSet<NameImport>;
pub(crate) fn is_import_required_by_isort(
required_imports: &RequiredImports,
stmt: StmtRef,
alias: &Alias,
) -> bool {
let segments: &[&str] = match stmt {
StmtRef::ImportFrom(ast::StmtImportFrom {
module: Some(module),
..
}) => &[module.as_str(), alias.name.as_str()],
StmtRef::ImportFrom(ast::StmtImportFrom { module: None, .. }) | StmtRef::Import(_) => {
&[alias.name.as_str()]
}
_ => return false,
};
required_imports
.iter()
.any(|required_import| required_import.qualified_name().segments() == segments)
}
/// UP010
pub(crate) fn unnecessary_future_import(checker: &Checker, stmt: &Stmt, names: &[Alias]) {
let mut unused_imports: Vec<&Alias> = vec![];
@@ -117,15 +91,6 @@ pub(crate) fn unnecessary_future_import(checker: &Checker, stmt: &Stmt, names: &
if alias.asname.is_some() {
continue;
}
if is_import_required_by_isort(
&checker.settings().isort.required_imports,
stmt.into(),
alias,
) {
continue;
}
if PY33_PLUS_REMOVE_FUTURES.contains(&alias.name.as_str())
|| PY37_PLUS_REMOVE_FUTURES.contains(&alias.name.as_str())
{
@@ -154,7 +119,7 @@ pub(crate) fn unnecessary_future_import(checker: &Checker, stmt: &Stmt, names: &
unused_imports
.iter()
.map(|alias| &alias.name)
.map(ast::Identifier::as_str),
.map(ruff_python_ast::Identifier::as_str),
statement,
parent,
checker.locator(),

View File

@@ -481,7 +481,6 @@ UP030_0.py:59:1: UP030 [*] Use implicit references for positional format fields
59 |+"{}_{}".format(2, 1, )
60 60 |
61 61 | "{1}_{0}".format(1, 2)
62 62 |
UP030_0.py:61:1: UP030 [*] Use implicit references for positional format fields
|
@@ -489,8 +488,6 @@ UP030_0.py:61:1: UP030 [*] Use implicit references for positional format fields
60 |
61 | "{1}_{0}".format(1, 2)
| ^^^^^^^^^^^^^^^^^^^^^^ UP030
62 |
63 | r"\d{{1,2}} {0}".format(42)
|
= help: Remove explicit positional indices
@@ -500,42 +497,3 @@ UP030_0.py:61:1: UP030 [*] Use implicit references for positional format fields
60 60 |
61 |-"{1}_{0}".format(1, 2)
61 |+"{}_{}".format(2, 1)
62 62 |
63 63 | r"\d{{1,2}} {0}".format(42)
64 64 |
UP030_0.py:63:1: UP030 [*] Use implicit references for positional format fields
|
61 | "{1}_{0}".format(1, 2)
62 |
63 | r"\d{{1,2}} {0}".format(42)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP030
64 |
65 | "{{{0}}}".format(123)
|
= help: Remove explicit positional indices
Unsafe fix
60 60 |
61 61 | "{1}_{0}".format(1, 2)
62 62 |
63 |-r"\d{{1,2}} {0}".format(42)
63 |+r"\d{{1,2}} {}".format(42)
64 64 |
65 65 | "{{{0}}}".format(123)
UP030_0.py:65:1: UP030 [*] Use implicit references for positional format fields
|
63 | r"\d{{1,2}} {0}".format(42)
64 |
65 | "{{{0}}}".format(123)
| ^^^^^^^^^^^^^^^^^^^^^ UP030
|
= help: Remove explicit positional indices
Unsafe fix
62 62 |
63 63 | r"\d{{1,2}} {0}".format(42)
64 64 |
65 |-"{{{0}}}".format(123)
65 |+"{{{}}}".format(123)

View File

@@ -22,19 +22,13 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
///
/// ## Example
/// ```python
/// import abc
///
///
/// class C(metaclass=abc.ABCMeta):
/// class C(metaclass=ABCMeta):
/// pass
/// ```
///
/// Use instead:
/// ```python
/// import abc
///
///
/// class C(abc.ABC):
/// class C(ABC):
/// pass
/// ```
///

View File

@@ -12,6 +12,7 @@ use crate::checkers::ast::{Checker, LintContext};
use crate::preview::is_unicode_to_unicode_confusables_enabled;
use crate::rules::ruff::rules::Context;
use crate::rules::ruff::rules::confusables::confusable;
use crate::settings::LinterSettings;
/// ## What it does
/// Checks for ambiguous Unicode characters in strings.
@@ -179,7 +180,9 @@ pub(crate) fn ambiguous_unicode_character_comment(
range: TextRange,
) {
let text = locator.slice(range);
ambiguous_unicode_character(text, range, Context::Comment, context);
for candidate in ambiguous_unicode_character(text, range, context.settings()) {
candidate.into_diagnostic(Context::Comment, context);
}
}
/// RUF001, RUF002
@@ -200,19 +203,22 @@ pub(crate) fn ambiguous_unicode_character_string(checker: &Checker, string_like:
match part {
ast::StringLikePart::String(string_literal) => {
let text = checker.locator().slice(string_literal);
ambiguous_unicode_character(
text,
string_literal.range(),
context,
checker.context(),
);
for candidate in
ambiguous_unicode_character(text, string_literal.range(), checker.settings())
{
candidate.report_diagnostic(checker, context);
}
}
ast::StringLikePart::Bytes(_) => {}
ast::StringLikePart::FString(FString { elements, .. })
| ast::StringLikePart::TString(TString { elements, .. }) => {
for literal in elements.literals() {
let text = checker.locator().slice(literal);
ambiguous_unicode_character(text, literal.range(), context, checker.context());
for candidate in
ambiguous_unicode_character(text, literal.range(), checker.settings())
{
candidate.report_diagnostic(checker, context);
}
}
}
}
@@ -222,12 +228,13 @@ pub(crate) fn ambiguous_unicode_character_string(checker: &Checker, string_like:
fn ambiguous_unicode_character(
text: &str,
range: TextRange,
context: Context,
lint_context: &LintContext,
) {
settings: &LinterSettings,
) -> Vec<Candidate> {
let mut candidates = Vec::new();
// Most of the time, we don't need to check for ambiguous unicode characters at all.
if text.is_ascii() {
return;
return candidates;
}
// Iterate over the "words" in the text.
@@ -239,7 +246,7 @@ fn ambiguous_unicode_character(
if !word_candidates.is_empty() {
if word_flags.is_candidate_word() {
for candidate in word_candidates.drain(..) {
candidate.into_diagnostic(context, lint_context);
candidates.push(candidate);
}
}
word_candidates.clear();
@@ -250,23 +257,21 @@ fn ambiguous_unicode_character(
// case, it's always included as a diagnostic.
if !current_char.is_ascii() {
if let Some(representant) = confusable(current_char as u32).filter(|representant| {
is_unicode_to_unicode_confusables_enabled(lint_context.settings())
|| representant.is_ascii()
is_unicode_to_unicode_confusables_enabled(settings) || representant.is_ascii()
}) {
let candidate = Candidate::new(
TextSize::try_from(relative_offset).unwrap() + range.start(),
current_char,
representant,
);
candidate.into_diagnostic(context, lint_context);
candidates.push(candidate);
}
}
} else if current_char.is_ascii() {
// The current word contains at least one ASCII character.
word_flags |= WordFlags::ASCII;
} else if let Some(representant) = confusable(current_char as u32).filter(|representant| {
is_unicode_to_unicode_confusables_enabled(lint_context.settings())
|| representant.is_ascii()
is_unicode_to_unicode_confusables_enabled(settings) || representant.is_ascii()
}) {
// The current word contains an ambiguous unicode character.
word_candidates.push(Candidate::new(
@@ -284,11 +289,13 @@ fn ambiguous_unicode_character(
if !word_candidates.is_empty() {
if word_flags.is_candidate_word() {
for candidate in word_candidates.drain(..) {
candidate.into_diagnostic(context, lint_context);
candidates.push(candidate);
}
}
word_candidates.clear();
}
candidates
}
bitflags! {
@@ -366,6 +373,39 @@ impl Candidate {
};
}
}
fn report_diagnostic(self, checker: &Checker, context: Context) {
if !checker
.settings()
.allowed_confusables
.contains(&self.confusable)
{
let char_range = TextRange::at(self.offset, self.confusable.text_len());
match context {
Context::String => checker.report_diagnostic_if_enabled(
AmbiguousUnicodeCharacterString {
confusable: self.confusable,
representant: self.representant,
},
char_range,
),
Context::Docstring => checker.report_diagnostic_if_enabled(
AmbiguousUnicodeCharacterDocstring {
confusable: self.confusable,
representant: self.representant,
},
char_range,
),
Context::Comment => checker.report_diagnostic_if_enabled(
AmbiguousUnicodeCharacterComment {
confusable: self.confusable,
representant: self.representant,
},
char_range,
),
};
}
}
}
struct NamedUnicode(char);

View File

@@ -149,7 +149,8 @@ fn convert_call_to_conversion_flag(
formatted_string_expression.whitespace_before_expression = space();
}
formatted_string_expression.expression = if needs_paren_expr(arg) {
formatted_string_expression.expression = if needs_paren(OperatorPrecedence::from_expr(arg))
{
call.args[0]
.value
.clone()
@@ -177,16 +178,6 @@ fn needs_paren(precedence: OperatorPrecedence) -> bool {
precedence <= OperatorPrecedence::Lambda
}
fn needs_paren_expr(arg: &Expr) -> bool {
// Generator expressions need to be parenthesized in f-string expressions
if let Some(generator) = arg.as_generator_expr() {
return !generator.parenthesized;
}
// Check precedence for other expressions
needs_paren(OperatorPrecedence::from_expr(arg))
}
/// Represents the three built-in Python conversion functions that can be replaced
/// with f-string conversion flags.
#[derive(Copy, Clone)]

View File

@@ -359,7 +359,6 @@ RUF010.py:52:4: RUF010 [*] Use explicit conversion flag
52 |+f"{(x := 2)!r}"
53 53 |
54 54 | f"{str(object=3)}"
55 55 |
RUF010.py:54:4: RUF010 [*] Use explicit conversion flag
|
@@ -367,8 +366,6 @@ RUF010.py:54:4: RUF010 [*] Use explicit conversion flag
53 |
54 | f"{str(object=3)}"
| ^^^^^^^^^^^^^ RUF010
55 |
56 | f"{str(x for x in [])}"
|
= help: Replace with conversion flag
@@ -378,42 +375,3 @@ RUF010.py:54:4: RUF010 [*] Use explicit conversion flag
53 53 |
54 |-f"{str(object=3)}"
54 |+f"{3!s}"
55 55 |
56 56 | f"{str(x for x in [])}"
57 57 |
RUF010.py:56:4: RUF010 [*] Use explicit conversion flag
|
54 | f"{str(object=3)}"
55 |
56 | f"{str(x for x in [])}"
| ^^^^^^^^^^^^^^^^^^ RUF010
57 |
58 | f"{str((x for x in []))}"
|
= help: Replace with conversion flag
Safe fix
53 53 |
54 54 | f"{str(object=3)}"
55 55 |
56 |-f"{str(x for x in [])}"
56 |+f"{(x for x in [])!s}"
57 57 |
58 58 | f"{str((x for x in []))}"
RUF010.py:58:4: RUF010 [*] Use explicit conversion flag
|
56 | f"{str(x for x in [])}"
57 |
58 | f"{str((x for x in []))}"
| ^^^^^^^^^^^^^^^^^^^^ RUF010
|
= help: Replace with conversion flag
Safe fix
55 55 |
56 56 | f"{str(x for x in [])}"
57 57 |
58 |-f"{str((x for x in []))}"
58 |+f"{(x for x in [])!s}"

View File

@@ -22,7 +22,7 @@ RUF054.py:10:3: RUF054 Indented form feed
RUF054.py:13:2: RUF054 Indented form feed
|
12 | def _():
13 | pass
13 | pass
| ^ RUF054
14 |
15 | if False:

View File

@@ -380,7 +380,7 @@ macro_rules! assert_diagnostics {
}};
($value:expr, @$snapshot:literal) => {{
insta::with_settings!({ omit_expression => true }, {
insta::assert_snapshot!($crate::test::print_messages(&$value), @$snapshot);
insta::assert_snapshot!($crate::test::print_messages(&$value), $snapshot);
});
}};
($name:expr, $value:expr) => {{

View File

@@ -1,6 +1,5 @@
use std::cmp::Ordering;
use std::fmt::{Debug, Display, Formatter};
use std::hash::Hash;
use std::sync::{Arc, OnceLock};
#[cfg(feature = "serde")]
@@ -163,7 +162,7 @@ impl SourceFileBuilder {
/// A source file that is identified by its name. Optionally stores the source code and [`LineIndex`].
///
/// Cloning a [`SourceFile`] is cheap, because it only requires bumping a reference count.
#[derive(Clone, Eq, PartialEq, Hash)]
#[derive(Clone, Eq, PartialEq)]
#[cfg_attr(feature = "get-size", derive(get_size2::GetSize))]
pub struct SourceFile {
inner: Arc<SourceFileInner>,
@@ -242,13 +241,6 @@ impl PartialEq for SourceFileInner {
impl Eq for SourceFileInner {}
impl Hash for SourceFileInner {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.name.hash(state);
self.code.hash(state);
}
}
/// The line and column of an offset in a source file.
///
/// See [`LineIndex::line_column`] for more information.

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.12.7"
version = "0.12.5"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -66,9 +66,9 @@ enum CompletionTargetTokens<'t> {
/// A token was found under the cursor, but it didn't
/// match any of our anticipated token patterns.
Generic { token: &'t Token },
/// No token was found. We generally treat this like
/// `Generic` (i.e., offer scope based completions).
Unknown,
/// No token was found, but we have the offset of the
/// cursor.
Unknown { offset: TextSize },
}
impl<'t> CompletionTargetTokens<'t> {
@@ -78,7 +78,7 @@ impl<'t> CompletionTargetTokens<'t> {
static OBJECT_DOT_NON_EMPTY: [TokenKind; 2] = [TokenKind::Dot, TokenKind::Name];
let offset = match parsed.tokens().at_offset(offset) {
TokenAt::None => return Some(CompletionTargetTokens::Unknown),
TokenAt::None => return Some(CompletionTargetTokens::Unknown { offset }),
TokenAt::Single(tok) => tok.end(),
TokenAt::Between(_, tok) => tok.start(),
};
@@ -122,7 +122,7 @@ impl<'t> CompletionTargetTokens<'t> {
return None;
} else {
let Some(last) = before.last() else {
return Some(CompletionTargetTokens::Unknown);
return Some(CompletionTargetTokens::Unknown { offset });
};
CompletionTargetTokens::Generic { token: last }
},
@@ -171,7 +171,7 @@ impl<'t> CompletionTargetTokens<'t> {
node: covering_node.node(),
})
}
CompletionTargetTokens::Unknown => {
CompletionTargetTokens::Unknown { offset } => {
let range = TextRange::empty(offset);
let covering_node = covering_node(parsed.syntax().into(), range);
Some(CompletionTargetAst::Scoped {

View File

@@ -38,7 +38,7 @@ mod tests {
impl CursorTest {
fn references(&self) -> String {
let Some(mut reference_results) =
let Some(reference_results) =
goto_references(&self.db, self.cursor.file, self.cursor.offset, true)
else {
return "No references found".to_string();
@@ -48,8 +48,6 @@ mod tests {
return "No references found".to_string();
}
reference_results.sort_by_key(ReferenceTarget::file);
self.render_diagnostics(reference_results.into_iter().enumerate().map(
|(i, ref_item)| -> ReferenceResult {
ReferenceResult {

View File

@@ -1,5 +1,5 @@
use std::fmt::Formatter;
use std::panic::RefUnwindSafe;
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;
use std::{cmp, fmt};
@@ -87,7 +87,9 @@ impl ProjectDatabase {
///
/// [`set_check_mode`]: ProjectDatabase::set_check_mode
pub fn check(&self) -> Vec<Diagnostic> {
self.project().check(self, &mut DummyReporter)
let mut reporter = DummyReporter;
let reporter = AssertUnwindSafe(&mut reporter as &mut dyn ProgressReporter);
self.project().check(self, reporter)
}
/// Checks the files in the project and its dependencies, using the given reporter.
@@ -96,6 +98,7 @@ impl ProjectDatabase {
///
/// [`set_check_mode`]: ProjectDatabase::set_check_mode
pub fn check_with_reporter(&self, reporter: &mut dyn ProgressReporter) -> Vec<Diagnostic> {
let reporter = AssertUnwindSafe(reporter);
self.project().check(self, reporter)
}

View File

@@ -174,7 +174,7 @@ impl Project {
/// This is a salsa query to prevent re-computing queries if other, unrelated
/// settings change. For example, we don't want that changing the terminal settings
/// invalidates any type checking queries.
#[salsa::tracked(returns(deref), heap_size=get_size2::heap_size)]
#[salsa::tracked(returns(deref), heap_size=get_size2::GetSize::get_heap_size)]
pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> {
self.settings(db).to_rules()
}
@@ -228,7 +228,7 @@ impl Project {
pub(crate) fn check(
self,
db: &ProjectDatabase,
reporter: &mut dyn ProgressReporter,
mut reporter: AssertUnwindSafe<&mut dyn ProgressReporter>,
) -> Vec<Diagnostic> {
let project_span = tracing::debug_span!("Project::check");
let _span = project_span.enter();
@@ -511,7 +511,7 @@ impl Project {
}
}
#[salsa::tracked(returns(ref), heap_size=get_size2::heap_size)]
#[salsa::tracked(returns(ref), heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn check_file_impl(db: &dyn Db, file: File) -> Result<Box<[Diagnostic]>, Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = Vec::new();

View File

@@ -96,7 +96,7 @@ impl Override {
}
/// Resolves the settings for a given file.
#[salsa::tracked(returns(ref), heap_size=get_size2::heap_size)]
#[salsa::tracked(returns(ref), heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn file_settings(db: &dyn Db, file: File) -> FileSettings {
let settings = db.project().settings(db);
@@ -155,7 +155,7 @@ pub(crate) fn file_settings(db: &dyn Db, file: File) -> FileSettings {
/// This is to make Salsa happy because it requires that queries with only a single argument
/// take a salsa-struct as argument, which isn't the case here. The `()` enables salsa's
/// automatic interning for the arguments.
#[salsa::tracked(heap_size=get_size2::heap_size)]
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
fn merge_overrides(db: &dyn Db, overrides: Vec<Arc<InnerOverrideOptions>>, _: ()) -> FileSettings {
let mut overrides = overrides.into_iter().rev();
let mut merged = (*overrides.next().unwrap()).clone();

View File

@@ -13,7 +13,7 @@ reveal_type(x) # revealed: int | float
x = (1, 2)
x += (3, 4)
reveal_type(x) # revealed: tuple[Literal[1, 2, 3, 4], ...]
reveal_type(x) # revealed: tuple[Literal[1], Literal[2], Literal[3], Literal[4]]
```
## Dunder methods

View File

@@ -1,123 +0,0 @@
# `async` / `await`
## Basic
```py
async def retrieve() -> int:
return 42
async def main():
result = await retrieve()
reveal_type(result) # revealed: int
```
## Generic `async` functions
```py
from typing import TypeVar
T = TypeVar("T")
async def persist(x: T) -> T:
return x
async def f(x: int):
result = await persist(x)
reveal_type(result) # revealed: int
```
## Use cases
### `Future`
```py
import asyncio
import concurrent.futures
def blocking_function() -> int:
return 42
async def main():
loop = asyncio.get_event_loop()
with concurrent.futures.ThreadPoolExecutor() as pool:
result = await loop.run_in_executor(pool, blocking_function)
# TODO: should be `int`
reveal_type(result) # revealed: Unknown
```
### `asyncio.Task`
```py
import asyncio
async def f() -> int:
return 1
async def main():
task = asyncio.create_task(f())
result = await task
# TODO: this should be `int`
reveal_type(result) # revealed: Unknown
```
### `asyncio.gather`
```py
import asyncio
async def task(name: str) -> int:
return len(name)
async def main():
(a, b) = await asyncio.gather(
task("A"),
task("B"),
)
# TODO: these should be `int`
reveal_type(a) # revealed: Unknown
reveal_type(b) # revealed: Unknown
```
## Under the hood
```toml
[environment]
python-version = "3.12" # Use 3.12 to be able to use PEP 695 generics
```
Let's look at the example from the beginning again:
```py
async def retrieve() -> int:
return 42
```
When we look at the signature of this function, we see that it actually returns a `CoroutineType`:
```py
reveal_type(retrieve) # revealed: def retrieve() -> CoroutineType[Any, Any, int]
```
The expression `await retrieve()` desugars into a call to the `__await__` dunder method on the
`CoroutineType` object, followed by a `yield from`. Let's first see the return type of `__await__`:
```py
reveal_type(retrieve().__await__()) # revealed: Generator[Any, None, int]
```
We can see that this returns a `Generator` that yields `Any`, and eventually returns `int`. For the
final type of the `await` expression, we retrieve that third argument of the `Generator` type:
```py
from typing import Generator
def _():
result = yield from retrieve().__await__()
reveal_type(result) # revealed: int
```

View File

@@ -0,0 +1,49 @@
# Static binary operations using `in`
## Basic functionality
This demonstrates type inference support for `<str-literal> in <tuple>`:
```py
from ty_extensions import static_assert
static_assert("foo" in ("quux", "foo", "baz"))
static_assert("foo" not in ("quux", "bar", "baz"))
```
## With variables
```py
from ty_extensions import static_assert
x = ("quux", "foo", "baz")
static_assert("foo" in x)
x = ("quux", "bar", "baz")
static_assert("foo" not in x)
```
## Statically unknown results in a `bool`
```py
def _(a: str, b: str):
reveal_type("foo" in (a, b)) # revealed: bool
```
## Values being unknown doesn't mean the result is unknown
For example, when the types are completely disjoint:
```py
from ty_extensions import static_assert
def _(a: int, b: int):
static_assert("foo" not in (a, b))
```
## Failure cases
```py
# We don't support byte strings.
reveal_type(b"foo" not in (b"quux", b"foo", b"baz")) # revealed: bool
```

View File

@@ -3,14 +3,14 @@
## Concatenation for heterogeneous tuples
```py
reveal_type((1, 2) + (3, 4)) # revealed: tuple[Literal[1, 2, 3, 4], ...]
reveal_type(() + (1, 2)) # revealed: tuple[Literal[1, 2], ...]
reveal_type((1, 2) + ()) # revealed: tuple[Literal[1, 2], ...]
reveal_type((1, 2) + (3, 4)) # revealed: tuple[Literal[1], Literal[2], Literal[3], Literal[4]]
reveal_type(() + (1, 2)) # revealed: tuple[Literal[1], Literal[2]]
reveal_type((1, 2) + ()) # revealed: tuple[Literal[1], Literal[2]]
reveal_type(() + ()) # revealed: tuple[()]
def _(x: tuple[int, str], y: tuple[None, tuple[int]]):
reveal_type(x + y) # revealed: tuple[int | str | None | tuple[int], ...]
reveal_type(y + x) # revealed: tuple[None | tuple[int] | int | str, ...]
reveal_type(x + y) # revealed: tuple[int, str, None, tuple[int]]
reveal_type(y + x) # revealed: tuple[None, tuple[int], int, str]
```
## Concatenation for homogeneous tuples
@@ -19,10 +19,10 @@ def _(x: tuple[int, str], y: tuple[None, tuple[int]]):
def _(x: tuple[int, ...], y: tuple[str, ...]):
reveal_type(x + x) # revealed: tuple[int, ...]
reveal_type(x + y) # revealed: tuple[int | str, ...]
reveal_type((1, 2) + x) # revealed: tuple[int, ...]
reveal_type(x + (3, 4)) # revealed: tuple[int, ...]
reveal_type((1, 2) + x + (3, 4)) # revealed: tuple[int, ...]
reveal_type((1, 2) + y + (3, 4) + x) # revealed: tuple[int | str, ...]
reveal_type((1, 2) + x) # revealed: tuple[Literal[1], Literal[2], *tuple[int, ...]]
reveal_type(x + (3, 4)) # revealed: tuple[*tuple[int, ...], Literal[3], Literal[4]]
reveal_type((1, 2) + x + (3, 4)) # revealed: tuple[Literal[1], Literal[2], *tuple[int, ...], Literal[3], Literal[4]]
reveal_type((1, 2) + y + (3, 4) + x) # revealed: tuple[Literal[1], Literal[2], *tuple[int | str, ...]]
```
We get the same results even when we use a legacy type alias, even though this involves first
@@ -41,8 +41,8 @@ StrTuple = tuple[str, ...]
def _(one_two: OneTwo, x: IntTuple, y: StrTuple, three_four: ThreeFour):
reveal_type(x + x) # revealed: tuple[int, ...]
reveal_type(x + y) # revealed: tuple[int | str, ...]
reveal_type(one_two + x) # revealed: tuple[int, ...]
reveal_type(x + three_four) # revealed: tuple[int, ...]
reveal_type(one_two + x + three_four) # revealed: tuple[int, ...]
reveal_type(one_two + y + three_four + x) # revealed: tuple[int | str, ...]
reveal_type(one_two + x) # revealed: tuple[Literal[1], Literal[2], *tuple[int, ...]]
reveal_type(x + three_four) # revealed: tuple[*tuple[int, ...], Literal[3], Literal[4]]
reveal_type(one_two + x + three_four) # revealed: tuple[Literal[1], Literal[2], *tuple[int, ...], Literal[3], Literal[4]]
reveal_type(one_two + y + three_four + x) # revealed: tuple[Literal[1], Literal[2], *tuple[int | str, ...]]
```

View File

@@ -15,7 +15,8 @@ reveal_type(get_int()) # revealed: int
async def get_int_async() -> int:
return 42
reveal_type(get_int_async()) # revealed: CoroutineType[Any, Any, int]
# TODO: we don't yet support `types.CoroutineType`, should be generic `Coroutine[Any, Any, int]`
reveal_type(get_int_async()) # revealed: @Todo(generic types.CoroutineType)
```
## Generic

View File

@@ -1,70 +0,0 @@
# `replace`
The `replace` function and the `replace` protocol were added in Python 3.13:
<https://docs.python.org/3/whatsnew/3.13.html#copy>
```toml
[environment]
python-version = "3.13"
```
## Basic
```py
from copy import replace
from datetime import time
t = time(12, 0, 0)
t = replace(t, minute=30)
reveal_type(t) # revealed: time
```
## The `__replace__` protocol
### Dataclasses
Dataclasses support the `__replace__` protocol:
```py
from dataclasses import dataclass
from copy import replace
@dataclass
class Point:
x: int
y: int
reveal_type(Point.__replace__) # revealed: (self: Point, *, x: int = int, y: int = int) -> Point
```
The `__replace__` method can either be called directly or through the `replace` function:
```py
a = Point(1, 2)
b = a.__replace__(x=3, y=4)
reveal_type(b) # revealed: Point
b = replace(a, x=3, y=4)
reveal_type(b) # revealed: Point
```
A call to `replace` does not require all keyword arguments:
```py
c = a.__replace__(y=4)
reveal_type(c) # revealed: Point
d = replace(a, y=4)
reveal_type(d) # revealed: Point
```
Invalid calls to `__replace__` or `replace` will raise an error:
```py
e = a.__replace__(x="wrong") # error: [invalid-argument-type]
# TODO: this should ideally also emit an error
e = replace(a, x="wrong")
```

View File

@@ -128,7 +128,7 @@ class AsyncIterable:
return AsyncIterator()
async def _():
# revealed: int
# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in AsyncIterable()]
```
@@ -147,7 +147,6 @@ class Iterable:
return Iterator()
async def _():
# error: [not-iterable] "Object of type `Iterable` is not async-iterable"
# revealed: Unknown
# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in Iterable()]
```

View File

@@ -181,7 +181,7 @@ def f(l: list[int]):
# but if it was greater than that, it will not be an error.
reveal_type(l[0]) # revealed: int
# error: [invalid-argument-type]
# error: [call-non-callable]
del l["string"]
l[0] = 1

View File

@@ -27,7 +27,6 @@ If all of the comprehensions are `async`, on the other hand, the code was still
```py
async def test():
# error: [not-iterable] "Object of type `range` is not async-iterable"
return [[x async for x in elements(n)] async for n in range(3)]
```

View File

@@ -559,6 +559,22 @@ class Answer(Enum):
reveal_type(enum_members(Answer))
```
## Subclasses of `enum.Flag`
```py
from enum import Flag, auto
class KeyModifier(Flag):
SHIFT = auto()
CTRL = auto()
ALT = auto()
reveal_type(KeyModifier.SHIFT) # revealed: Literal[KeyModifier.SHIFT]
# TODO: this should be `KeyModifier`
reveal_type(KeyModifier.SHIFT | KeyModifier.CTRL) # revealed: Literal[KeyModifier.CTRL]
```
## Custom enum types
Enum classes can also be defined using a subclass of `enum.Enum` or any class that uses

View File

@@ -1,130 +0,0 @@
# `yield` and `yield from`
## Basic `yield` and `yield from`
The type of a `yield` expression is the "send" type of the generator function. The type of a
`yield from` expression is the return type of the inner generator:
```py
from typing import Generator
def inner_generator() -> Generator[int, bytes, str]:
yield 1
yield 2
x = yield 3
# TODO: this should be `bytes`
reveal_type(x) # revealed: @Todo(yield expressions)
return "done"
def outer_generator():
result = yield from inner_generator()
reveal_type(result) # revealed: str
```
## `yield from` with a custom iterable
`yield from` can also be used with custom iterable types. In that case, the type of the `yield from`
expression cannot be determined.
```py
from typing import Generator, TypeVar, Generic
T = TypeVar("T")
class OnceIterator(Generic[T]):
def __init__(self, value: T):
self.value = value
self.returned = False
def __next__(self) -> T:
if self.returned:
raise StopIteration(42)
self.returned = True
return self.value
class Once(Generic[T]):
def __init__(self, value: T):
self.value = value
def __iter__(self) -> OnceIterator[T]:
return OnceIterator(self.value)
for x in Once("a"):
reveal_type(x) # revealed: str
def generator() -> Generator:
result = yield from Once("a")
# At runtime, the value of `result` will be the `.value` attribute of the `StopIteration`
# error raised by `OnceIterator` to signal to the interpreter that the iterator has been
# exhausted. Here that will always be 42, but this information cannot be captured in the
# signature of `OnceIterator.__next__`, since exceptions lie outside the type signature.
# We therefore just infer `Unknown` here.
#
# If the `StopIteration` error in `OnceIterator.__next__` had been simply `raise StopIteration`
# (the more common case), then the `.value` attribute of the `StopIteration` instance
# would default to `None`.
reveal_type(result) # revealed: Unknown
```
## `yield from` with a generator that returns `types.GeneratorType`
`types.GeneratorType` is a nominal type that implements the `typing.Generator` protocol:
```py
from types import GeneratorType
def inner_generator() -> GeneratorType[int, bytes, str]:
yield 1
yield 2
x = yield 3
# TODO: this should be `bytes`
reveal_type(x) # revealed: @Todo(yield expressions)
return "done"
def outer_generator():
result = yield from inner_generator()
reveal_type(result) # revealed: str
```
## Error cases
### Non-iterable type
```py
from typing import Generator
def generator() -> Generator:
yield from 42 # error: [not-iterable] "Object of type `Literal[42]` is not iterable"
```
### Invalid `yield` type
```py
from typing import Generator
# TODO: This should be an error. Claims to yield `int`, but yields `str`.
def invalid_generator() -> Generator[int, None, None]:
yield "not an int" # This should be an `int`
```
### Invalid return type
```py
from typing import Generator
# TODO: should emit an error (does not return `str`)
def invalid_generator1() -> Generator[int, None, str]:
yield 1
# TODO: should emit an error (does not return `int`)
def invalid_generator2() -> Generator[int, None, None]:
yield 1
return "done"
```

View File

@@ -145,34 +145,27 @@ T = TypeVar("T")
def takes_mixed_tuple_suffix(x: tuple[int, bytes, *tuple[str, ...], T, int]) -> T:
return x[-2]
# TODO: revealed: Literal[True]
reveal_type(takes_mixed_tuple_suffix((1, b"foo", "bar", "baz", True, 42))) # revealed: Unknown
def takes_mixed_tuple_prefix(x: tuple[int, T, *tuple[str, ...], bool, int]) -> T:
return x[1]
def _(x: tuple[int, bytes, *tuple[str, ...], bool, int]):
reveal_type(takes_mixed_tuple_suffix(x)) # revealed: bool
reveal_type(takes_mixed_tuple_prefix(x)) # revealed: bytes
reveal_type(takes_mixed_tuple_suffix((1, b"foo", "bar", "baz", True, 42))) # revealed: Literal[True]
reveal_type(takes_mixed_tuple_prefix((1, b"foo", "bar", "baz", True, 42))) # revealed: Literal[b"foo"]
# TODO: revealed: Literal[b"foo"]
reveal_type(takes_mixed_tuple_prefix((1, b"foo", "bar", "baz", True, 42))) # revealed: Unknown
def takes_fixed_tuple(x: tuple[T, int]) -> T:
return x[0]
def _(x: tuple[str, int]):
reveal_type(takes_fixed_tuple(x)) # revealed: str
reveal_type(takes_fixed_tuple((True, 42))) # revealed: Literal[True]
def takes_homogeneous_tuple(x: tuple[T, ...]) -> T:
return x[0]
def _(x: tuple[str, int], y: tuple[bool, ...], z: tuple[int, str, *tuple[range, ...], bytes]):
reveal_type(takes_homogeneous_tuple(x)) # revealed: str | int
reveal_type(takes_homogeneous_tuple(y)) # revealed: bool
reveal_type(takes_homogeneous_tuple(z)) # revealed: int | str | range | bytes
reveal_type(takes_homogeneous_tuple((42,))) # revealed: Literal[42]
reveal_type(takes_homogeneous_tuple((42, 43))) # revealed: Literal[42, 43]
# TODO: revealed: Literal[42]
reveal_type(takes_homogeneous_tuple((42,))) # revealed: Unknown
# TODO: revealed: Literal[42, 43]
reveal_type(takes_homogeneous_tuple((42, 43))) # revealed: Unknown
```
## Inferring a bound typevar

View File

@@ -131,34 +131,27 @@ reveal_type(takes_in_protocol(ExplicitGenericSub[str]())) # revealed: str
def takes_mixed_tuple_suffix[T](x: tuple[int, bytes, *tuple[str, ...], T, int]) -> T:
return x[-2]
# TODO: revealed: Literal[True]
reveal_type(takes_mixed_tuple_suffix((1, b"foo", "bar", "baz", True, 42))) # revealed: Unknown
def takes_mixed_tuple_prefix[T](x: tuple[int, T, *tuple[str, ...], bool, int]) -> T:
return x[1]
def _(x: tuple[int, bytes, *tuple[str, ...], bool, int]):
reveal_type(takes_mixed_tuple_suffix(x)) # revealed: bool
reveal_type(takes_mixed_tuple_prefix(x)) # revealed: bytes
reveal_type(takes_mixed_tuple_suffix((1, b"foo", "bar", "baz", True, 42))) # revealed: Literal[True]
reveal_type(takes_mixed_tuple_prefix((1, b"foo", "bar", "baz", True, 42))) # revealed: Literal[b"foo"]
# TODO: revealed: Literal[b"foo"]
reveal_type(takes_mixed_tuple_prefix((1, b"foo", "bar", "baz", True, 42))) # revealed: Unknown
def takes_fixed_tuple[T](x: tuple[T, int]) -> T:
return x[0]
def _(x: tuple[str, int]):
reveal_type(takes_fixed_tuple(x)) # revealed: str
reveal_type(takes_fixed_tuple((True, 42))) # revealed: Literal[True]
def takes_homogeneous_tuple[T](x: tuple[T, ...]) -> T:
return x[0]
def _(x: tuple[str, int], y: tuple[bool, ...], z: tuple[int, str, *tuple[range, ...], bytes]):
reveal_type(takes_homogeneous_tuple(x)) # revealed: str | int
reveal_type(takes_homogeneous_tuple(y)) # revealed: bool
reveal_type(takes_homogeneous_tuple(z)) # revealed: int | str | range | bytes
reveal_type(takes_homogeneous_tuple((42,))) # revealed: Literal[42]
reveal_type(takes_homogeneous_tuple((42, 43))) # revealed: Literal[42, 43]
# TODO: revealed: Literal[42]
reveal_type(takes_homogeneous_tuple((42,))) # revealed: Unknown
# TODO: revealed: Literal[42, 43]
reveal_type(takes_homogeneous_tuple((42, 43))) # revealed: Unknown
```
## Inferring a bound typevar

View File

@@ -1,65 +1,60 @@
# List all members
This test suite acts as a set of unit tests for our `ide_support::all_members` routine, which lists
all members available on a given type. This routine is used for autocomplete suggestions.
## Basic functionality
<!-- snapshot-diagnostics -->
The `ty_extensions.all_members` and `ty_extensions.has_member` functions expose a Python-level API
that can be used to query which attributes `ide_support::all_members` understands as being available
on a given object. For example, all member functions of `str` are available on `"a"`. The Python API
`all_members` returns a tuple of all available members; `has_member` returns `Literal[True]` if a
given member is present in that tuple, and `Literal[False]` if not:
The `ty_extensions.all_members` function allows access to a tuple of accessible members/attributes
on a given object. For example, all member functions of `str` are available on `"a"`:
```py
from ty_extensions import static_assert, has_member
from ty_extensions import all_members, static_assert
static_assert(has_member("a", "replace"))
static_assert(has_member("a", "startswith"))
static_assert(has_member("a", "isupper"))
members_of_str = all_members("a")
static_assert("replace" in members_of_str)
static_assert("startswith" in members_of_str)
static_assert("isupper" in members_of_str)
```
Similarly, special members such as `__add__` are also available:
```py
static_assert(has_member("a", "__add__"))
static_assert(has_member("a", "__gt__"))
static_assert("__add__" in members_of_str)
static_assert("__gt__" in members_of_str)
```
Members of base classes are also included (these dunder methods are defined on `object`):
```py
static_assert(has_member("a", "__doc__"))
static_assert(has_member("a", "__repr__"))
static_assert("__doc__" in members_of_str)
static_assert("__repr__" in members_of_str)
```
Non-existent members are not included:
```py
static_assert(not has_member("a", "non_existent"))
static_assert("non_existent" not in members_of_str)
```
The full list of all members is relatively long, but `reveal_type` can be used in combination with
`all_members` to see them all:
Note: The full list of all members is relatively long, but `reveal_type` can theoretically be used
to see them all:
```py
from typing_extensions import reveal_type
from ty_extensions import all_members
reveal_type(all_members("a")) # error: [revealed-type]
reveal_type(members_of_str) # error: [revealed-type]
```
## Kinds of types
### Class instances
For instances of classes, class members and implicit instance members of all superclasses are
understood as being available:
For instances of classes, `all_members` returns class members and implicit instance members of all
classes in the MRO:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
class Base:
base_class_attr: int = 1
@@ -91,23 +86,25 @@ class C(Intermediate):
def static_method() -> int:
return 1
static_assert(has_member(C(), "base_class_attr"))
static_assert(has_member(C(), "intermediate_attr"))
static_assert(has_member(C(), "class_attr"))
members_of_instance = all_members(C())
static_assert(has_member(C(), "base_instance_attr"))
static_assert(has_member(C(), "intermediate_instance_attr"))
static_assert(has_member(C(), "instance_attr"))
static_assert("base_class_attr" in members_of_instance)
static_assert("intermediate_attr" in members_of_instance)
static_assert("class_attr" in members_of_instance)
static_assert(has_member(C(), "f_base"))
static_assert(has_member(C(), "f_intermediate"))
static_assert(has_member(C(), "f_c"))
static_assert("base_instance_attr" in members_of_instance)
static_assert("intermediate_instance_attr" in members_of_instance)
static_assert("instance_attr" in members_of_instance)
static_assert(has_member(C(), "property_attr"))
static_assert(has_member(C(), "class_method"))
static_assert(has_member(C(), "static_method"))
static_assert("f_base" in members_of_instance)
static_assert("f_intermediate" in members_of_instance)
static_assert("f_c" in members_of_instance)
static_assert(not has_member(C(), "non_existent"))
static_assert("property_attr" in members_of_instance)
static_assert("class_method" in members_of_instance)
static_assert("static_method" in members_of_instance)
static_assert("non_existent" not in members_of_instance)
```
### Class objects
@@ -115,7 +112,7 @@ static_assert(not has_member(C(), "non_existent"))
Class-level attributes can also be accessed through the class itself:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
class Base:
base_attr: int = 1
@@ -126,16 +123,18 @@ class C(Base):
def f(self):
self.instance_attr = True
static_assert(has_member(C, "class_attr"))
static_assert(has_member(C, "base_attr"))
members_of_class = all_members(C)
static_assert(not has_member(C, "non_existent"))
static_assert("class_attr" in members_of_class)
static_assert("base_attr" in members_of_class)
static_assert("non_existent" not in members_of_class)
```
But instance attributes can not be accessed this way:
```py
static_assert(not has_member(C, "instance_attr"))
static_assert("instance_attr" not in members_of_class)
```
When a class has a metaclass, members of that metaclass (and bases of that metaclass) are also
@@ -151,16 +150,16 @@ class Meta(MetaBase):
class D(Base, metaclass=Meta):
class_attr = 3
static_assert(has_member(D, "meta_base_attr"))
static_assert(has_member(D, "meta_attr"))
static_assert(has_member(D, "base_attr"))
static_assert(has_member(D, "class_attr"))
static_assert("meta_base_attr" in all_members(D))
static_assert("meta_attr" in all_members(D))
static_assert("base_attr" in all_members(D))
static_assert("class_attr" in all_members(D))
```
### Generic classes
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
from typing import Generic, TypeVar
T = TypeVar("T")
@@ -168,53 +167,52 @@ T = TypeVar("T")
class C(Generic[T]):
base_attr: T
static_assert(has_member(C[int], "base_attr"))
static_assert(has_member(C[int](), "base_attr"))
static_assert("base_attr" in all_members(C[int]))
static_assert("base_attr" in all_members(C[int]()))
```
### Other instance-like types
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
from typing_extensions import LiteralString
static_assert(has_member(True, "__xor__"))
static_assert(has_member(1, "bit_length"))
static_assert(has_member("a", "startswith"))
static_assert(has_member(b"a", "__buffer__"))
static_assert(has_member(3.14, "is_integer"))
static_assert("__xor__" in all_members(True))
static_assert("bit_length" in all_members(1))
static_assert("startswith" in all_members("a"))
static_assert("__buffer__" in all_members(b"a"))
static_assert("is_integer" in all_members(3.14))
def _(literal_string: LiteralString):
static_assert(has_member(literal_string, "startswith"))
static_assert("startswith" in all_members(literal_string))
static_assert(has_member(("some", "tuple", 1, 2), "count"))
static_assert("count" in all_members(("some", "tuple", 1, 2)))
static_assert(has_member(len, "__doc__"))
static_assert(has_member("a".startswith, "__doc__"))
static_assert("__doc__" in all_members(len))
static_assert("__doc__" in all_members("a".startswith))
```
### Enums
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
from enum import Enum
class Answer(Enum):
NO = 0
YES = 1
static_assert(has_member(Answer, "NO"))
static_assert(has_member(Answer, "YES"))
static_assert(has_member(Answer, "__members__"))
static_assert("NO" in all_members(Answer))
static_assert("YES" in all_members(Answer))
static_assert("__members__" in all_members(Answer))
```
### Unions
For unions, `ide_support::all_members` only returns members that are available on all elements of
the union.
For unions, `all_members` will only return members that are available on all elements of the union.
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
class A:
on_both: int = 1
@@ -225,20 +223,20 @@ class B:
only_on_b: str = "b"
def f(union: A | B):
static_assert(has_member(union, "on_both"))
static_assert(not has_member(union, "only_on_a"))
static_assert(not has_member(union, "only_on_b"))
static_assert("on_both" in all_members(union))
static_assert("only_on_a" not in all_members(union))
static_assert("only_on_b" not in all_members(union))
```
### Intersections
#### Only positive types
Conversely, for intersections, `ide_support::all_members` lists members that are available on any of
the elements:
Conversely, for intersections, `all_members` will list members that are available on any of the
elements:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
class A:
on_both: int = 1
@@ -251,9 +249,9 @@ class B:
def f(intersection: object):
if isinstance(intersection, A):
if isinstance(intersection, B):
static_assert(has_member(intersection, "on_both"))
static_assert(has_member(intersection, "only_on_a"))
static_assert(has_member(intersection, "only_on_b"))
static_assert("on_both" in all_members(intersection))
static_assert("only_on_a" in all_members(intersection))
static_assert("only_on_b" in all_members(intersection))
```
#### With negative types
@@ -261,7 +259,7 @@ def f(intersection: object):
It also works when negative types are introduced:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
class A:
on_all: int = 1
@@ -286,27 +284,27 @@ def f(intersection: object):
if isinstance(intersection, B):
if not isinstance(intersection, C):
reveal_type(intersection) # revealed: A & B & ~C
static_assert(has_member(intersection, "on_all"))
static_assert(has_member(intersection, "only_on_a"))
static_assert(has_member(intersection, "only_on_b"))
static_assert(not has_member(intersection, "only_on_c"))
static_assert(has_member(intersection, "only_on_ab"))
static_assert(has_member(intersection, "only_on_ac"))
static_assert(has_member(intersection, "only_on_bc"))
static_assert("on_all" in all_members(intersection))
static_assert("only_on_a" in all_members(intersection))
static_assert("only_on_b" in all_members(intersection))
static_assert("only_on_c" not in all_members(intersection))
static_assert("only_on_ab" in all_members(intersection))
static_assert("only_on_ac" in all_members(intersection))
static_assert("only_on_bc" in all_members(intersection))
```
## Modules
### Basic support with sub-modules
`ide_support::all_members` can also list attributes on modules:
`all_members` can also list attributes on modules:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
import math
static_assert(has_member(math, "pi"))
static_assert(has_member(math, "cos"))
static_assert("pi" in all_members(math))
static_assert("cos" in all_members(math))
```
This also works for submodules:
@@ -314,18 +312,18 @@ This also works for submodules:
```py
import os
static_assert(has_member(os, "path"))
static_assert("path" in all_members(os))
import os.path
static_assert(has_member(os.path, "join"))
static_assert("join" in all_members(os.path))
```
Special members available on all modules are also included:
```py
static_assert(has_member(math, "__name__"))
static_assert(has_member(math, "__doc__"))
static_assert("__name__" in all_members(math))
static_assert("__doc__" in all_members(math))
```
### `__all__` is not respected for direct module access
@@ -333,12 +331,12 @@ static_assert(has_member(math, "__doc__"))
`foo.py`:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
import bar
static_assert(has_member(bar, "lion"))
static_assert(has_member(bar, "tiger"))
static_assert("lion" in all_members(bar))
static_assert("tiger" in all_members(bar))
```
`bar.py`:
@@ -350,17 +348,17 @@ lion = 1
tiger = 1
```
### `__all__` is respected for `*` imports
### `__all__` is respected for glob imports
`foo.py`:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
import bar
static_assert(has_member(bar, "lion"))
static_assert(not has_member(bar, "tiger"))
static_assert("lion" in all_members(bar))
static_assert("tiger" not in all_members(bar))
```
`bar.py`:
@@ -402,12 +400,12 @@ def evaluate(x: Optional[int] = None) -> int: ...
`play.py`:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
import module
static_assert(has_member(module, "evaluate"))
static_assert(not has_member(module, "Optional"))
static_assert("evaluate" in all_members(module))
static_assert("Optional" not in all_members(module))
```
## Conditionally available members
@@ -423,9 +421,9 @@ python-version = "3.9"
```
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
static_assert(not has_member(42, "bit_count"))
static_assert("bit_count" not in all_members(42))
```
### 3.10
@@ -436,19 +434,19 @@ python-version = "3.10"
```
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
static_assert(has_member(42, "bit_count"))
static_assert("bit_count" in all_members(42))
```
## Failure cases
## Failures cases
### Dynamically added members
Dynamically added members cannot be accessed:
Dynamically added members can not be accessed:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
class C:
static_attr = 1
@@ -462,8 +460,8 @@ class C:
c = C()
c.dynamic_attr = "a"
static_assert(has_member(c, "static_attr"))
static_assert(not has_member(c, "dynamic_attr"))
static_assert("static_attr" in all_members(c))
static_assert("dynamic_attr" not in all_members(c))
```
### Dataclasses
@@ -471,24 +469,24 @@ static_assert(not has_member(c, "dynamic_attr"))
So far, we do not include synthetic members of dataclasses.
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
from dataclasses import dataclass
@dataclass(order=True)
class Person:
age: int
name: str
age: int
static_assert(has_member(Person, "name"))
static_assert(has_member(Person, "age"))
static_assert("name" in all_members(Person))
static_assert("age" in all_members(Person))
# These are always available, since they are also defined on `object`:
static_assert(has_member(Person, "__init__"))
static_assert(has_member(Person, "__repr__"))
static_assert(has_member(Person, "__eq__"))
static_assert("__init__" in all_members(Person))
static_assert("__repr__" in all_members(Person))
static_assert("__eq__" in all_members(Person))
# TODO: this should ideally be available:
static_assert(has_member(Person, "__lt__")) # error: [static-assert-error]
static_assert("__lt__" in all_members(Person)) # error: [static-assert-error]
```
### Attributes not available at runtime
@@ -498,8 +496,8 @@ example, `__annotations__` does not exist on `int` at runtime, but it is availab
on `object` in typeshed:
```py
from ty_extensions import has_member, static_assert
from ty_extensions import all_members, static_assert
# TODO: this should ideally not be available:
static_assert(not has_member(3, "__annotations__")) # error: [static-assert-error]
static_assert("__annotations__" not in all_members(3)) # error: [static-assert-error]
```

View File

@@ -122,14 +122,15 @@ class A:
__slots__ = ()
__slots__ += ("a", "b")
reveal_type(A.__slots__) # revealed: tuple[Literal["a", "b"], ...]
reveal_type(A.__slots__) # revealed: tuple[Literal["a"], Literal["b"]]
class B:
__slots__ = ("c", "d")
# TODO: ideally this would trigger `[instance-layout-conflict]`
# (but it's also not high-priority)
class C(A, B): ...
class C( # error: [instance-layout-conflict]
A,
B,
): ...
```
## Explicitly annotated `__slots__`

View File

@@ -2,6 +2,27 @@
Async `for` loops do not work according to the synchronous iteration protocol.
## Invalid async for loop
```py
async def foo():
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
async for x in Iterator():
pass
# TODO: should reveal `Unknown` because `__aiter__` is not defined
# revealed: @Todo(async iterables/iterators)
# error: [possibly-unresolved-reference]
reveal_type(x)
```
## Basic async for loop
```py
@@ -14,154 +35,11 @@ async def foo():
def __aiter__(self) -> IntAsyncIterator:
return IntAsyncIterator()
# TODO(Alex): async iterables/iterators!
async for x in IntAsyncIterable():
reveal_type(x) # revealed: int
```
## Async for loop with unpacking
```py
async def foo():
class AsyncIterator:
async def __anext__(self) -> tuple[int, str]:
return 42, "hello"
class AsyncIterable:
def __aiter__(self) -> AsyncIterator:
return AsyncIterator()
async for x, y in AsyncIterable():
reveal_type(x) # revealed: int
reveal_type(y) # revealed: str
```
## Error cases
<!-- snapshot-diagnostics -->
### No `__aiter__` method
```py
from typing_extensions import reveal_type
class NotAsyncIterable: ...
async def foo():
# error: [not-iterable] "Object of type `NotAsyncIterable` is not async-iterable"
async for x in NotAsyncIterable():
reveal_type(x) # revealed: Unknown
```
### Synchronously iterable, but not asynchronously iterable
```py
from typing_extensions import reveal_type
async def foo():
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
# error: [not-iterable] "Object of type `Iterator` is not async-iterable"
async for x in Iterator():
reveal_type(x) # revealed: Unknown
```
### No `__anext__` method
```py
from typing_extensions import reveal_type
class NoAnext: ...
class AsyncIterable:
def __aiter__(self) -> NoAnext:
return NoAnext()
async def foo():
# error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
async for x in AsyncIterable():
reveal_type(x) # revealed: Unknown
```
### Possibly unbound `__anext__` method
```py
from typing_extensions import reveal_type
async def foo(flag: bool):
class PossiblyUnboundAnext:
if flag:
async def __anext__(self) -> int:
return 42
class AsyncIterable:
def __aiter__(self) -> PossiblyUnboundAnext:
return PossiblyUnboundAnext()
# error: [not-iterable] "Object of type `AsyncIterable` may not be async-iterable"
async for x in AsyncIterable():
reveal_type(x) # revealed: int
```
### Possibly unbound `__aiter__` method
```py
from typing_extensions import reveal_type
async def foo(flag: bool):
class AsyncIterable:
async def __anext__(self) -> int:
return 42
class PossiblyUnboundAiter:
if flag:
def __aiter__(self) -> AsyncIterable:
return AsyncIterable()
# error: "Object of type `PossiblyUnboundAiter` may not be async-iterable"
async for x in PossiblyUnboundAiter():
reveal_type(x) # revealed: int
```
### Wrong signature for `__aiter__`
```py
from typing_extensions import reveal_type
class AsyncIterator:
async def __anext__(self) -> int:
return 42
class AsyncIterable:
def __aiter__(self, arg: int) -> AsyncIterator: # wrong
return AsyncIterator()
async def foo():
# error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
async for x in AsyncIterable():
reveal_type(x) # revealed: int
```
### Wrong signature for `__anext__`
```py
from typing_extensions import reveal_type
class AsyncIterator:
async def __anext__(self, arg: int) -> int: # wrong
return 42
class AsyncIterable:
def __aiter__(self) -> AsyncIterator:
return AsyncIterator()
async def foo():
# error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
async for x in AsyncIterable():
reveal_type(x) # revealed: int
pass
# error: [possibly-unresolved-reference]
# revealed: @Todo(async iterables/iterators)
reveal_type(x)
```

View File

@@ -253,7 +253,7 @@ does["not"]["exist"] = 0
reveal_type(does["not"]["exist"]) # revealed: Unknown
non_subscriptable = 1
# error: [invalid-assignment]
# error: [non-subscriptable]
non_subscriptable[0] = 0
# error: [non-subscriptable]
reveal_type(non_subscriptable[0]) # revealed: Unknown
@@ -318,7 +318,7 @@ def f(c: C, s: str):
reveal_type(c.x) # revealed: int | None
s = c.x # error: [invalid-assignment]
# error: [invalid-assignment] "Method `__setitem__` of type `Overload[(key: SupportsIndex, value: int, /) -> None, (key: slice[Any, Any, Any], value: Iterable[int], /) -> None]` cannot be called with a key of type `Literal[0]` and a value of type `str` on object of type `list[int]`"
# TODO: This assignment is invalid and should result in an error.
c.l[0] = s
reveal_type(c.l[0]) # revealed: int
```

View File

@@ -12,32 +12,33 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/ide_support/all_members.
## mdtest_snippet.py
```
1 | from ty_extensions import static_assert, has_member
1 | from ty_extensions import all_members, static_assert
2 |
3 | static_assert(has_member("a", "replace"))
4 | static_assert(has_member("a", "startswith"))
5 | static_assert(has_member("a", "isupper"))
6 | static_assert(has_member("a", "__add__"))
7 | static_assert(has_member("a", "__gt__"))
8 | static_assert(has_member("a", "__doc__"))
9 | static_assert(has_member("a", "__repr__"))
10 | static_assert(not has_member("a", "non_existent"))
11 | from typing_extensions import reveal_type
12 | from ty_extensions import all_members
13 |
14 | reveal_type(all_members("a")) # error: [revealed-type]
3 | members_of_str = all_members("a")
4 |
5 | static_assert("replace" in members_of_str)
6 | static_assert("startswith" in members_of_str)
7 | static_assert("isupper" in members_of_str)
8 | static_assert("__add__" in members_of_str)
9 | static_assert("__gt__" in members_of_str)
10 | static_assert("__doc__" in members_of_str)
11 | static_assert("__repr__" in members_of_str)
12 | static_assert("non_existent" not in members_of_str)
13 | from typing_extensions import reveal_type
14 |
15 | reveal_type(members_of_str) # error: [revealed-type]
```
# Diagnostics
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:14:13
--> src/mdtest_snippet.py:15:13
|
12 | from ty_extensions import all_members
13 |
14 | reveal_type(all_members("a")) # error: [revealed-type]
| ^^^^^^^^^^^^^^^^ `tuple[Literal["__add__"], Literal["__annotations__"], Literal["__class__"], Literal["__contains__"], Literal["__delattr__"], Literal["__dict__"], Literal["__dir__"], Literal["__doc__"], Literal["__eq__"], Literal["__format__"], Literal["__ge__"], Literal["__getattribute__"], Literal["__getitem__"], Literal["__getnewargs__"], Literal["__gt__"], Literal["__hash__"], Literal["__init__"], Literal["__init_subclass__"], Literal["__iter__"], Literal["__le__"], Literal["__len__"], Literal["__lt__"], Literal["__mod__"], Literal["__module__"], Literal["__mul__"], Literal["__ne__"], Literal["__new__"], Literal["__reduce__"], Literal["__reduce_ex__"], Literal["__repr__"], Literal["__reversed__"], Literal["__rmul__"], Literal["__setattr__"], Literal["__sizeof__"], Literal["__str__"], Literal["__subclasshook__"], Literal["capitalize"], Literal["casefold"], Literal["center"], Literal["count"], Literal["encode"], Literal["endswith"], Literal["expandtabs"], Literal["find"], Literal["format"], Literal["format_map"], Literal["index"], Literal["isalnum"], Literal["isalpha"], Literal["isascii"], Literal["isdecimal"], Literal["isdigit"], Literal["isidentifier"], Literal["islower"], Literal["isnumeric"], Literal["isprintable"], Literal["isspace"], Literal["istitle"], Literal["isupper"], Literal["join"], Literal["ljust"], Literal["lower"], Literal["lstrip"], Literal["maketrans"], Literal["partition"], Literal["removeprefix"], Literal["removesuffix"], Literal["replace"], Literal["rfind"], Literal["rindex"], Literal["rjust"], Literal["rpartition"], Literal["rsplit"], Literal["rstrip"], Literal["split"], Literal["splitlines"], Literal["startswith"], Literal["strip"], Literal["swapcase"], Literal["title"], Literal["translate"], Literal["upper"], Literal["zfill"]]`
13 | from typing_extensions import reveal_type
14 |
15 | reveal_type(members_of_str) # error: [revealed-type]
| ^^^^^^^^^^^^^^ `tuple[Literal["__add__"], Literal["__annotations__"], Literal["__class__"], Literal["__contains__"], Literal["__delattr__"], Literal["__dict__"], Literal["__dir__"], Literal["__doc__"], Literal["__eq__"], Literal["__format__"], Literal["__ge__"], Literal["__getattribute__"], Literal["__getitem__"], Literal["__getnewargs__"], Literal["__gt__"], Literal["__hash__"], Literal["__init__"], Literal["__init_subclass__"], Literal["__iter__"], Literal["__le__"], Literal["__len__"], Literal["__lt__"], Literal["__mod__"], Literal["__module__"], Literal["__mul__"], Literal["__ne__"], Literal["__new__"], Literal["__reduce__"], Literal["__reduce_ex__"], Literal["__repr__"], Literal["__reversed__"], Literal["__rmul__"], Literal["__setattr__"], Literal["__sizeof__"], Literal["__str__"], Literal["__subclasshook__"], Literal["capitalize"], Literal["casefold"], Literal["center"], Literal["count"], Literal["encode"], Literal["endswith"], Literal["expandtabs"], Literal["find"], Literal["format"], Literal["format_map"], Literal["index"], Literal["isalnum"], Literal["isalpha"], Literal["isascii"], Literal["isdecimal"], Literal["isdigit"], Literal["isidentifier"], Literal["islower"], Literal["isnumeric"], Literal["isprintable"], Literal["isspace"], Literal["istitle"], Literal["isupper"], Literal["join"], Literal["ljust"], Literal["lower"], Literal["lstrip"], Literal["maketrans"], Literal["partition"], Literal["removeprefix"], Literal["removesuffix"], Literal["replace"], Literal["rfind"], Literal["rindex"], Literal["rjust"], Literal["rpartition"], Literal["rsplit"], Literal["rstrip"], Literal["split"], Literal["splitlines"], Literal["startswith"], Literal["strip"], Literal["swapcase"], Literal["title"], Literal["translate"], Literal["upper"], Literal["zfill"]]`
|
```

View File

@@ -1,52 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - No `__aiter__` method
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | class NotAsyncIterable: ...
4 |
5 | async def foo():
6 | # error: [not-iterable] "Object of type `NotAsyncIterable` is not async-iterable"
7 | async for x in NotAsyncIterable():
8 | reveal_type(x) # revealed: Unknown
```
# Diagnostics
```
error[not-iterable]: Object of type `NotAsyncIterable` is not async-iterable
--> src/mdtest_snippet.py:7:20
|
5 | async def foo():
6 | # error: [not-iterable] "Object of type `NotAsyncIterable` is not async-iterable"
7 | async for x in NotAsyncIterable():
| ^^^^^^^^^^^^^^^^^^
8 | reveal_type(x) # revealed: Unknown
|
info: It has no `__aiter__` method
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:8:21
|
6 | # error: [not-iterable] "Object of type `NotAsyncIterable` is not async-iterable"
7 | async for x in NotAsyncIterable():
8 | reveal_type(x) # revealed: Unknown
| ^ `Unknown`
|
```

View File

@@ -1,56 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - No `__anext__` method
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | class NoAnext: ...
4 |
5 | class AsyncIterable:
6 | def __aiter__(self) -> NoAnext:
7 | return NoAnext()
8 |
9 | async def foo():
10 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
11 | async for x in AsyncIterable():
12 | reveal_type(x) # revealed: Unknown
```
# Diagnostics
```
error[not-iterable]: Object of type `AsyncIterable` is not async-iterable
--> src/mdtest_snippet.py:11:20
|
9 | async def foo():
10 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
11 | async for x in AsyncIterable():
| ^^^^^^^^^^^^^^^
12 | reveal_type(x) # revealed: Unknown
|
info: Its `__aiter__` method returns an object of type `NoAnext`, which has no `__anext__` method
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:12:21
|
10 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
11 | async for x in AsyncIterable():
12 | reveal_type(x) # revealed: Unknown
| ^ `Unknown`
|
```

View File

@@ -1,58 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - Possibly unbound `__aiter__` method
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | async def foo(flag: bool):
4 | class AsyncIterable:
5 | async def __anext__(self) -> int:
6 | return 42
7 |
8 | class PossiblyUnboundAiter:
9 | if flag:
10 | def __aiter__(self) -> AsyncIterable:
11 | return AsyncIterable()
12 |
13 | # error: "Object of type `PossiblyUnboundAiter` may not be async-iterable"
14 | async for x in PossiblyUnboundAiter():
15 | reveal_type(x) # revealed: int
```
# Diagnostics
```
error[not-iterable]: Object of type `PossiblyUnboundAiter` may not be async-iterable
--> src/mdtest_snippet.py:14:20
|
13 | # error: "Object of type `PossiblyUnboundAiter` may not be async-iterable"
14 | async for x in PossiblyUnboundAiter():
| ^^^^^^^^^^^^^^^^^^^^^^
15 | reveal_type(x) # revealed: int
|
info: Its `__aiter__` attribute (with type `bound method PossiblyUnboundAiter.__aiter__() -> AsyncIterable`) may not be callable
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:15:21
|
13 | # error: "Object of type `PossiblyUnboundAiter` may not be async-iterable"
14 | async for x in PossiblyUnboundAiter():
15 | reveal_type(x) # revealed: int
| ^ `int`
|
```

View File

@@ -1,58 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - Possibly unbound `__anext__` method
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | async def foo(flag: bool):
4 | class PossiblyUnboundAnext:
5 | if flag:
6 | async def __anext__(self) -> int:
7 | return 42
8 |
9 | class AsyncIterable:
10 | def __aiter__(self) -> PossiblyUnboundAnext:
11 | return PossiblyUnboundAnext()
12 |
13 | # error: [not-iterable] "Object of type `AsyncIterable` may not be async-iterable"
14 | async for x in AsyncIterable():
15 | reveal_type(x) # revealed: int
```
# Diagnostics
```
error[not-iterable]: Object of type `AsyncIterable` may not be async-iterable
--> src/mdtest_snippet.py:14:20
|
13 | # error: [not-iterable] "Object of type `AsyncIterable` may not be async-iterable"
14 | async for x in AsyncIterable():
| ^^^^^^^^^^^^^^^
15 | reveal_type(x) # revealed: int
|
info: Its `__aiter__` method returns an object of type `PossiblyUnboundAnext`, which may not have a `__anext__` method
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:15:21
|
13 | # error: [not-iterable] "Object of type `AsyncIterable` may not be async-iterable"
14 | async for x in AsyncIterable():
15 | reveal_type(x) # revealed: int
| ^ `int`
|
```

View File

@@ -1,57 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - Synchronously iterable, but not asynchronously iterable
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | async def foo():
4 | class Iterator:
5 | def __next__(self) -> int:
6 | return 42
7 |
8 | class Iterable:
9 | def __iter__(self) -> Iterator:
10 | return Iterator()
11 |
12 | # error: [not-iterable] "Object of type `Iterator` is not async-iterable"
13 | async for x in Iterator():
14 | reveal_type(x) # revealed: Unknown
```
# Diagnostics
```
error[not-iterable]: Object of type `Iterator` is not async-iterable
--> src/mdtest_snippet.py:13:20
|
12 | # error: [not-iterable] "Object of type `Iterator` is not async-iterable"
13 | async for x in Iterator():
| ^^^^^^^^^^
14 | reveal_type(x) # revealed: Unknown
|
info: It has no `__aiter__` method
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:14:21
|
12 | # error: [not-iterable] "Object of type `Iterator` is not async-iterable"
13 | async for x in Iterator():
14 | reveal_type(x) # revealed: Unknown
| ^ `Unknown`
|
```

View File

@@ -1,59 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - Wrong signature for `__anext__`
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | class AsyncIterator:
4 | async def __anext__(self, arg: int) -> int: # wrong
5 | return 42
6 |
7 | class AsyncIterable:
8 | def __aiter__(self) -> AsyncIterator:
9 | return AsyncIterator()
10 |
11 | async def foo():
12 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
13 | async for x in AsyncIterable():
14 | reveal_type(x) # revealed: int
```
# Diagnostics
```
error[not-iterable]: Object of type `AsyncIterable` is not async-iterable
--> src/mdtest_snippet.py:13:20
|
11 | async def foo():
12 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
13 | async for x in AsyncIterable():
| ^^^^^^^^^^^^^^^
14 | reveal_type(x) # revealed: int
|
info: Its `__aiter__` method returns an object of type `AsyncIterator`, which has an invalid `__anext__` method
info: Expected signature for `__anext__` is `def __anext__(self): ...`
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:14:21
|
12 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
13 | async for x in AsyncIterable():
14 | reveal_type(x) # revealed: int
| ^ `int`
|
```

View File

@@ -1,59 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: async_for.md - Async - Error cases - Wrong signature for `__aiter__`
mdtest path: crates/ty_python_semantic/resources/mdtest/loops/async_for.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import reveal_type
2 |
3 | class AsyncIterator:
4 | async def __anext__(self) -> int:
5 | return 42
6 |
7 | class AsyncIterable:
8 | def __aiter__(self, arg: int) -> AsyncIterator: # wrong
9 | return AsyncIterator()
10 |
11 | async def foo():
12 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
13 | async for x in AsyncIterable():
14 | reveal_type(x) # revealed: int
```
# Diagnostics
```
error[not-iterable]: Object of type `AsyncIterable` is not async-iterable
--> src/mdtest_snippet.py:13:20
|
11 | async def foo():
12 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
13 | async for x in AsyncIterable():
| ^^^^^^^^^^^^^^^
14 | reveal_type(x) # revealed: int
|
info: Its `__aiter__` method has an invalid signature
info: Expected signature `def __aiter__(self): ...`
info: rule `not-iterable` is enabled by default
```
```
info[revealed-type]: Revealed type
--> src/mdtest_snippet.py:14:21
|
12 | # error: [not-iterable] "Object of type `AsyncIterable` is not async-iterable"
13 | async for x in AsyncIterable():
14 | reveal_type(x) # revealed: int
| ^ `int`
|
```

View File

@@ -60,7 +60,7 @@ error[not-iterable]: Object of type `Iterable1` may not be iterable
29 | reveal_type(x) # revealed: int | str
|
info: Its `__iter__` method returns an object of type `Iterator1`, which may have an invalid `__next__` method
info: Expected signature for `__next__` is `def __next__(self): ...`
info: Expected signature for `__next__` is `def __next__(self): ...`)
info: rule `not-iterable` is enabled by default
```

View File

@@ -19,15 +19,14 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/diagnostics/semantic_syn
5 | # error: 19 [invalid-syntax] "cannot use an asynchronous comprehension inside of a synchronous comprehension on Python 3.10 (syntax was added in 3.11)"
6 | return {n: [x async for x in elements(n)] for n in range(3)}
7 | async def test():
8 | # error: [not-iterable] "Object of type `range` is not async-iterable"
9 | return [[x async for x in elements(n)] async for n in range(3)]
10 | async def f():
11 | [x for x in [1]] and [x async for x in elements(1)]
12 |
13 | async def f():
14 | def g():
15 | pass
16 | [x async for x in elements(1)]
8 | return [[x async for x in elements(n)] async for n in range(3)]
9 | async def f():
10 | [x for x in [1]] and [x async for x in elements(1)]
11 |
12 | async def f():
13 | def g():
14 | pass
15 | [x async for x in elements(1)]
```
# Diagnostics
@@ -41,23 +40,7 @@ error[invalid-syntax]
6 | return {n: [x async for x in elements(n)] for n in range(3)}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot use an asynchronous comprehension inside of a synchronous comprehension on Python 3.10 (syntax was added in 3.11)
7 | async def test():
8 | # error: [not-iterable] "Object of type `range` is not async-iterable"
8 | return [[x async for x in elements(n)] async for n in range(3)]
|
```
```
error[not-iterable]: Object of type `range` is not async-iterable
--> src/mdtest_snippet.py:9:59
|
7 | async def test():
8 | # error: [not-iterable] "Object of type `range` is not async-iterable"
9 | return [[x async for x in elements(n)] async for n in range(3)]
| ^^^^^^^^
10 | async def f():
11 | [x for x in [1]] and [x async for x in elements(1)]
|
info: It has no `__aiter__` method
info: rule `not-iterable` is enabled by default
```

View File

@@ -1,6 +1,6 @@
# Instance subscript
## `__getitem__` unbound
## Getitem unbound
```py
class NotSubscriptable: ...
@@ -8,17 +8,17 @@ class NotSubscriptable: ...
a = NotSubscriptable()[0] # error: "Cannot subscript object of type `NotSubscriptable` with no `__getitem__` method"
```
## `__getitem__` not callable
## Getitem not callable
```py
class NotSubscriptable:
__getitem__ = None
# error: "Method `__getitem__` of type `Unknown | None` is possibly not callable on object of type `NotSubscriptable`"
# error: "Method `__getitem__` of type `Unknown | None` is not callable on object of type `NotSubscriptable`"
a = NotSubscriptable()[0]
```
## Valid `__getitem__`
## Valid getitem
```py
class Identity:
@@ -28,7 +28,7 @@ class Identity:
reveal_type(Identity()[0]) # revealed: int
```
## `__getitem__` union
## Getitem union
```py
def _(flag: bool):
@@ -42,68 +42,3 @@ def _(flag: bool):
reveal_type(Identity()[0]) # revealed: int | str
```
## `__getitem__` with invalid index argument
```py
class Identity:
def __getitem__(self, index: int) -> int:
return index
a = Identity()
# error: [invalid-argument-type] "Method `__getitem__` of type `bound method Identity.__getitem__(index: int) -> int` cannot be called with key of type `Literal["a"]` on object of type `Identity`"
a["a"]
```
## `__setitem__` with no `__getitem__`
```py
class NoGetitem:
def __setitem__(self, index: int, value: int) -> None:
pass
a = NoGetitem()
a[0] = 0
```
## Subscript store with no `__setitem__`
```py
class NoSetitem: ...
a = NoSetitem()
a[0] = 0 # error: "Cannot assign to object of type `NoSetitem` with no `__setitem__` method"
```
## `__setitem__` not callable
```py
class NoSetitem:
__setitem__ = None
a = NoSetitem()
a[0] = 0 # error: "Method `__setitem__` of type `Unknown | None` is possibly not callable on object of type `NoSetitem`"
```
## Valid `__setitem__` method
```py
class Identity:
def __setitem__(self, index: int, value: int) -> None:
pass
a = Identity()
a[0] = 0
```
## `__setitem__` with invalid index argument
```py
class Identity:
def __setitem__(self, index: int, value: int) -> None:
pass
a = Identity()
# error: [invalid-assignment] "Method `__setitem__` of type `bound method Identity.__setitem__(index: int, value: int) -> None` cannot be called with a key of type `Literal["a"]` and a value of type `Literal[0]` on object of type `Identity`"
a["a"] = 0
```

View File

@@ -17,7 +17,7 @@ reveal_type(x[0]) # revealed: Unknown
# TODO reveal list[int]
reveal_type(x[0:1]) # revealed: list[Unknown]
# error: [invalid-argument-type]
# error: [call-non-callable]
reveal_type(x["a"]) # revealed: Unknown
```
@@ -29,9 +29,11 @@ In assignment, we might also have a named assignment. This should also get type
x = [1, 2, 3]
x[0 if (y := 2) else 1] = 5
# error: [invalid-assignment]
# TODO: better error than "method `__getitem__` not callable on type `list`"
# error: [call-non-callable]
x["a" if (y := 2) else 1] = 6
# error: [invalid-assignment]
# TODO: better error than "method `__getitem__` not callable on type `list`"
# error: [call-non-callable]
x["a" if (y := 2) else "b"] = 6
```

View File

@@ -2,11 +2,6 @@
## Indexing
```toml
[environment]
python-version = "3.11"
```
```py
t = (1, "a", "b")
@@ -25,148 +20,6 @@ b = t[-4] # error: [index-out-of-bounds]
reveal_type(b) # revealed: Unknown
```
Precise types for index operations are also inferred for tuple subclasses:
```py
class I0: ...
class I1: ...
class I2: ...
class I3: ...
class I5: ...
class HeterogeneousSubclass0(tuple[()]): ...
# revealed: Overload[(self, index: SupportsIndex, /) -> Never, (self, index: slice[Any, Any, Any], /) -> tuple[()]]
reveal_type(HeterogeneousSubclass0.__getitem__)
def f0(h0: HeterogeneousSubclass0, i: int):
reveal_type(h0[0]) # revealed: Never
reveal_type(h0[1]) # revealed: Never
reveal_type(h0[-1]) # revealed: Never
reveal_type(h0[i]) # revealed: Never
class HeterogeneousSubclass1(tuple[I0]): ...
# revealed: Overload[(self, index: SupportsIndex, /) -> I0, (self, index: slice[Any, Any, Any], /) -> tuple[I0, ...]]
reveal_type(HeterogeneousSubclass1.__getitem__)
def f0(h1: HeterogeneousSubclass1, i: int):
reveal_type(h1[0]) # revealed: I0
reveal_type(h1[1]) # revealed: I0
reveal_type(h1[-1]) # revealed: I0
reveal_type(h1[i]) # revealed: I0
# Element at index 2 is deliberately the same as the element at index 1,
# to illustrate that the `__getitem__` overloads for these two indices are combined
class HeterogeneousSubclass4(tuple[I0, I1, I0, I3]): ...
# revealed: Overload[(self, index: Literal[-4, -2, 0, 2], /) -> I0, (self, index: Literal[-3, 1], /) -> I1, (self, index: Literal[-1, 3], /) -> I3, (self, index: SupportsIndex, /) -> I0 | I1 | I3, (self, index: slice[Any, Any, Any], /) -> tuple[I0 | I1 | I3, ...]]
reveal_type(HeterogeneousSubclass4.__getitem__)
def f(h4: HeterogeneousSubclass4, i: int):
reveal_type(h4[0]) # revealed: I0
reveal_type(h4[1]) # revealed: I1
reveal_type(h4[2]) # revealed: I0
reveal_type(h4[3]) # revealed: I3
reveal_type(h4[-1]) # revealed: I3
reveal_type(h4[-2]) # revealed: I0
reveal_type(h4[-3]) # revealed: I1
reveal_type(h4[-4]) # revealed: I0
reveal_type(h4[i]) # revealed: I0 | I1 | I3
class MixedSubclass(tuple[I0, *tuple[I1, ...], I2, I3, I2, I5]): ...
# revealed: Overload[(self, index: Literal[0], /) -> I0, (self, index: Literal[-5], /) -> I1 | I0, (self, index: Literal[-1], /) -> I5, (self, index: Literal[1], /) -> I1 | I2, (self, index: Literal[-4, -2], /) -> I2, (self, index: Literal[2, 3], /) -> I1 | I2 | I3, (self, index: Literal[-3], /) -> I3, (self, index: Literal[4], /) -> I1 | I2 | I3 | I5, (self, index: SupportsIndex, /) -> I0 | I1 | I2 | I3 | I5, (self, index: slice[Any, Any, Any], /) -> tuple[I0 | I1 | I2 | I3 | I5, ...]]
reveal_type(MixedSubclass.__getitem__)
def g(m: MixedSubclass, i: int):
reveal_type(m[0]) # revealed: I0
reveal_type(m[1]) # revealed: I1 | I2
reveal_type(m[2]) # revealed: I1 | I2 | I3
reveal_type(m[3]) # revealed: I1 | I2 | I3
reveal_type(m[4]) # revealed: I1 | I2 | I3 | I5
reveal_type(m[-1]) # revealed: I5
reveal_type(m[-2]) # revealed: I2
reveal_type(m[-3]) # revealed: I3
reveal_type(m[-4]) # revealed: I2
reveal_type(m[-5]) # revealed: I1 | I0
reveal_type(m[i]) # revealed: I0 | I1 | I2 | I3 | I5
# Ideally we would not include `I0` in the unions for these,
# but it's not possible to do this using only synthesized overloads.
reveal_type(m[5]) # revealed: I0 | I1 | I2 | I3 | I5
reveal_type(m[10]) # revealed: I0 | I1 | I2 | I3 | I5
# Similarly, ideally these would just be `I0` | I1`,
# but achieving that with only synthesized overloads wouldn't be possible
reveal_type(m[-6]) # revealed: I0 | I1 | I2 | I3 | I5
reveal_type(m[-10]) # revealed: I0 | I1 | I2 | I3 | I5
class MixedSubclass2(tuple[I0, I1, *tuple[I2, ...], I3]): ...
# revealed: Overload[(self, index: Literal[0], /) -> I0, (self, index: Literal[-2], /) -> I2 | I1, (self, index: Literal[1], /) -> I1, (self, index: Literal[-3], /) -> I2 | I1 | I0, (self, index: Literal[-1], /) -> I3, (self, index: Literal[2], /) -> I2 | I3, (self, index: SupportsIndex, /) -> I0 | I1 | I2 | I3, (self, index: slice[Any, Any, Any], /) -> tuple[I0 | I1 | I2 | I3, ...]]
reveal_type(MixedSubclass2.__getitem__)
def g(m: MixedSubclass2, i: int):
reveal_type(m[0]) # revealed: I0
reveal_type(m[1]) # revealed: I1
reveal_type(m[2]) # revealed: I2 | I3
# Ideally this would just be `I2 | I3`,
# but that's not possible to achieve with synthesized overloads
reveal_type(m[3]) # revealed: I0 | I1 | I2 | I3
reveal_type(m[-1]) # revealed: I3
reveal_type(m[-2]) # revealed: I2 | I1
reveal_type(m[-3]) # revealed: I2 | I1 | I0
# Ideally this would just be `I2 | I1 | I0`,
# but that's not possible to achieve with synthesized overloads
reveal_type(m[-4]) # revealed: I0 | I1 | I2 | I3
```
The stdlib API `os.stat` is a commonly used API that returns an instance of a tuple subclass
(`os.stat_result`), and therefore provides a good integration test for tuple subclasses.
```py
import os
import stat
reveal_type(os.stat("my_file.txt")) # revealed: stat_result
reveal_type(os.stat("my_file.txt")[stat.ST_MODE]) # revealed: int
reveal_type(os.stat("my_file.txt")[stat.ST_ATIME]) # revealed: int | float
# revealed: tuple[<class 'stat_result'>, <class 'structseq[int | float]'>, <class 'tuple[int, int, int, int, int, int, int, int | float, int | float, int | float]'>, <class 'Sequence[int | float]'>, <class 'Reversible[int | float]'>, <class 'Collection[int | float]'>, <class 'Iterable[int | float]'>, <class 'Container[int | float]'>, typing.Protocol, typing.Generic, <class 'object'>]
reveal_type(os.stat_result.__mro__)
# There are no specific overloads for the `float` elements in `os.stat_result`,
# because the fallback `(self, index: SupportsIndex, /) -> int | float` overload
# gives the right result for those elements in the tuple, and we aim to synthesize
# the minimum number of overloads for any given tuple
#
# revealed: Overload[(self, index: Literal[-10, -9, -8, -7, -6, -5, -4, 0, 1, 2, 3, 4, 5, 6], /) -> int, (self, index: SupportsIndex, /) -> int | float, (self, index: slice[Any, Any, Any], /) -> tuple[int | float, ...]]
reveal_type(os.stat_result.__getitem__)
```
Because of the synthesized `__getitem__` overloads we synthesize for tuples and tuple subclasses,
tuples are naturally understood as being subtypes of protocols that have precise return types from
`__getitem__` method members:
```py
from typing import Protocol, Literal
from ty_extensions import static_assert, is_subtype_of
class IntFromZeroSubscript(Protocol):
def __getitem__(self, index: Literal[0], /) -> int: ...
static_assert(is_subtype_of(tuple[int, str], IntFromZeroSubscript))
class TupleSubclass(tuple[int, str]): ...
static_assert(is_subtype_of(TupleSubclass, IntFromZeroSubscript))
```
## Slices
```py
@@ -240,7 +93,9 @@ def homogeneous(t: tuple[str, ...]) -> None:
reveal_type(t[-3]) # revealed: str
reveal_type(t[-4]) # revealed: str
def mixed(t: tuple[Literal[1], Literal[2], Literal[3], *tuple[str, ...], Literal[8], Literal[9], Literal[10]]) -> None:
def mixed(s: tuple[str, ...]) -> None:
t = (1, 2, 3) + s + (8, 9, 10)
reveal_type(t[0]) # revealed: Literal[1]
reveal_type(t[1]) # revealed: Literal[2]
reveal_type(t[2]) # revealed: Literal[3]

View File

@@ -151,28 +151,6 @@ class Foo(type[int]): ...
reveal_type(Foo.__mro__) # revealed: tuple[<class 'Foo'>, @Todo(GenericAlias instance), <class 'object'>]
```
## Display of generic `type[]` types
```toml
[environment]
python-version = "3.12"
```
```py
from typing import Generic, TypeVar
class Foo[T]: ...
S = TypeVar("S")
class Bar(Generic[S]): ...
def _(x: Foo[int], y: Bar[str], z: list[bytes]):
reveal_type(type(x)) # revealed: type[Foo[int]]
reveal_type(type(y)) # revealed: type[Bar[str]]
reveal_type(type(z)) # revealed: type[list[bytes]]
```
## `@final` classes
`type[]` types are eagerly converted to class-literal types if a class decorated with `@final` is

View File

@@ -17,80 +17,5 @@ class Manager:
async def test():
async with Manager() as f:
reveal_type(f) # revealed: Target
```
## Multiple targets
```py
class Manager:
async def __aenter__(self) -> tuple[int, str]:
return 42, "hello"
async def __aexit__(self, exc_type, exc_value, traceback): ...
async def test():
async with Manager() as (x, y):
reveal_type(x) # revealed: int
reveal_type(y) # revealed: str
```
## `@asynccontextmanager`
```py
from contextlib import asynccontextmanager
from typing import AsyncGenerator
class Session: ...
@asynccontextmanager
async def connect() -> AsyncGenerator[Session]:
yield Session()
# TODO: this should be `() -> _AsyncGeneratorContextManager[Session, None]`
reveal_type(connect) # revealed: (...) -> _AsyncGeneratorContextManager[Unknown, None]
async def main():
async with connect() as session:
# TODO: should be `Session`
reveal_type(session) # revealed: Unknown
```
## `asyncio.timeout`
```toml
[environment]
python-version = "3.11"
```
```py
import asyncio
async def long_running_task():
await asyncio.sleep(5)
async def main():
async with asyncio.timeout(1):
await long_running_task()
```
## `asyncio.TaskGroup`
```toml
[environment]
python-version = "3.11"
```
```py
import asyncio
async def long_running_task():
await asyncio.sleep(5)
async def main():
async with asyncio.TaskGroup() as tg:
# TODO: should be `TaskGroup`
reveal_type(tg) # revealed: Unknown
tg.create_task(long_running_task())
reveal_type(f) # revealed: @Todo(async `with` statement)
```

View File

@@ -10,8 +10,6 @@ jax # too many iterations
mypy # too many iterations (self-recursive type alias)
packaging # too many iterations
pandas # slow (9s)
pandas-stubs # panics on versions of pandas-stubs newer than https://github.com/pandas-dev/pandas-stubs/commit/bf1221eb7ea0e582c30fe233d1f4f5713fce376b
# Panicked at crates/ty_python_semantic/src/types/type_ordering.rs:207:13 when checking `/tmp/mypy_primer/projects/pandas-stubs/tests/test_indexes.py`: `internal error: entered unreachable code: our type representation does not permit nested unions`
pandera # too many iterations
pip # vendors packaging, see above
pylint # cycle panics (self-recursive type alias)

View File

@@ -69,6 +69,7 @@ openlibrary
operator
optuna
paasta
pandas-stubs
paroxython
parso
pegen

View File

@@ -26,7 +26,7 @@ fn dunder_all_names_cycle_initial(_db: &dyn Db, _file: File) -> Option<FxHashSet
/// Returns a set of names in the `__all__` variable for `file`, [`None`] if it is not defined or
/// if it contains invalid elements.
#[salsa::tracked(returns(as_ref), cycle_fn=dunder_all_names_cycle_recover, cycle_initial=dunder_all_names_cycle_initial, heap_size=get_size2::heap_size)]
#[salsa::tracked(returns(as_ref), cycle_fn=dunder_all_names_cycle_recover, cycle_initial=dunder_all_names_cycle_initial, heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();

View File

@@ -319,6 +319,30 @@ impl LintRegistryBuilder {
}
}
/// Registers `from` as an alias for the already-registered lint `to`.
///
/// The alias entry stores the resolved target directly, so lookups through
/// the alias never chase more than one level of indirection.
///
/// # Panics
///
/// Panics if `to` is itself registered as an alias, if `to` was never
/// registered, or if a lint (or alias) named `from` already exists.
#[track_caller]
pub fn register_alias(&mut self, from: LintName, to: &'static LintMetadata) {
    // Resolve the target up front; an alias may point at a live or a removed
    // lint, but never at another alias.
    let target = match self.by_name.get(to.name.as_str()) {
        Some(LintEntry::Lint(target) | LintEntry::Removed(target)) => target,
        Some(LintEntry::Alias(target)) => {
            panic!(
                "lint alias {from} -> {to:?} points to another alias {target:?}",
                target = target.name()
            )
        }
        None => panic!(
            "lint alias {from} -> {to} points to non-registered lint",
            to = to.name
        ),
    };
    // `insert` returning `Some` would mean the name was already taken.
    assert_eq!(
        self.by_name
            .insert(from.as_str(), LintEntry::Alias(*target)),
        None,
        "duplicate lint registration for '{from}'",
    );
}
pub fn build(self) -> LintRegistry {
LintRegistry {
lints: self.lints,
@@ -338,6 +362,13 @@ impl LintRegistry {
pub fn get(&self, code: &str) -> Result<LintId, GetLintError> {
match self.by_name.get(code) {
Some(LintEntry::Lint(metadata)) => Ok(*metadata),
Some(LintEntry::Alias(lint)) => {
if lint.status.is_removed() {
Err(GetLintError::Removed(lint.name()))
} else {
Ok(*lint)
}
}
Some(LintEntry::Removed(lint)) => Err(GetLintError::Removed(lint.name())),
None => {
if let Some(without_prefix) = DiagnosticId::strip_category(code) {
@@ -359,6 +390,19 @@ impl LintRegistry {
&self.lints
}
/// Returns an iterator over all registered aliases, paired with the lint
/// each alias resolves to.
///
/// Aliases whose target lint has been removed are included.
pub fn aliases(&self) -> impl Iterator<Item = (LintName, LintId)> + '_ {
    self.by_name.iter().filter_map(|(name, entry)| match entry {
        LintEntry::Alias(target) => Some((LintName::of(name), *target)),
        _ => None,
    })
}
/// Iterates over all removed lints.
pub fn removed(&self) -> impl Iterator<Item = LintId> + '_ {
self.by_name.iter().filter_map(|(_, value)| {
@@ -396,6 +440,7 @@ pub enum LintEntry {
Lint(LintId),
/// A lint rule that has been removed.
Removed(LintId),
Alias(LintId),
}
impl LintEntry {
@@ -403,6 +448,7 @@ impl LintEntry {
match self {
LintEntry::Lint(id) => id,
LintEntry::Removed(id) => id,
LintEntry::Alias(id) => id,
}
}
}
@@ -456,6 +502,18 @@ impl RuleSelection {
RuleSelection { lints }
}
/// Returns an iterator over every currently enabled lint.
pub fn enabled(&self) -> impl Iterator<Item = LintId> + '_ {
    self.lints.iter().map(|(id, _)| *id)
}
/// Returns an iterator over each enabled lint together with its configured
/// severity.
pub fn iter(&self) -> impl ExactSizeIterator<Item = (LintId, Severity)> + '_ {
    self.lints
        .iter()
        .map(|(id, settings)| (*id, settings.0))
}
/// Returns the configured severity for the lint with the given id or `None` if the lint is disabled.
pub fn severity(&self, lint: LintId) -> Option<Severity> {
self.lints.get(&lint).map(|(severity, _)| *severity)

View File

@@ -1,209 +0,0 @@
// This is a Dot representation of a flow diagram meant to describe Python's
// import resolution rules. This particular diagram starts with one particular
// search path and one particular module name. (Typical import resolution
// implementation will try multiple search paths.)
//
// This diagram also assumes that stubs are allowed. The ty implementation
// of import resolution makes this a configurable parameter, but it should
// be straight-forward to adapt this flow diagram to one where no stubs
// are allowed. (i.e., Remove `.pyi` checks and remove the `package-stubs`
// handling.)
//
// This flow diagram exists to act as a sort of specification. At the time
// of writing (2025-07-29), it was written to capture the implementation of
// resolving a *particular* module name. We wanted to add another code path for
// *listing* available module names. Since code reuse is somewhat difficult
// between these two access patterns, I wrote this flow diagram as a way of 1)
// learning how module resolution works and 2) providing a "source of truth"
// that we can compare implementations to.
//
// To convert this file into an actual image, you'll need the `dot` program
// (which is typically part of a `graphviz` package in a Linux distro):
//
// dot -Tsvg import-resolution-diagram.dot > import-resolution-diagram.svg
//
// And then view it in a web browser (or some other svg viewer):
//
// firefox ./import-resolution-diagram.svg
//
// [Dot]: https://graphviz.org/doc/info/lang.html
digraph python_import_resolution {
    labelloc="t";
    label=<
        <b>Python import resolution flow diagram for a single module name in a single "search path"</b>
        <br/>(assumes that the module name is valid and that stubs are allowed)
    >;

    // These are the final affirmative states we can end up in. A
    // module is a regular `foo.py` file module. A package is a
    // directory containing an `__init__.py`. A namespace package is a
    // directory that does *not* contain an `__init__.py`.
    module [label="Single-file Module",peripheries=2];
    package [label="Package",peripheries=2];
    namespace_package [label="Namespace Package",peripheries=2];
    not_found [label="Module Not Found",peripheries=2];

    // The final states are wrapped in a subgraph with invisible edges
    // to convince GraphViz to give a more human digestible rendering.
    // Without this, the nodes are scattered every which way and the
    // flow diagram is pretty hard to follow. This encourages (but does
    // not guarantee) GraphViz to put these nodes "close" together, and
    // this generally gets us something grokable.
    subgraph final {
        rank = same;
        module -> package -> namespace_package -> not_found [style=invis];
    }

    // Entry point of the flow.
    START [label=<<b>START</b>>];
    START -> non_shadowable;

    // Certain built-in module names (like `types`) can never be shadowed
    // by a search path other than the standard library.
    non_shadowable [label=<
        Is the search path not the standard library and<br/>
        the module name is `types` or some other built-in?
    >];
    non_shadowable -> not_found [label="Yes"];
    non_shadowable -> stub_package_check [label="No"];

    // Outside the standard library, a `{top-package}-stubs` candidate is
    // tried first; the original name is retried later via `bail`/`retry`.
    stub_package_check [label=<
        Is the search path in the standard library?
    >];
    stub_package_check -> stub_package_set [label="No"];
    stub_package_check -> determine_parent_kind [label="Yes"];

    stub_package_set [label=<
        Set `module_name` to `{top-package}-stubs.{rest}`
    >];
    stub_package_set -> determine_parent_kind;

    // Decide whether the module's parents are all regular packages (with
    // `__init__` files) or whether namespace-package handling is needed.
    determine_parent_kind [label=<
        Does every parent package of `module_name`<br/>
        correspond to a directory that contains an<br/>
        `__init__.py` or an `__init__.pyi`?
    >];
    determine_parent_kind -> regular_parent_std [label="Yes"];
    determine_parent_kind -> namespace_parent_regular_check [label="No"];

    regular_parent_std [label=<
        Does the search path correspond to the standard library?
    >];
    regular_parent_std -> resolved_parent_package [label="No"];
    regular_parent_std -> regular_parent_typeshed_check [label="Yes"];

    regular_parent_typeshed_check [label=<
        Does every parent package of<br/>
        `module_name` exist on the configured<br/>
        Python version according to <br/>
        typeshed's VERSIONS file?
    >];
    regular_parent_typeshed_check -> resolved_parent_package [label="Yes"];
    regular_parent_typeshed_check -> bail [label="No"];

    // Namespace-package handling: the parents lack `__init__` files, so
    // extra checks guard against ambiguity with regular packages and with
    // sibling single-file modules.
    namespace_parent_regular_check [label=<
        Is the direct parent package<br/>
        a directory that contains<br/>
        an `__init__.py` or `__init__.pyi`?
    >];
    namespace_parent_regular_check -> bail [label="Yes"];
    namespace_parent_regular_check -> namespace_parent_std [label="No"];

    namespace_parent_std [label=<
        Does the search path correspond to the standard library?
    >];
    namespace_parent_std -> namespace_parent_module_check [label="No"];
    namespace_parent_std -> namespace_parent_typeshed_check [label="Yes"];

    namespace_parent_typeshed_check [label=<
        Does the direct parent package of<br/>
        `module_name` exist on the configured<br/>
        Python version according to <br/>
        typeshed's VERSIONS file?
    >];
    namespace_parent_typeshed_check -> namespace_parent_module_check [label="Yes"];
    namespace_parent_typeshed_check -> bail [label="No"];

    namespace_parent_module_check [label=<
        Does the direct parent package<br/>
        have a sibling file with the same<br/>
        basename and a `py` or `pyi` extension?<br/>
    >];
    namespace_parent_module_check -> bail [label="Yes"];
    namespace_parent_module_check -> namespace_parent_above [label="No"];

    namespace_parent_above [label=<
        Is every parent above the direct<br/>
        parent package a normal package or<br/>
        otherwise satisfy the previous two<br/>
        namespace package requirements?
    >];
    namespace_parent_above -> bail [label="No"];
    namespace_parent_above -> resolved_parent_package [label="Yes"];

    // Parents are resolved; now classify the module itself as a package,
    // a single-file module, or a namespace package.
    resolved_parent_package [label=<
        After replacing `.` with `/` in module name,<br/>
        does `{path}/__init__.py` or `{path}/__init__.pyi` exist?
    >];
    resolved_parent_package -> package [label="Yes"];
    resolved_parent_package -> maybe_module [label="No"];

    maybe_module [label=<
        Does `{path}.py` or `{path}.pyi` exist?
    >];
    maybe_module -> maybe_module_std [label="Yes"];
    maybe_module -> maybe_namespace [label="No"];

    maybe_module_std [label=<
        Does the search path correspond to the standard library?
    >];
    maybe_module_std -> module [label="No"];
    maybe_module_std -> maybe_module_typeshed_check [label="Yes"];

    maybe_module_typeshed_check [label=<
        Does the module corresponding to `{path}`<br/>
        exist on the configured<br/>
        Python version according to <br/>
        typeshed's VERSIONS file?
    >];
    maybe_module_typeshed_check -> module [label="Yes"];
    maybe_module_typeshed_check -> maybe_namespace [label="No"];

    // N.B. In the actual implementation, this check is
    // only done when the search path *isn't* the standard
    // library. That's because typeshed doesn't use namespace
    // packages, so this (and the typeshed VERSIONS check)
    // can all be skipped as an optimization. But the flow
    // diagram still represents this because this could in
    // theory change and optimizations really should be the
    // domain of the implementation, not the spec.
    maybe_namespace [label=<
        Is `{path}` a directory?
    >];
    maybe_namespace -> maybe_namespace_std [label="Yes"];
    maybe_namespace -> bail [label="No"];

    maybe_namespace_std [label=<
        Does the search path correspond to the standard library?
    >];
    maybe_namespace_std -> namespace_package [label="No"];
    maybe_namespace_std -> maybe_namespace_typeshed_check [label="Yes"];

    maybe_namespace_typeshed_check [label=<
        Does the module corresponding to `{path}`<br/>
        exist on the configured<br/>
        Python version according to <br/>
        typeshed's VERSIONS file?
    >];
    maybe_namespace_typeshed_check -> namespace_package [label="Yes"];
    maybe_namespace_typeshed_check -> bail [label="No"];

    // Nothing found: if we were trying the `-stubs` candidate, restore the
    // original module name and retry once; otherwise the module is not found.
    bail [label=<
        Is `module_name` set to a stub package candidate?
    >];
    bail -> not_found [label="No"];
    bail -> retry [label="Yes"];

    retry [label=<
        Reset `module_name` to original
    >];
    retry -> determine_parent_kind;
}

View File

@@ -1,468 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 13.0.1 (0)
-->
<!-- Title: python_import_resolution Pages: 1 -->
<svg width="1837pt" height="2172pt"
viewBox="0.00 0.00 1837.00 2172.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 2167.68)">
<title>python_import_resolution</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-2167.68 1832.95,-2167.68 1832.95,4 -4,4"/>
<text xml:space="preserve" text-anchor="start" x="524.85" y="-2147.38" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;</text>
<text xml:space="preserve" text-anchor="start" x="560.85" y="-2147.38" font-family="Times,serif" font-weight="bold" font-size="14.00">Python import resolution flow diagram for a single module name in a single &quot;search path&quot;</text>
<text xml:space="preserve" text-anchor="start" x="1268.1" y="-2147.38" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;</text>
<text xml:space="preserve" text-anchor="start" x="663.23" y="-2133.38" font-family="Times,serif" font-size="14.00">(assumes that the module name is valid and that stubs are allowed) &#160;&#160;&#160;</text>
<!-- module -->
<g id="node1" class="node">
<title>module</title>
<ellipse fill="none" stroke="black" cx="105.71" cy="-22" rx="101.71" ry="18"/>
<ellipse fill="none" stroke="black" cx="105.71" cy="-22" rx="105.71" ry="22"/>
<text xml:space="preserve" text-anchor="middle" x="105.71" y="-17.32" font-family="Times,serif" font-size="14.00">Single&#45;file Module</text>
</g>
<!-- package -->
<g id="node2" class="node">
<title>package</title>
<ellipse fill="none" stroke="black" cx="302.71" cy="-22" rx="52.26" ry="18"/>
<ellipse fill="none" stroke="black" cx="302.71" cy="-22" rx="56.26" ry="22"/>
<text xml:space="preserve" text-anchor="middle" x="302.71" y="-17.32" font-family="Times,serif" font-size="14.00">Package</text>
</g>
<!-- module&#45;&gt;package -->
<!-- namespace_package -->
<g id="node3" class="node">
<title>namespace_package</title>
<ellipse fill="none" stroke="black" cx="529.71" cy="-22" rx="113.29" ry="18"/>
<ellipse fill="none" stroke="black" cx="529.71" cy="-22" rx="117.29" ry="22"/>
<text xml:space="preserve" text-anchor="middle" x="529.71" y="-17.32" font-family="Times,serif" font-size="14.00">Namespace Package</text>
</g>
<!-- package&#45;&gt;namespace_package -->
<!-- not_found -->
<g id="node4" class="node">
<title>not_found</title>
<ellipse fill="none" stroke="black" cx="1227.71" cy="-22" rx="104.35" ry="18"/>
<ellipse fill="none" stroke="black" cx="1227.71" cy="-22" rx="108.35" ry="22"/>
<text xml:space="preserve" text-anchor="middle" x="1227.71" y="-17.32" font-family="Times,serif" font-size="14.00">Module Not Found</text>
</g>
<!-- namespace_package&#45;&gt;not_found -->
<!-- START -->
<g id="node5" class="node">
<title>START</title>
<ellipse fill="none" stroke="black" cx="1521.71" cy="-2109.68" rx="47.53" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="1495.84" y="-2106" font-family="Times,serif" font-weight="bold" font-size="14.00">START</text>
</g>
<!-- non_shadowable -->
<g id="node6" class="node">
<title>non_shadowable</title>
<ellipse fill="none" stroke="black" cx="1521.71" cy="-2024.63" rx="307.24" ry="30.05"/>
<text xml:space="preserve" text-anchor="start" x="1335.71" y="-2028.58" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Is the search path not the standard library and</text>
<text xml:space="preserve" text-anchor="start" x="1312.46" y="-2011.33" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;the module name is `types` or some other built&#45;in? &#160;&#160;&#160;</text>
</g>
<!-- START&#45;&gt;non_shadowable -->
<g id="edge4" class="edge">
<title>START&#45;&gt;non_shadowable</title>
<path fill="none" stroke="black" d="M1521.71,-2091.47C1521.71,-2084.14 1521.71,-2075.24 1521.71,-2066.39"/>
<polygon fill="black" stroke="black" points="1525.21,-2066.43 1521.71,-2056.43 1518.21,-2066.43 1525.21,-2066.43"/>
</g>
<!-- non_shadowable&#45;&gt;not_found -->
<g id="edge5" class="edge">
<title>non_shadowable&#45;&gt;not_found</title>
<path fill="none" stroke="black" d="M1654.8,-1997.15C1687.69,-1982.48 1713.71,-1959.69 1713.71,-1924.32 1713.71,-1924.32 1713.71,-1924.32 1713.71,-114.25 1713.71,-76.19 1480.54,-47.08 1337.21,-32.77"/>
<polygon fill="black" stroke="black" points="1337.57,-29.29 1327.28,-31.79 1336.89,-36.26 1337.57,-29.29"/>
<text xml:space="preserve" text-anchor="middle" x="1725.71" y="-949.84" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- stub_package_check -->
<g id="node7" class="node">
<title>stub_package_check</title>
<ellipse fill="none" stroke="black" cx="1329.71" cy="-1923.32" rx="261.65" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="1151.21" y="-1918.65" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Is the search path in the standard library? &#160;&#160;&#160;</text>
</g>
<!-- non_shadowable&#45;&gt;stub_package_check -->
<g id="edge6" class="edge">
<title>non_shadowable&#45;&gt;stub_package_check</title>
<path fill="none" stroke="black" d="M1465.66,-1994.64C1436.12,-1979.36 1400.44,-1960.9 1373.04,-1946.73"/>
<polygon fill="black" stroke="black" points="1374.81,-1943.71 1364.32,-1942.22 1371.59,-1949.93 1374.81,-1943.71"/>
<text xml:space="preserve" text-anchor="middle" x="1435.84" y="-1963.27" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- stub_package_set -->
<g id="node8" class="node">
<title>stub_package_set</title>
<ellipse fill="none" stroke="black" cx="1329.71" cy="-1834.07" rx="312.68" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="1114.84" y="-1829.4" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Set `module_name` to `{top&#45;package}&#45;stubs.{rest}` &#160;&#160;&#160;</text>
</g>
<!-- stub_package_check&#45;&gt;stub_package_set -->
<g id="edge7" class="edge">
<title>stub_package_check&#45;&gt;stub_package_set</title>
<path fill="none" stroke="black" d="M1329.71,-1905.09C1329.71,-1893.26 1329.71,-1877.28 1329.71,-1863.57"/>
<polygon fill="black" stroke="black" points="1333.21,-1863.79 1329.71,-1853.79 1326.21,-1863.79 1333.21,-1863.79"/>
<text xml:space="preserve" text-anchor="middle" x="1339.84" y="-1874.02" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- determine_parent_kind -->
<g id="node9" class="node">
<title>determine_parent_kind</title>
<ellipse fill="none" stroke="black" cx="974.71" cy="-1736.83" rx="269.05" ry="42.25"/>
<text xml:space="preserve" text-anchor="start" x="792.46" y="-1749.4" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does every parent package of `module_name`</text>
<text xml:space="preserve" text-anchor="start" x="805.96" y="-1732.15" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;correspond to a directory that contains an</text>
<text xml:space="preserve" text-anchor="start" x="832.59" y="-1714.9" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;`__init__.py` or an `__init__.pyi`? &#160;&#160;&#160;</text>
</g>
<!-- stub_package_check&#45;&gt;determine_parent_kind -->
<g id="edge8" class="edge">
<title>stub_package_check&#45;&gt;determine_parent_kind</title>
<path fill="none" stroke="black" d="M1171.52,-1908.62C1092.86,-1898.32 1009.21,-1880.88 983.71,-1852.07 969.18,-1835.66 965.49,-1812.2 966.01,-1790.82"/>
<polygon fill="black" stroke="black" points="969.49,-1791.23 966.56,-1781.05 962.5,-1790.83 969.49,-1791.23"/>
<text xml:space="preserve" text-anchor="middle" x="995.71" y="-1829.4" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- stub_package_set&#45;&gt;determine_parent_kind -->
<g id="edge9" class="edge">
<title>stub_package_set&#45;&gt;determine_parent_kind</title>
<path fill="none" stroke="black" d="M1266.25,-1816.05C1225.47,-1805.11 1170.77,-1790.43 1119.77,-1776.75"/>
<polygon fill="black" stroke="black" points="1120.98,-1773.45 1110.41,-1774.23 1119.17,-1780.21 1120.98,-1773.45"/>
</g>
<!-- regular_parent_std -->
<g id="node10" class="node">
<title>regular_parent_std</title>
<ellipse fill="none" stroke="black" cx="617.71" cy="-1197.73" rx="337.41" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="385.21" y="-1193.06" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the search path correspond to the standard library? &#160;&#160;&#160;</text>
</g>
<!-- determine_parent_kind&#45;&gt;regular_parent_std -->
<g id="edge10" class="edge">
<title>determine_parent_kind&#45;&gt;regular_parent_std</title>
<path fill="none" stroke="black" d="M832.87,-1700.59C787.46,-1680.2 748.71,-1648.59 748.71,-1600.08 748.71,-1600.08 748.71,-1600.08 748.71,-1358.88 748.71,-1299.61 694.28,-1250.21 655.59,-1222.58"/>
<polygon fill="black" stroke="black" points="657.6,-1219.71 647.38,-1216.89 653.61,-1225.47 657.6,-1219.71"/>
<text xml:space="preserve" text-anchor="middle" x="760.71" y="-1480.9" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_regular_check -->
<g id="node11" class="node">
<title>namespace_parent_regular_check</title>
<ellipse fill="none" stroke="black" cx="1296.71" cy="-1599.08" rx="212.31" ry="42.25"/>
<text xml:space="preserve" text-anchor="start" x="1177.84" y="-1611.65" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Is the direct parent package</text>
<text xml:space="preserve" text-anchor="start" x="1190.59" y="-1594.4" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;a directory that contains</text>
<text xml:space="preserve" text-anchor="start" x="1154.59" y="-1577.15" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;an `__init__.py` or `__init__.pyi`? &#160;&#160;&#160;</text>
</g>
<!-- determine_parent_kind&#45;&gt;namespace_parent_regular_check -->
<g id="edge11" class="edge">
<title>determine_parent_kind&#45;&gt;namespace_parent_regular_check</title>
<path fill="none" stroke="black" d="M1067.42,-1696.74C1107.93,-1679.67 1155.51,-1659.6 1196.8,-1642.2"/>
<polygon fill="black" stroke="black" points="1197.91,-1645.53 1205.76,-1638.42 1195.19,-1639.08 1197.91,-1645.53"/>
<text xml:space="preserve" text-anchor="middle" x="1165.12" y="-1663.28" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- resolved_parent_package -->
<g id="node12" class="node">
<title>resolved_parent_package</title>
<ellipse fill="none" stroke="black" cx="512.71" cy="-897.84" rx="328.45" ry="30.05"/>
<text xml:space="preserve" text-anchor="start" x="339.09" y="-901.79" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;After replacing `.` with `/` in module name,</text>
<text xml:space="preserve" text-anchor="start" x="288.46" y="-884.54" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;does `{path}/__init__.py` or `{path}/__init__.pyi` exist? &#160;&#160;&#160;</text>
</g>
<!-- regular_parent_std&#45;&gt;resolved_parent_package -->
<g id="edge12" class="edge">
<title>regular_parent_std&#45;&gt;resolved_parent_package</title>
<path fill="none" stroke="black" d="M568.87,-1179.57C530.36,-1163.14 479.31,-1134.25 455.46,-1090.04 432.49,-1047.44 440.98,-1027.32 455.46,-981.14 460.37,-965.51 469.41,-950.24 478.88,-937.19"/>
<polygon fill="black" stroke="black" points="481.61,-939.38 484.88,-929.3 476.04,-935.14 481.61,-939.38"/>
<text xml:space="preserve" text-anchor="middle" x="465.59" y="-1030.92" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- regular_parent_typeshed_check -->
<g id="node13" class="node">
<title>regular_parent_typeshed_check</title>
<ellipse fill="none" stroke="black" cx="719.71" cy="-1035.59" rx="235.11" ry="54.45"/>
<text xml:space="preserve" text-anchor="start" x="595.59" y="-1056.79" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does every parent package of</text>
<text xml:space="preserve" text-anchor="start" x="561.46" y="-1039.54" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;`module_name` exist on the configured</text>
<text xml:space="preserve" text-anchor="start" x="599.34" y="-1022.29" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Python version according to </text>
<text xml:space="preserve" text-anchor="start" x="595.96" y="-1005.04" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;typeshed&#39;s VERSIONS file? &#160;&#160;&#160;</text>
</g>
<!-- regular_parent_std&#45;&gt;regular_parent_typeshed_check -->
<g id="edge13" class="edge">
<title>regular_parent_std&#45;&gt;regular_parent_typeshed_check</title>
<path fill="none" stroke="black" d="M628.82,-1179.29C641.02,-1160.14 661.32,-1128.27 679.64,-1099.5"/>
<polygon fill="black" stroke="black" points="682.54,-1101.48 684.96,-1091.16 676.63,-1097.72 682.54,-1101.48"/>
<text xml:space="preserve" text-anchor="middle" x="683.77" y="-1111.99" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- bail -->
<g id="node14" class="node">
<title>bail</title>
<ellipse fill="none" stroke="black" cx="1222.71" cy="-115.25" rx="306.9" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="1011.96" y="-110.58" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Is `module_name` set to a stub package candidate? &#160;&#160;&#160;</text>
</g>
<!-- namespace_parent_regular_check&#45;&gt;bail -->
<g id="edge16" class="edge">
<title>namespace_parent_regular_check&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M1473.33,-1575.23C1566.5,-1558.05 1661.71,-1529.89 1661.71,-1486.58 1661.71,-1486.58 1661.71,-1486.58 1661.71,-239.95 1661.71,-181.48 1515.65,-149.31 1392.02,-132.55"/>
<polygon fill="black" stroke="black" points="1392.77,-129.12 1382.39,-131.27 1391.85,-136.06 1392.77,-129.12"/>
<text xml:space="preserve" text-anchor="middle" x="1673.71" y="-791.86" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_std -->
<g id="node15" class="node">
<title>namespace_parent_std</title>
<ellipse fill="none" stroke="black" cx="1296.71" cy="-1485.58" rx="337.41" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="1064.21" y="-1480.9" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the search path correspond to the standard library? &#160;&#160;&#160;</text>
</g>
<!-- namespace_parent_regular_check&#45;&gt;namespace_parent_std -->
<g id="edge17" class="edge">
<title>namespace_parent_regular_check&#45;&gt;namespace_parent_std</title>
<path fill="none" stroke="black" d="M1296.71,-1556.56C1296.71,-1542.76 1296.71,-1527.72 1296.71,-1515.13"/>
<polygon fill="black" stroke="black" points="1300.21,-1515.45 1296.71,-1505.45 1293.21,-1515.45 1300.21,-1515.45"/>
<text xml:space="preserve" text-anchor="middle" x="1306.84" y="-1525.53" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- resolved_parent_package&#45;&gt;package -->
<g id="edge26" class="edge">
<title>resolved_parent_package&#45;&gt;package</title>
<path fill="none" stroke="black" d="M335.99,-872.15C249.83,-849.72 155.16,-806.93 109.71,-725.29 106.14,-718.87 106.3,-292.18 152.71,-186.5 178.13,-128.62 232.6,-77.77 268.43,-48.71"/>
<polygon fill="black" stroke="black" points="270.28,-51.71 275.92,-42.74 265.92,-46.23 270.28,-51.71"/>
<text xml:space="preserve" text-anchor="middle" x="127.24" y="-451.22" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_module -->
<g id="node19" class="node">
<title>maybe_module</title>
<ellipse fill="none" stroke="black" cx="512.71" cy="-796.54" rx="249.55" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="342.84" y="-791.86" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does `{path}.py` or `{path}.pyi` exist? &#160;&#160;&#160;</text>
</g>
<!-- resolved_parent_package&#45;&gt;maybe_module -->
<g id="edge27" class="edge">
<title>resolved_parent_package&#45;&gt;maybe_module</title>
<path fill="none" stroke="black" d="M512.71,-867.32C512.71,-854.41 512.71,-839.31 512.71,-826.47"/>
<polygon fill="black" stroke="black" points="516.21,-826.5 512.71,-816.5 509.21,-826.5 516.21,-826.5"/>
<text xml:space="preserve" text-anchor="middle" x="522.84" y="-836.49" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- regular_parent_typeshed_check&#45;&gt;resolved_parent_package -->
<g id="edge14" class="edge">
<title>regular_parent_typeshed_check&#45;&gt;resolved_parent_package</title>
<path fill="none" stroke="black" d="M642.34,-983.85C617.22,-967.38 589.9,-949.46 566.85,-934.34"/>
<polygon fill="black" stroke="black" points="568.8,-931.44 558.52,-928.88 564.96,-937.29 568.8,-931.44"/>
<text xml:space="preserve" text-anchor="middle" x="622.01" y="-949.84" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- regular_parent_typeshed_check&#45;&gt;bail -->
<g id="edge15" class="edge">
<title>regular_parent_typeshed_check&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M864.91,-992.41C903.53,-971.83 934.71,-941.88 934.71,-898.84 934.71,-898.84 934.71,-898.84 934.71,-661.66 934.71,-545.82 952.52,-226.37 1040.71,-151.25 1048.58,-144.55 1063.97,-139.01 1082.6,-134.46"/>
<polygon fill="black" stroke="black" points="1083.12,-137.93 1092.1,-132.31 1081.57,-131.1 1083.12,-137.93"/>
<text xml:space="preserve" text-anchor="middle" x="949.92" y="-576.92" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- bail&#45;&gt;not_found -->
<g id="edge40" class="edge">
<title>bail&#45;&gt;not_found</title>
<path fill="none" stroke="black" d="M1223.66,-97.09C1224.3,-85.39 1225.16,-69.54 1225.93,-55.5"/>
<polygon fill="black" stroke="black" points="1229.41,-55.95 1226.47,-45.77 1222.42,-55.57 1229.41,-55.95"/>
<text xml:space="preserve" text-anchor="middle" x="1235.64" y="-65.95" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- retry -->
<g id="node25" class="node">
<title>retry</title>
<ellipse fill="none" stroke="black" cx="887.71" cy="-22" rx="213.78" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="743.34" y="-17.32" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Reset `module_name` to original &#160;&#160;&#160;</text>
</g>
<!-- bail&#45;&gt;retry -->
<g id="edge41" class="edge">
<title>bail&#45;&gt;retry</title>
<path fill="none" stroke="black" d="M1160.1,-97.19C1103.23,-81.7 1019.36,-58.86 959.73,-42.62"/>
<polygon fill="black" stroke="black" points="960.92,-39.31 950.35,-40.06 959.08,-46.07 960.92,-39.31"/>
<text xml:space="preserve" text-anchor="middle" x="1097.22" y="-65.95" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_module_check -->
<g id="node16" class="node">
<title>namespace_parent_module_check</title>
<ellipse fill="none" stroke="black" cx="1358.71" cy="-1197.73" rx="242.54" ry="54.45"/>
<text xml:space="preserve" text-anchor="start" x="1228.59" y="-1218.93" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the direct parent package</text>
<text xml:space="preserve" text-anchor="start" x="1225.21" y="-1201.68" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;have a sibling file with the same</text>
<text xml:space="preserve" text-anchor="start" x="1195.21" y="-1184.43" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;basename and a `py` or `pyi` extension?</text>
<text xml:space="preserve" text-anchor="start" x="1349.71" y="-1167.18" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;</text>
</g>
<!-- namespace_parent_std&#45;&gt;namespace_parent_module_check -->
<g id="edge18" class="edge">
<title>namespace_parent_std&#45;&gt;namespace_parent_module_check</title>
<path fill="none" stroke="black" d="M1365.9,-1467.6C1393.51,-1457.06 1422.54,-1440.38 1438.71,-1414.33 1464.25,-1373.21 1454.02,-1351.35 1438.71,-1305.43 1433.43,-1289.58 1424.77,-1274.18 1415.05,-1260.24"/>
<polygon fill="black" stroke="black" points="1418.14,-1258.55 1409.42,-1252.53 1412.48,-1262.67 1418.14,-1258.55"/>
<text xml:space="preserve" text-anchor="middle" x="1464.38" y="-1355.2" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- namespace_parent_typeshed_check -->
<g id="node17" class="node">
<title>namespace_parent_typeshed_check</title>
<ellipse fill="none" stroke="black" cx="1194.71" cy="-1359.88" rx="235.11" ry="54.45"/>
<text xml:space="preserve" text-anchor="start" x="1055.59" y="-1381.08" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the direct parent package of</text>
<text xml:space="preserve" text-anchor="start" x="1036.46" y="-1363.83" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;`module_name` exist on the configured</text>
<text xml:space="preserve" text-anchor="start" x="1074.34" y="-1346.58" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Python version according to </text>
<text xml:space="preserve" text-anchor="start" x="1070.96" y="-1329.33" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;typeshed&#39;s VERSIONS file? &#160;&#160;&#160;</text>
</g>
<!-- namespace_parent_std&#45;&gt;namespace_parent_typeshed_check -->
<g id="edge19" class="edge">
<title>namespace_parent_std&#45;&gt;namespace_parent_typeshed_check</title>
<path fill="none" stroke="black" d="M1282.32,-1467.12C1272.54,-1455.26 1258.98,-1438.81 1245.44,-1422.4"/>
<polygon fill="black" stroke="black" points="1248.41,-1420.49 1239.35,-1415.01 1243.01,-1424.95 1248.41,-1420.49"/>
<text xml:space="preserve" text-anchor="middle" x="1278.57" y="-1436.28" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_module_check&#45;&gt;bail -->
<g id="edge22" class="edge">
<title>namespace_parent_module_check&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M1480.33,-1150.23C1524.93,-1124.65 1564.71,-1087.55 1564.71,-1036.59 1564.71,-1036.59 1564.71,-1036.59 1564.71,-239.95 1564.71,-190.04 1434.32,-154.61 1334.56,-134.8"/>
<polygon fill="black" stroke="black" points="1335.45,-131.4 1324.96,-132.92 1334.11,-138.27 1335.45,-131.4"/>
<text xml:space="preserve" text-anchor="middle" x="1576.71" y="-657.99" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_above -->
<g id="node18" class="node">
<title>namespace_parent_above</title>
<ellipse fill="none" stroke="black" cx="1309.71" cy="-1035.59" rx="227.16" ry="54.45"/>
<text xml:space="preserve" text-anchor="start" x="1176.96" y="-1056.79" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Is every parent above the direct</text>
<text xml:space="preserve" text-anchor="start" x="1161.59" y="-1039.54" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;parent package a normal package or</text>
<text xml:space="preserve" text-anchor="start" x="1168.34" y="-1022.29" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;otherwise satisfy the previous two</text>
<text xml:space="preserve" text-anchor="start" x="1157.09" y="-1005.04" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;namespace package requirements? &#160;&#160;&#160;</text>
</g>
<!-- namespace_parent_module_check&#45;&gt;namespace_parent_above -->
<g id="edge23" class="edge">
<title>namespace_parent_module_check&#45;&gt;namespace_parent_above</title>
<path fill="none" stroke="black" d="M1342.26,-1142.96C1338.16,-1129.56 1333.72,-1115.04 1329.47,-1101.17"/>
<polygon fill="black" stroke="black" points="1332.84,-1100.21 1326.57,-1091.67 1326.15,-1102.25 1332.84,-1100.21"/>
<text xml:space="preserve" text-anchor="middle" x="1345.81" y="-1111.99" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- namespace_parent_typeshed_check&#45;&gt;bail -->
<g id="edge21" class="edge">
<title>namespace_parent_typeshed_check&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M1117.34,-1308.13C1084.97,-1280.37 1054.71,-1242.68 1054.71,-1198.73 1054.71,-1198.73 1054.71,-1198.73 1054.71,-239.95 1054.71,-188.54 1106.36,-156.11 1152.39,-137.42"/>
<polygon fill="black" stroke="black" points="1153.54,-140.72 1161.6,-133.84 1151.01,-134.2 1153.54,-140.72"/>
<text xml:space="preserve" text-anchor="middle" x="1064.84" y="-702.61" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- namespace_parent_typeshed_check&#45;&gt;namespace_parent_module_check -->
<g id="edge20" class="edge">
<title>namespace_parent_typeshed_check&#45;&gt;namespace_parent_module_check</title>
<path fill="none" stroke="black" d="M1248.41,-1306.44C1263.85,-1291.37 1280.8,-1274.81 1296.7,-1259.3"/>
<polygon fill="black" stroke="black" points="1299.1,-1261.84 1303.81,-1252.35 1294.21,-1256.83 1299.1,-1261.84"/>
<text xml:space="preserve" text-anchor="middle" x="1293.63" y="-1274.13" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_above&#45;&gt;resolved_parent_package -->
<g id="edge25" class="edge">
<title>namespace_parent_above&#45;&gt;resolved_parent_package</title>
<path fill="none" stroke="black" d="M1125.87,-1003.28C990.29,-980.18 807.48,-949.05 676.82,-926.79"/>
<polygon fill="black" stroke="black" points="677.47,-923.35 667.02,-925.12 676.29,-930.25 677.47,-923.35"/>
<text xml:space="preserve" text-anchor="middle" x="899.32" y="-949.84" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- namespace_parent_above&#45;&gt;bail -->
<g id="edge24" class="edge">
<title>namespace_parent_above&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M1283.87,-981.05C1274.29,-956.57 1265.71,-926.84 1265.71,-898.84 1265.71,-898.84 1265.71,-898.84 1265.71,-239.95 1265.71,-205.29 1250.47,-168.04 1238.13,-143.58"/>
<polygon fill="black" stroke="black" points="1241.35,-142.19 1233.61,-134.96 1235.15,-145.44 1241.35,-142.19"/>
<text xml:space="preserve" text-anchor="middle" x="1275.84" y="-576.92" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- maybe_module_std -->
<g id="node20" class="node">
<title>maybe_module_std</title>
<ellipse fill="none" stroke="black" cx="455.71" cy="-707.29" rx="337.41" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="223.21" y="-702.61" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the search path correspond to the standard library? &#160;&#160;&#160;</text>
</g>
<!-- maybe_module&#45;&gt;maybe_module_std -->
<g id="edge28" class="edge">
<title>maybe_module&#45;&gt;maybe_module_std</title>
<path fill="none" stroke="black" d="M501.45,-778.3C493.33,-765.87 482.22,-748.87 472.99,-734.73"/>
<polygon fill="black" stroke="black" points="476.18,-733.22 467.78,-726.76 470.32,-737.04 476.18,-733.22"/>
<text xml:space="preserve" text-anchor="middle" x="501.32" y="-747.24" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_namespace -->
<g id="node21" class="node">
<title>maybe_namespace</title>
<ellipse fill="none" stroke="black" cx="641.71" cy="-455.89" rx="169.06" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="529.21" y="-451.22" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Is `{path}` a directory? &#160;&#160;&#160;</text>
</g>
<!-- maybe_module&#45;&gt;maybe_namespace -->
<g id="edge29" class="edge">
<title>maybe_module&#45;&gt;maybe_namespace</title>
<path fill="none" stroke="black" d="M637.97,-780.54C706.2,-769.51 780.89,-751.78 801.71,-725.29 867.22,-641.96 739.52,-529.48 675.39,-480.89"/>
<polygon fill="black" stroke="black" points="677.74,-478.28 667.63,-475.1 673.55,-483.89 677.74,-478.28"/>
<text xml:space="preserve" text-anchor="middle" x="829.9" y="-657.99" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- maybe_module_std&#45;&gt;module -->
<g id="edge30" class="edge">
<title>maybe_module_std&#45;&gt;module</title>
<path fill="none" stroke="black" d="M303.75,-690.81C262.51,-680.5 220.31,-663.61 187.71,-636.04 104.95,-566.04 90.71,-520.66 90.71,-412.27 90.71,-412.27 90.71,-412.27 90.71,-114.25 90.71,-94.55 94.16,-72.78 97.75,-55.44"/>
<polygon fill="black" stroke="black" points="101.16,-56.22 99.89,-45.7 94.32,-54.72 101.16,-56.22"/>
<text xml:space="preserve" text-anchor="middle" x="100.84" y="-361.97" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- maybe_module_typeshed_check -->
<g id="node22" class="node">
<title>maybe_module_typeshed_check</title>
<ellipse fill="none" stroke="black" cx="455.71" cy="-581.59" rx="258.98" ry="54.45"/>
<text xml:space="preserve" text-anchor="start" x="280.59" y="-602.79" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the module corresponding to `{path}`</text>
<text xml:space="preserve" text-anchor="start" x="355.59" y="-585.54" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;exist on the configured</text>
<text xml:space="preserve" text-anchor="start" x="335.34" y="-568.29" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Python version according to </text>
<text xml:space="preserve" text-anchor="start" x="331.96" y="-551.04" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;typeshed&#39;s VERSIONS file? &#160;&#160;&#160;</text>
</g>
<!-- maybe_module_std&#45;&gt;maybe_module_typeshed_check -->
<g id="edge31" class="edge">
<title>maybe_module_std&#45;&gt;maybe_module_typeshed_check</title>
<path fill="none" stroke="black" d="M455.71,-688.83C455.71,-677.79 455.71,-662.76 455.71,-647.49"/>
<polygon fill="black" stroke="black" points="459.21,-647.89 455.71,-637.89 452.21,-647.89 459.21,-647.89"/>
<text xml:space="preserve" text-anchor="middle" x="467.71" y="-657.99" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_namespace&#45;&gt;bail -->
<g id="edge35" class="edge">
<title>maybe_namespace&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M774.04,-444.39C815.76,-434.62 858.52,-416.78 887.71,-384.64 923.17,-345.61 892.27,-187.35 930.71,-151.25 940.64,-141.93 959.09,-134.98 982.08,-129.81"/>
<polygon fill="black" stroke="black" points="982.77,-133.24 991.85,-127.79 981.35,-126.39 982.77,-133.24"/>
<text xml:space="preserve" text-anchor="middle" x="916.16" y="-317.34" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- maybe_namespace_std -->
<g id="node23" class="node">
<title>maybe_namespace_std</title>
<ellipse fill="none" stroke="black" cx="541.71" cy="-366.64" rx="337.41" ry="18"/>
<text xml:space="preserve" text-anchor="start" x="309.21" y="-361.97" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the search path correspond to the standard library? &#160;&#160;&#160;</text>
</g>
<!-- maybe_namespace&#45;&gt;maybe_namespace_std -->
<g id="edge34" class="edge">
<title>maybe_namespace&#45;&gt;maybe_namespace_std</title>
<path fill="none" stroke="black" d="M621.96,-437.66C607.03,-424.63 586.34,-406.58 569.71,-392.07"/>
<polygon fill="black" stroke="black" points="572.41,-389.78 562.57,-385.84 567.8,-395.05 572.41,-389.78"/>
<text xml:space="preserve" text-anchor="middle" x="612.67" y="-406.59" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_module_typeshed_check&#45;&gt;module -->
<g id="edge32" class="edge">
<title>maybe_module_typeshed_check&#45;&gt;module</title>
<path fill="none" stroke="black" d="M358.83,-530.67C302.78,-496.86 235.63,-447.01 195.71,-384.64 147.14,-308.74 174.73,-273.88 152.71,-186.5 141.13,-140.51 125.59,-87.89 115.61,-55.05"/>
<polygon fill="black" stroke="black" points="119.04,-54.29 112.77,-45.75 112.34,-56.34 119.04,-54.29"/>
<text xml:space="preserve" text-anchor="middle" x="182.48" y="-317.34" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_module_typeshed_check&#45;&gt;maybe_namespace -->
<g id="edge33" class="edge">
<title>maybe_module_typeshed_check&#45;&gt;maybe_namespace</title>
<path fill="none" stroke="black" d="M532.98,-529.21C558.48,-512.25 585.62,-494.2 606.4,-480.38"/>
<polygon fill="black" stroke="black" points="608.11,-483.45 614.5,-474.99 604.24,-477.62 608.11,-483.45"/>
<text xml:space="preserve" text-anchor="middle" x="596.88" y="-495.84" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- maybe_namespace_std&#45;&gt;namespace_package -->
<g id="edge36" class="edge">
<title>maybe_namespace_std&#45;&gt;namespace_package</title>
<path fill="none" stroke="black" d="M431.95,-349.28C400.92,-339.02 370.51,-322.38 351.71,-295.39 324.06,-255.68 332.25,-230.81 351.71,-186.5 378.49,-125.55 440.44,-77.36 483.68,-49.51"/>
<polygon fill="black" stroke="black" points="485.54,-52.47 492.14,-44.18 481.81,-46.55 485.54,-52.47"/>
<text xml:space="preserve" text-anchor="middle" x="381.53" y="-155.2" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- maybe_namespace_typeshed_check -->
<g id="node24" class="node">
<title>maybe_namespace_typeshed_check</title>
<ellipse fill="none" stroke="black" cx="619.71" cy="-240.95" rx="258.98" ry="54.45"/>
<text xml:space="preserve" text-anchor="start" x="444.59" y="-262.15" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Does the module corresponding to `{path}`</text>
<text xml:space="preserve" text-anchor="start" x="519.59" y="-244.9" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;exist on the configured</text>
<text xml:space="preserve" text-anchor="start" x="499.34" y="-227.65" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;Python version according to </text>
<text xml:space="preserve" text-anchor="start" x="495.96" y="-210.4" font-family="Times,serif" font-size="14.00"> &#160;&#160;&#160;&#160;&#160;&#160;&#160;typeshed&#39;s VERSIONS file? &#160;&#160;&#160;</text>
</g>
<!-- maybe_namespace_std&#45;&gt;maybe_namespace_typeshed_check -->
<g id="edge37" class="edge">
<title>maybe_namespace_std&#45;&gt;maybe_namespace_typeshed_check</title>
<path fill="none" stroke="black" d="M552.72,-348.18C560.03,-336.6 570.09,-320.64 580.21,-304.6"/>
<polygon fill="black" stroke="black" points="582.97,-306.79 585.34,-296.46 577.05,-303.05 582.97,-306.79"/>
<text xml:space="preserve" text-anchor="middle" x="585.73" y="-317.34" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_namespace_typeshed_check&#45;&gt;namespace_package -->
<g id="edge38" class="edge">
<title>maybe_namespace_typeshed_check&#45;&gt;namespace_package</title>
<path fill="none" stroke="black" d="M597.47,-186.32C580.32,-144.99 557.2,-89.25 542.88,-54.74"/>
<polygon fill="black" stroke="black" points="546.21,-53.63 539.14,-45.73 539.74,-56.31 546.21,-53.63"/>
<text xml:space="preserve" text-anchor="middle" x="586.71" y="-110.58" font-family="Times,serif" font-size="14.00">Yes</text>
</g>
<!-- maybe_namespace_typeshed_check&#45;&gt;bail -->
<g id="edge39" class="edge">
<title>maybe_namespace_typeshed_check&#45;&gt;bail</title>
<path fill="none" stroke="black" d="M739.84,-192.37C782.94,-177.02 832.31,-161.37 878.46,-151.25 920.17,-142.11 965.24,-135.35 1008.61,-130.35"/>
<polygon fill="black" stroke="black" points="1008.84,-133.84 1018.38,-129.25 1008.06,-126.89 1008.84,-133.84"/>
<text xml:space="preserve" text-anchor="middle" x="888.59" y="-155.2" font-family="Times,serif" font-size="14.00">No</text>
</g>
<!-- retry&#45;&gt;determine_parent_kind -->
<g id="edge42" class="edge">
<title>retry&#45;&gt;determine_parent_kind</title>
<path fill="none" stroke="black" d="M889.55,-40.44C892.13,-62.4 897.56,-101.12 906.71,-133.25 931.39,-219.83 982.71,-230.99 982.71,-321.02 982.71,-1117.66 982.71,-1117.66 982.71,-1117.66 982.71,-1262.68 931.71,-1294.93 931.71,-1439.95 931.71,-1600.08 931.71,-1600.08 931.71,-1600.08 931.71,-1628.61 940.46,-1659.01 950.06,-1683.79"/>
<polygon fill="black" stroke="black" points="946.78,-1685.04 953.78,-1693 953.27,-1682.41 946.78,-1685.04"/>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 36 KiB

Some files were not shown because too many files have changed in this diff Show More