Compare commits

..

1 Commits

Author SHA1 Message Date
Charlie Marsh
634765c303 [ty] Emit a diagnostic for subclassing with order=True 2026-01-06 11:10:16 -05:00
189 changed files with 3330 additions and 8988 deletions

10
.github/CODEOWNERS vendored
View File

@@ -20,11 +20,9 @@
# ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_ide/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager @Gankra
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic/ @carljm @AlexWaygood @sharkdp @dcreager
/crates/ty_module_resolver/ @carljm @MichaReiser @AlexWaygood @Gankra
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager

View File

@@ -5,4 +5,5 @@
[rules]
possibly-unresolved-reference = "warn"
possibly-missing-import = "warn"
unused-ignore-comment = "warn"
division-by-zero = "warn"

View File

@@ -51,7 +51,6 @@ jobs:
- name: "Build sdist"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
command: sdist
args: --out dist
- name: "Test sdist"
@@ -82,7 +81,6 @@ jobs:
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: x86_64
args: --release --locked --out dist
- name: "Upload wheels"
@@ -125,7 +123,6 @@ jobs:
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: aarch64
args: --release --locked --out dist
- name: "Test wheel - aarch64"
@@ -182,7 +179,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
env:
@@ -236,7 +232,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
@@ -313,7 +308,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -380,7 +374,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
@@ -446,7 +439,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist

View File

@@ -1,58 +0,0 @@
# Build ruff_wasm for npm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
name: "Build wasm"
on:
workflow_call:
inputs:
plan:
required: true
type: string
pull_request:
paths:
- .github/workflows/build-wasm.yml
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
build:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
strategy:
matrix:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- name: "Upload wasm artifact"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-wasm-${{ matrix.target }}
path: crates/ruff_wasm/pkg

View File

@@ -1,18 +1,25 @@
# Publish ruff_wasm to npm.
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Publish wasm"
name: "Build and publish wasm"
on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
publish:
ruff_wasm:
runs-on: ubuntu-latest
permissions:
contents: read
@@ -22,19 +29,31 @@ jobs:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
name: artifacts-wasm-${{ matrix.target }}
path: pkg
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 24
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --dry-run pkg
run: npm publish --dry-run crates/ruff_wasm/pkg
- name: "Publish"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --provenance --access public pkg
run: npm publish --provenance --access public crates/ruff_wasm/pkg
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -112,22 +112,12 @@ jobs:
"contents": "read"
"packages": "write"
custom-build-wasm:
needs:
- plan
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
uses: ./.github/workflows/build-wasm.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
# Build and package all the platform-agnostic(ish) things
build-global-artifacts:
needs:
- plan
- custom-build-binaries
- custom-build-docker
- custom-build-wasm
runs-on: "depot-ubuntu-latest-4"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -175,10 +165,9 @@ jobs:
- plan
- custom-build-binaries
- custom-build-docker
- custom-build-wasm
- build-global-artifacts
# Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') && (needs.custom-build-wasm.result == 'skipped' || needs.custom-build-wasm.result == 'success') }}
if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "depot-ubuntu-latest-4"

View File

@@ -40,12 +40,12 @@ jobs:
- name: Install the latest version of uv
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: true
enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
workspaces: "ruff"
lookup-only: false
lookup-only: false # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
- name: Install Rust toolchain
run: rustup show

View File

@@ -125,7 +125,6 @@ repos:
args:
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
language: golang # means renovate will also update `additional_dependencies`
additional_dependencies:
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
# and checks these with shellcheck. This is arguably its most useful feature,

View File

@@ -1,63 +1,5 @@
# Changelog
## 0.14.11
Released on 2026-01-08.
### Preview features
- Consolidate diagnostics for matched disable/enable suppression comments ([#22099](https://github.com/astral-sh/ruff/pull/22099))
- Report diagnostics for invalid/unmatched range suppression comments ([#21908](https://github.com/astral-sh/ruff/pull/21908))
- \[`airflow`\] Passing positional argument into `airflow.lineage.hook.HookLineageCollector.create_asset` is not allowed (`AIR303`) ([#22046](https://github.com/astral-sh/ruff/pull/22046))
- \[`refurb`\] Mark `FURB192` fix as always unsafe ([#22210](https://github.com/astral-sh/ruff/pull/22210))
- \[`ruff`\] Add `non-empty-init-module` (`RUF067`) ([#22143](https://github.com/astral-sh/ruff/pull/22143))
### Bug fixes
- Fix GitHub format for multi-line diagnostics ([#22108](https://github.com/astral-sh/ruff/pull/22108))
- \[`flake8-unused-arguments`\] Mark `**kwargs` in `TypeVar` as used (`ARG001`) ([#22214](https://github.com/astral-sh/ruff/pull/22214))
### Rule changes
- Add `help:` subdiagnostics for several Ruff rules that can sometimes appear to disagree with `ty` ([#22331](https://github.com/astral-sh/ruff/pull/22331))
- \[`pylint`\] Demote `PLW1510` fix to display-only ([#22318](https://github.com/astral-sh/ruff/pull/22318))
- \[`pylint`\] Ignore identical members (`PLR1714`) ([#22220](https://github.com/astral-sh/ruff/pull/22220))
- \[`pylint`\] Improve diagnostic range for `PLC0206` ([#22312](https://github.com/astral-sh/ruff/pull/22312))
- \[`ruff`\] Improve fix title for `RUF102` invalid rule code ([#22100](https://github.com/astral-sh/ruff/pull/22100))
- \[`flake8-simplify`\]: Avoid unnecessary builtins import for `SIM105` ([#22358](https://github.com/astral-sh/ruff/pull/22358))
### Configuration
- Allow Python 3.15 as valid `target-version` value in preview ([#22419](https://github.com/astral-sh/ruff/pull/22419))
- Check `required-version` before parsing rules ([#22410](https://github.com/astral-sh/ruff/pull/22410))
- Include configured `src` directories when resolving graphs ([#22451](https://github.com/astral-sh/ruff/pull/22451))
### Documentation
- Update `T201` suggestion to not use root logger to satisfy `LOG015` ([#22059](https://github.com/astral-sh/ruff/pull/22059))
- Fix `iter` example in unsafe fixes doc ([#22118](https://github.com/astral-sh/ruff/pull/22118))
- \[`flake8_print`\] better suggestion for `basicConfig` in `T201` docs ([#22101](https://github.com/astral-sh/ruff/pull/22101))
- \[`pylint`\] Restore the fix safety docs for `PLW0133` ([#22211](https://github.com/astral-sh/ruff/pull/22211))
- Fix Jupyter notebook discovery info for editors ([#22447](https://github.com/astral-sh/ruff/pull/22447))
### Contributors
- [@charliermarsh](https://github.com/charliermarsh)
- [@ntBre](https://github.com/ntBre)
- [@cenviity](https://github.com/cenviity)
- [@njhearp](https://github.com/njhearp)
- [@cbachhuber](https://github.com/cbachhuber)
- [@jelle-openai](https://github.com/jelle-openai)
- [@AlexWaygood](https://github.com/AlexWaygood)
- [@ValdonVitija](https://github.com/ValdonVitija)
- [@BurntSushi](https://github.com/BurntSushi)
- [@Jkhall81](https://github.com/Jkhall81)
- [@PeterJCLaw](https://github.com/PeterJCLaw)
- [@harupy](https://github.com/harupy)
- [@amyreese](https://github.com/amyreese)
- [@sjyangkevin](https://github.com/sjyangkevin)
- [@woodruffw](https://github.com/woodruffw)
## 0.14.10
Released on 2025-12-18.

16
Cargo.lock generated
View File

@@ -2912,7 +2912,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.14.11"
version = "0.14.10"
dependencies = [
"anyhow",
"argfile",
@@ -2928,7 +2928,6 @@ dependencies = [
"filetime",
"globwalk",
"ignore",
"indexmap",
"indoc",
"insta",
"insta-cmd",
@@ -3172,7 +3171,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.14.11"
version = "0.14.10"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3530,7 +3529,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.14.11"
version = "0.14.10"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3646,7 +3645,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=9860ff6ca0f1f8f3a8d6b832020002790b501254#9860ff6ca0f1f8f3a8d6b832020002790b501254"
source = "git+https://github.com/salsa-rs/salsa.git?rev=309c249088fdeef0129606fa34ec2eefc74736ff#309c249088fdeef0129606fa34ec2eefc74736ff"
dependencies = [
"boxcar",
"compact_str",
@@ -3671,12 +3670,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=9860ff6ca0f1f8f3a8d6b832020002790b501254#9860ff6ca0f1f8f3a8d6b832020002790b501254"
source = "git+https://github.com/salsa-rs/salsa.git?rev=309c249088fdeef0129606fa34ec2eefc74736ff#309c249088fdeef0129606fa34ec2eefc74736ff"
[[package]]
name = "salsa-macros"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=9860ff6ca0f1f8f3a8d6b832020002790b501254#9860ff6ca0f1f8f3a8d6b832020002790b501254"
source = "git+https://github.com/salsa-rs/salsa.git?rev=309c249088fdeef0129606fa34ec2eefc74736ff#309c249088fdeef0129606fa34ec2eefc74736ff"
dependencies = [
"proc-macro2",
"quote",
@@ -4444,7 +4443,6 @@ version = "0.0.0"
dependencies = [
"bitflags 2.10.0",
"camino",
"compact_str",
"get-size2",
"insta",
"itertools 0.14.0",
@@ -4513,13 +4511,11 @@ dependencies = [
"regex-automata",
"ruff_cache",
"ruff_db",
"ruff_diagnostics",
"ruff_macros",
"ruff_memory_usage",
"ruff_options_metadata",
"ruff_python_ast",
"ruff_python_formatter",
"ruff_python_trivia",
"ruff_text_size",
"rustc-hash",
"salsa",

View File

@@ -150,7 +150,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "9860ff6ca0f1f8f3a8d6b832020002790b501254", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "309c249088fdeef0129606fa34ec2eefc74736ff", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",

View File

@@ -150,8 +150,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.11/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.11/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.10/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.10/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -184,7 +184,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.11
rev: v0.14.10
hooks:
# Run the linter.
- id: ruff-check

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.14.11"
version = "0.14.10"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -48,7 +48,6 @@ colored = { workspace = true }
filetime = { workspace = true }
globwalk = { workspace = true }
ignore = { workspace = true }
indexmap = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
jiff = { workspace = true }

View File

@@ -2,7 +2,6 @@ use crate::args::{AnalyzeGraphArgs, ConfigArguments};
use crate::resolve::resolve;
use crate::{ExitStatus, resolve_default_files};
use anyhow::Result;
use indexmap::IndexSet;
use log::{debug, warn};
use path_absolutize::CWD;
use ruff_db::system::{SystemPath, SystemPathBuf};
@@ -12,7 +11,7 @@ use ruff_linter::source_kind::SourceKind;
use ruff_linter::{warn_user, warn_user_once};
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path};
use rustc_hash::{FxBuildHasher, FxHashMap};
use rustc_hash::FxHashMap;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
@@ -60,34 +59,17 @@ pub(crate) fn analyze_graph(
})
.collect::<FxHashMap<_, _>>();
// Create a database from the source roots, combining configured `src` paths with detected
// package roots. Configured paths are added first so they take precedence, and duplicates
// are removed.
let mut src_roots: IndexSet<SystemPathBuf, FxBuildHasher> = IndexSet::default();
// Add configured `src` paths first (for precedence), filtering to only include existing
// directories.
src_roots.extend(
pyproject_config
.settings
.linter
.src
.iter()
.filter(|path| path.is_dir())
.filter_map(|path| SystemPathBuf::from_path_buf(path.clone()).ok()),
);
// Add detected package roots.
src_roots.extend(
package_roots
.values()
.filter_map(|package| package.as_deref())
.filter_map(|path| path.parent())
.filter_map(|path| SystemPathBuf::from_path_buf(path.to_path_buf()).ok()),
);
// Create a database from the source roots.
let src_roots = package_roots
.values()
.filter_map(|package| package.as_deref())
.filter_map(|package| package.parent())
.map(Path::to_path_buf)
.filter_map(|path| SystemPathBuf::from_path_buf(path).ok())
.collect();
let db = ModuleDb::from_src_roots(
src_roots.into_iter().collect(),
src_roots,
pyproject_config
.settings
.analyze

View File

@@ -29,10 +29,10 @@ pub(crate) fn show_settings(
bail!("No files found under the given path");
};
let (settings, config_path) = resolver.resolve_with_path(&path);
let settings = resolver.resolve(&path);
writeln!(writer, "Resolved settings for: \"{}\"", path.display())?;
if let Some(settings_path) = config_path {
if let Some(settings_path) = pyproject_config.path.as_ref() {
writeln!(writer, "Settings path: \"{}\"", settings_path.display())?;
}
write!(writer, "{settings}")?;

View File

@@ -714,121 +714,6 @@ fn notebook_basic() -> Result<()> {
Ok(())
}
/// Test that the `src` configuration option is respected.
///
/// This is useful for monorepos where there are multiple source directories that need to be
/// included in the module resolution search path.
#[test]
fn src_option() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
// Create a lib directory with a package.
root.child("lib")
.child("mylib")
.child("__init__.py")
.write_str("def helper(): pass")?;
// Create an app directory with a file that imports from mylib.
root.child("app").child("__init__.py").write_str("")?;
root.child("app")
.child("main.py")
.write_str("from mylib import helper")?;
// Without src configured, the import from mylib won't resolve.
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("app").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"app/__init__.py": [],
"app/main.py": []
}
----- stderr -----
"#);
});
// With src = ["lib"], the import should resolve.
root.child("ruff.toml").write_str(indoc::indoc! {r#"
src = ["lib"]
"#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("app").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"app/__init__.py": [],
"app/main.py": [
"lib/mylib/__init__.py"
]
}
----- stderr -----
"#);
});
Ok(())
}
/// Test that glob patterns in `src` are expanded.
#[test]
fn src_glob_expansion() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
// Create multiple lib directories with packages.
root.child("libs")
.child("lib_a")
.child("pkg_a")
.child("__init__.py")
.write_str("def func_a(): pass")?;
root.child("libs")
.child("lib_b")
.child("pkg_b")
.child("__init__.py")
.write_str("def func_b(): pass")?;
// Create an app that imports from both packages.
root.child("app").child("__init__.py").write_str("")?;
root.child("app")
.child("main.py")
.write_str("from pkg_a import func_a\nfrom pkg_b import func_b")?;
// Use a glob pattern to include all lib directories.
root.child("ruff.toml").write_str(indoc::indoc! {r#"
src = ["libs/*"]
"#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("app").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"app/__init__.py": [],
"app/main.py": [
"libs/lib_a/pkg_a/__init__.py",
"libs/lib_b/pkg_b/__init__.py"
]
}
----- stderr -----
"#);
});
Ok(())
}
#[test]
fn notebook_with_magic() -> Result<()> {
let tempdir = TempDir::new()?;

View File

@@ -1126,35 +1126,6 @@ import os
Ok(())
}
#[test]
fn required_version_fails_to_parse() -> Result<()> {
let fixture = CliTest::with_file(
"ruff.toml",
r#"
required-version = "pikachu"
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command(), @r#"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: Failed to load configuration `[TMP]/ruff.toml`
Cause: Failed to parse [TMP]/ruff.toml
Cause: TOML parse error at line 2, column 20
|
2 | required-version = "pikachu"
| ^^^^^^^^^
Failed to parse version: Unexpected end of version specifier, expected operator:
pikachu
^^^^^^^
"#);
Ok(())
}
#[test]
fn required_version_exact_mismatch() -> Result<()> {
let version = env!("CARGO_PKG_VERSION");
@@ -1166,10 +1137,10 @@ required-version = "0.1.0"
"#,
)?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(version, "[VERSION]");
settings.bind(|| {
assert_cmd_snapshot!(fixture
insta::with_settings!({
filters => vec![(version, "[VERSION]")]
}, {
assert_cmd_snapshot!(fixture
.check_command()
.arg("--config")
.arg("ruff.toml")
@@ -1183,7 +1154,6 @@ import os
----- stderr -----
ruff failed
Cause: Failed to load configuration `[TMP]/ruff.toml`
Cause: Required version `==0.1.0` does not match the running version `[VERSION]`
");
});
@@ -1242,10 +1212,10 @@ required-version = ">{version}"
),
)?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(version, "[VERSION]");
settings.bind(|| {
assert_cmd_snapshot!(fixture
insta::with_settings!({
filters => vec![(version, "[VERSION]")]
}, {
assert_cmd_snapshot!(fixture
.check_command()
.arg("--config")
.arg("ruff.toml")
@@ -1259,48 +1229,6 @@ import os
----- stderr -----
ruff failed
Cause: Failed to load configuration `[TMP]/ruff.toml`
Cause: Required version `>[VERSION]` does not match the running version `[VERSION]`
");
});
Ok(())
}
#[test]
fn required_version_precedes_rule_validation() -> Result<()> {
let version = env!("CARGO_PKG_VERSION");
let fixture = CliTest::with_file(
"ruff.toml",
&format!(
r#"
required-version = ">{version}"
[lint]
select = ["RUF999"]
"#
),
)?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(version, "[VERSION]");
settings.bind(|| {
assert_cmd_snapshot!(fixture
.check_command()
.arg("--config")
.arg("ruff.toml")
.arg("-")
.pass_stdin(r#"
import os
"#), @"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: Failed to load configuration `[TMP]/ruff.toml`
Cause: Required version `>[VERSION]` does not match the running version `[VERSION]`
");
});

View File

@@ -16,7 +16,6 @@ success: true
exit_code: 0
----- stdout -----
Resolved settings for: "[TMP]/foo/test.py"
Settings path: "[TMP]/foo/pyproject.toml"
# General Settings
cache_dir = "[TMP]/foo/.ruff_cache"

View File

@@ -50,56 +50,6 @@ ignore = [
Ok(())
}
#[test]
fn display_settings_from_nested_directory() -> anyhow::Result<()> {
let tempdir = TempDir::new().context("Failed to create temp directory.")?;
// Tempdir path's on macos are symlinks, which doesn't play nicely with
// our snapshot filtering.
let project_dir =
dunce::canonicalize(tempdir.path()).context("Failed to canonical tempdir path.")?;
// Root pyproject.toml.
std::fs::write(
project_dir.join("pyproject.toml"),
r#"
[tool.ruff]
line-length = 100
[tool.ruff.lint]
select = ["E", "F"]
"#,
)?;
// Create a subdirectory with its own pyproject.toml.
let subdir = project_dir.join("subdir");
std::fs::create_dir(&subdir)?;
std::fs::write(
subdir.join("pyproject.toml"),
r#"
[tool.ruff]
line-length = 120
[tool.ruff.lint]
select = ["E", "F", "I"]
"#,
)?;
std::fs::write(subdir.join("test.py"), r#"import os"#).context("Failed to write test.py.")?;
insta::with_settings!({filters => vec![
(&*tempdir_filter(&project_dir), "<temp_dir>/"),
(r#"\\(\w\w|\s|\.|")"#, "/$1"),
]}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-settings", "subdir/test.py"])
.current_dir(&project_dir));
});
Ok(())
}
fn tempdir_filter(project_dir: &Path) -> String {
format!(r#"{}\\?/?"#, regex::escape(project_dir.to_str().unwrap()))
}

View File

@@ -1,410 +0,0 @@
---
source: crates/ruff/tests/show_settings.rs
info:
program: ruff
args:
- check
- "--show-settings"
- subdir/test.py
---
success: true
exit_code: 0
----- stdout -----
Resolved settings for: "<temp_dir>/subdir/test.py"
Settings path: "<temp_dir>/subdir/pyproject.toml"
# General Settings
cache_dir = "<temp_dir>/subdir/.ruff_cache"
fix = false
fix_only = false
output_format = full
show_fixes = false
unsafe_fixes = hint
# File Resolver Settings
file_resolver.exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"dist",
"node_modules",
"site-packages",
"venv",
]
file_resolver.extend_exclude = []
file_resolver.force_exclude = false
file_resolver.include = [
"*.py",
"*.pyi",
"*.ipynb",
"**/pyproject.toml",
]
file_resolver.extend_include = []
file_resolver.respect_gitignore = true
file_resolver.project_root = "<temp_dir>/subdir"
# Linter Settings
linter.exclude = []
linter.project_root = "<temp_dir>/subdir"
linter.rules.enabled = [
unsorted-imports (I001),
missing-required-import (I002),
mixed-spaces-and-tabs (E101),
multiple-imports-on-one-line (E401),
module-import-not-at-top-of-file (E402),
line-too-long (E501),
multiple-statements-on-one-line-colon (E701),
multiple-statements-on-one-line-semicolon (E702),
useless-semicolon (E703),
none-comparison (E711),
true-false-comparison (E712),
not-in-test (E713),
not-is-test (E714),
type-comparison (E721),
bare-except (E722),
lambda-assignment (E731),
ambiguous-variable-name (E741),
ambiguous-class-name (E742),
ambiguous-function-name (E743),
io-error (E902),
unused-import (F401),
import-shadowed-by-loop-var (F402),
undefined-local-with-import-star (F403),
late-future-import (F404),
undefined-local-with-import-star-usage (F405),
undefined-local-with-nested-import-star-usage (F406),
future-feature-not-defined (F407),
percent-format-invalid-format (F501),
percent-format-expected-mapping (F502),
percent-format-expected-sequence (F503),
percent-format-extra-named-arguments (F504),
percent-format-missing-argument (F505),
percent-format-mixed-positional-and-named (F506),
percent-format-positional-count-mismatch (F507),
percent-format-star-requires-sequence (F508),
percent-format-unsupported-format-character (F509),
string-dot-format-invalid-format (F521),
string-dot-format-extra-named-arguments (F522),
string-dot-format-extra-positional-arguments (F523),
string-dot-format-missing-arguments (F524),
string-dot-format-mixing-automatic (F525),
f-string-missing-placeholders (F541),
multi-value-repeated-key-literal (F601),
multi-value-repeated-key-variable (F602),
expressions-in-star-assignment (F621),
multiple-starred-expressions (F622),
assert-tuple (F631),
is-literal (F632),
invalid-print-syntax (F633),
if-tuple (F634),
break-outside-loop (F701),
continue-outside-loop (F702),
yield-outside-function (F704),
return-outside-function (F706),
default-except-not-last (F707),
forward-annotation-syntax-error (F722),
redefined-while-unused (F811),
undefined-name (F821),
undefined-export (F822),
undefined-local (F823),
unused-variable (F841),
unused-annotation (F842),
raise-not-implemented (F901),
]
linter.rules.should_fix = [
unsorted-imports (I001),
missing-required-import (I002),
mixed-spaces-and-tabs (E101),
multiple-imports-on-one-line (E401),
module-import-not-at-top-of-file (E402),
line-too-long (E501),
multiple-statements-on-one-line-colon (E701),
multiple-statements-on-one-line-semicolon (E702),
useless-semicolon (E703),
none-comparison (E711),
true-false-comparison (E712),
not-in-test (E713),
not-is-test (E714),
type-comparison (E721),
bare-except (E722),
lambda-assignment (E731),
ambiguous-variable-name (E741),
ambiguous-class-name (E742),
ambiguous-function-name (E743),
io-error (E902),
unused-import (F401),
import-shadowed-by-loop-var (F402),
undefined-local-with-import-star (F403),
late-future-import (F404),
undefined-local-with-import-star-usage (F405),
undefined-local-with-nested-import-star-usage (F406),
future-feature-not-defined (F407),
percent-format-invalid-format (F501),
percent-format-expected-mapping (F502),
percent-format-expected-sequence (F503),
percent-format-extra-named-arguments (F504),
percent-format-missing-argument (F505),
percent-format-mixed-positional-and-named (F506),
percent-format-positional-count-mismatch (F507),
percent-format-star-requires-sequence (F508),
percent-format-unsupported-format-character (F509),
string-dot-format-invalid-format (F521),
string-dot-format-extra-named-arguments (F522),
string-dot-format-extra-positional-arguments (F523),
string-dot-format-missing-arguments (F524),
string-dot-format-mixing-automatic (F525),
f-string-missing-placeholders (F541),
multi-value-repeated-key-literal (F601),
multi-value-repeated-key-variable (F602),
expressions-in-star-assignment (F621),
multiple-starred-expressions (F622),
assert-tuple (F631),
is-literal (F632),
invalid-print-syntax (F633),
if-tuple (F634),
break-outside-loop (F701),
continue-outside-loop (F702),
yield-outside-function (F704),
return-outside-function (F706),
default-except-not-last (F707),
forward-annotation-syntax-error (F722),
redefined-while-unused (F811),
undefined-name (F821),
undefined-export (F822),
undefined-local (F823),
unused-variable (F841),
unused-annotation (F842),
raise-not-implemented (F901),
]
linter.per_file_ignores = {}
linter.safety_table.forced_safe = []
linter.safety_table.forced_unsafe = []
linter.unresolved_target_version = none
linter.per_file_target_version = {}
linter.preview = disabled
linter.explicit_preview_rules = false
linter.extension = ExtensionMapping({})
linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = true
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
"<temp_dir>/subdir",
"<temp_dir>/subdir/src",
]
linter.tab_size = 4
linter.line_length = 120
linter.task_tags = [
TODO,
FIXME,
XXX,
]
linter.typing_modules = []
linter.typing_extensions = true
# Linter Plugins
linter.flake8_annotations.mypy_init_return = false
linter.flake8_annotations.suppress_dummy_args = false
linter.flake8_annotations.suppress_none_returning = false
linter.flake8_annotations.allow_star_arg_any = false
linter.flake8_annotations.ignore_fully_untyped = false
linter.flake8_bandit.hardcoded_tmp_directory = [
/tmp,
/var/tmp,
/dev/shm,
]
linter.flake8_bandit.check_typed_exception = false
linter.flake8_bandit.extend_markup_names = []
linter.flake8_bandit.allowed_markup_calls = []
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.allowed_modules = []
linter.flake8_builtins.ignorelist = []
linter.flake8_builtins.strict_checking = false
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|,\s)\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.function_names = [
_,
gettext,
ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
altair = alt,
holoviews = hv,
matplotlib = mpl,
matplotlib.pyplot = plt,
networkx = nx,
numpy = np,
numpy.typing = npt,
pandas = pd,
panel = pn,
plotly.express = px,
polars = pl,
pyarrow = pa,
seaborn = sns,
tensorflow = tf,
tkinter = tk,
xml.etree.ElementTree = ET,
}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = false
linter.flake8_pytest_style.parametrize_names_type = tuple
linter.flake8_pytest_style.parametrize_values_type = list
linter.flake8_pytest_style.parametrize_values_row_type = tuple
linter.flake8_pytest_style.raises_require_match_for = [
BaseException,
Exception,
ValueError,
OSError,
IOError,
EnvironmentError,
socket.error,
]
linter.flake8_pytest_style.raises_extend_require_match_for = []
linter.flake8_pytest_style.mark_parentheses = false
linter.flake8_quotes.inline_quotes = double
linter.flake8_quotes.multiline_quotes = double
linter.flake8_quotes.docstring_quotes = double
linter.flake8_quotes.avoid_escape = true
linter.flake8_self.ignore_names = [
_make,
_asdict,
_replace,
_fields,
_field_defaults,
_name_,
_value_,
]
linter.flake8_tidy_imports.ban_relative_imports = "parents"
linter.flake8_tidy_imports.banned_api = {}
linter.flake8_tidy_imports.banned_module_level_imports = []
linter.flake8_type_checking.strict = false
linter.flake8_type_checking.exempt_modules = [
typing,
typing_extensions,
]
linter.flake8_type_checking.runtime_required_base_classes = []
linter.flake8_type_checking.runtime_required_decorators = []
linter.flake8_type_checking.quote_annotations = false
linter.flake8_unused_arguments.ignore_variadic_names = false
linter.isort.required_imports = []
linter.isort.combine_as_imports = false
linter.isort.force_single_line = false
linter.isort.force_sort_within_sections = false
linter.isort.detect_same_package = true
linter.isort.case_sensitive = false
linter.isort.force_wrap_aliases = false
linter.isort.force_to_top = []
linter.isort.known_modules = {}
linter.isort.order_by_type = true
linter.isort.relative_imports_order = furthest_to_closest
linter.isort.single_line_exclusions = []
linter.isort.split_on_trailing_comma = true
linter.isort.classes = []
linter.isort.constants = []
linter.isort.variables = []
linter.isort.no_lines_before = []
linter.isort.lines_after_imports = -1
linter.isort.lines_between_types = 0
linter.isort.forced_separate = []
linter.isort.section_order = [
known { type = future },
known { type = standard_library },
known { type = third_party },
known { type = first_party },
known { type = local_folder },
]
linter.isort.default_section = known { type = third_party }
linter.isort.no_sections = false
linter.isort.from_first = false
linter.isort.length_sort = false
linter.isort.length_sort_straight = false
linter.mccabe.max_complexity = 10
linter.pep8_naming.ignore_names = [
setUp,
tearDown,
setUpClass,
tearDownClass,
setUpModule,
tearDownModule,
asyncSetUp,
asyncTearDown,
setUpTestData,
failureException,
longMessage,
maxDiff,
]
linter.pep8_naming.classmethod_decorators = []
linter.pep8_naming.staticmethod_decorators = []
linter.pycodestyle.max_line_length = 120
linter.pycodestyle.max_doc_length = none
linter.pycodestyle.ignore_overlong_task_comments = false
linter.pyflakes.extend_generics = []
linter.pyflakes.allowed_unused_imports = []
linter.pylint.allow_magic_value_types = [
str,
bytes,
]
linter.pylint.allow_dunder_method_names = []
linter.pylint.max_args = 5
linter.pylint.max_positional_args = 5
linter.pylint.max_returns = 6
linter.pylint.max_bool_expr = 5
linter.pylint.max_branches = 12
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
linter.ruff.strictly_empty_init_modules = false
# Formatter Settings
formatter.exclude = []
formatter.unresolved_target_version = 3.10
formatter.per_file_target_version = {}
formatter.preview = disabled
formatter.line_width = 120
formatter.line_ending = auto
formatter.indent_style = space
formatter.indent_width = 4
formatter.quote_style = double
formatter.magic_trailing_comma = respect
formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic
# Analyze Settings
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
analyze.type_checking_imports = true
----- stderr -----

View File

@@ -15,7 +15,7 @@ use ruff_db::files::{File, system_path_to_file};
use ruff_db::source::source_text;
use ruff_db::system::{InMemorySystem, MemoryFileSystem, SystemPath, SystemPathBuf, TestSystem};
use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{AnalysisOptions, EnvironmentOptions, Options};
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
@@ -67,7 +67,6 @@ fn tomllib_path(file: &TestFile) -> SystemPathBuf {
SystemPathBuf::from("src").join(file.name())
}
#[expect(clippy::needless_update)]
fn setup_tomllib_case() -> Case {
let system = TestSystem::default();
let fs = system.memory_file_system().clone();
@@ -86,10 +85,6 @@ fn setup_tomllib_case() -> Case {
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
..EnvironmentOptions::default()
}),
analysis: Some(AnalysisOptions {
respect_type_ignore_comments: Some(false),
..AnalysisOptions::default()
}),
..Options::default()
});
@@ -226,7 +221,7 @@ fn setup_micro_case(code: &str) -> Case {
let file_path = "src/test.py";
fs.write_file_all(
SystemPathBuf::from(file_path),
&*ruff_python_trivia::textwrap::dedent(code),
ruff_python_trivia::textwrap::dedent(code),
)
.unwrap();
@@ -760,7 +755,7 @@ fn datetype(criterion: &mut Criterion) {
max_dep_date: "2025-07-04",
python_version: PythonVersion::PY313,
},
4,
2,
);
bench_project(&benchmark, criterion);

View File

@@ -71,8 +71,6 @@ impl Display for Benchmark<'_> {
}
}
#[track_caller]
#[expect(clippy::cast_precision_loss)]
fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) {
let result = db.check();
let diagnostics = result.len();
@@ -81,12 +79,6 @@ fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usiz
diagnostics > 1 && diagnostics <= max_diagnostics,
"Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}",
);
if (max_diagnostics - diagnostics) as f64 / max_diagnostics as f64 > 0.10 {
tracing::warn!(
"The expected diagnostics for project `{project_name}` can be reduced: expected {max_diagnostics} but got {diagnostics}"
);
}
}
static ALTAIR: Benchmark = Benchmark::new(
@@ -109,7 +101,7 @@ static ALTAIR: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
850,
1000,
);
static COLOUR_SCIENCE: Benchmark = Benchmark::new(
@@ -128,7 +120,7 @@ static COLOUR_SCIENCE: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY310,
},
350,
1070,
);
static FREQTRADE: Benchmark = Benchmark::new(
@@ -171,7 +163,7 @@ static PANDAS: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
3800,
4000,
);
static PYDANTIC: Benchmark = Benchmark::new(
@@ -189,7 +181,7 @@ static PYDANTIC: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY39,
},
3200,
7000,
);
static SYMPY: Benchmark = Benchmark::new(
@@ -202,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
13400,
13116,
);
static TANJUN: Benchmark = Benchmark::new(
@@ -215,7 +207,7 @@ static TANJUN: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
110,
320,
);
static STATIC_FRAME: Benchmark = Benchmark::new(
@@ -231,7 +223,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
max_dep_date: "2025-08-09",
python_version: PythonVersion::PY311,
},
1700,
1100,
);
#[track_caller]

View File

@@ -1,4 +1,3 @@
use std::fmt::Formatter;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -50,15 +49,3 @@ impl CancellationToken {
self.cancelled.load(std::sync::atomic::Ordering::Relaxed)
}
}
/// The operation was canceled by the provided [`CancellationToken`].
#[derive(Debug)]
pub struct Canceled;
impl std::error::Error for Canceled {}
impl std::fmt::Display for Canceled {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str("operation was canceled")
}
}

View File

@@ -98,44 +98,6 @@ impl Diagnostic {
diag
}
/// Adds sub diagnostics that tell the user that this is a bug in ty
/// and asks them to open an issue on GitHub.
pub fn add_bug_sub_diagnostics(&mut self, url_encoded_title: &str) {
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"This indicates a bug in ty.",
));
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format_args!(
"If you could open an issue at https://github.com/astral-sh/ty/issues/new?title={url_encoded_title}, we'd be very appreciative!"
),
));
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Platform: {os} {arch}",
os = std::env::consts::OS,
arch = std::env::consts::ARCH
),
));
if let Some(version) = crate::program_version() {
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("Version: {version}"),
));
}
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Args: {args:?}",
args = std::env::args().collect::<Vec<_>>()
),
));
}
/// Add an annotation to this diagnostic.
///
/// Annotations for a diagnostic are optional, but if any are added,
@@ -1057,13 +1019,6 @@ impl DiagnosticId {
matches!(self, DiagnosticId::Lint(_))
}
pub const fn as_lint(&self) -> Option<LintName> {
match self {
DiagnosticId::Lint(name) => Some(*name),
_ => None,
}
}
/// Returns `true` if this `DiagnosticId` represents a lint with the given name.
pub fn is_lint_named(&self, name: &str) -> bool {
matches!(self, DiagnosticId::Lint(self_name) if self_name == name)

View File

@@ -14,7 +14,6 @@ use crate::diagnostic::{Span, UnifiedFile};
use crate::file_revision::FileRevision;
use crate::files::file_root::FileRoots;
use crate::files::private::FileStatus;
use crate::source::SourceText;
use crate::system::{SystemPath, SystemPathBuf, SystemVirtualPath, SystemVirtualPathBuf};
use crate::vendored::{VendoredPath, VendoredPathBuf};
use crate::{Db, FxDashMap, vendored};
@@ -324,17 +323,6 @@ pub struct File {
/// the file has been deleted is to change the status to `Deleted`.
#[default]
status: FileStatus,
/// Overrides the result of [`source_text`](crate::source::source_text).
///
/// This is useful when running queries after modifying a file's content but
/// before the content is written to disk. For example, to verify that the applied fixes
/// didn't introduce any new errors.
///
/// The override gets automatically removed the next time the file changes.
#[default]
#[returns(ref)]
pub source_text_override: Option<SourceText>,
}
// The Salsa heap is tracked separately.
@@ -456,28 +444,20 @@ impl File {
_ => (FileStatus::NotFound, FileRevision::zero(), None),
};
let mut clear_override = false;
if file.status(db) != status {
tracing::debug!("Updating the status of `{}`", file.path(db));
file.set_status(db).to(status);
clear_override = true;
}
if file.revision(db) != revision {
tracing::debug!("Updating the revision of `{}`", file.path(db));
file.set_revision(db).to(revision);
clear_override = true;
}
if file.permissions(db) != permission {
tracing::debug!("Updating the permissions of `{}`", file.path(db));
file.set_permissions(db).to(permission);
}
if clear_override && file.source_text_override(db).is_some() {
file.set_source_text_override(db).to(None);
}
}
/// Returns `true` if the file exists.

View File

@@ -85,13 +85,6 @@ pub fn max_parallelism() -> NonZeroUsize {
})
}
// Use a reasonably large stack size to avoid running into stack overflows too easily. The
// size was chosen in such a way as to still be able to handle large expressions involving
// binary operators (x + x + … + x) both during the AST walk in semantic index building as
// well as during type checking. Using this stack size, we can handle handle expressions
// that are several times larger than the corresponding limits in existing type checkers.
pub const STACK_SIZE: usize = 16 * 1024 * 1024;
/// Trait for types that can provide Rust documentation.
///
/// Use `derive(RustDoc)` to automatically implement this trait for types that have a static string documentation.

View File

@@ -1,8 +1,6 @@
use std::borrow::Cow;
use std::ops::Deref;
use std::sync::Arc;
use ruff_diagnostics::SourceMap;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;
@@ -18,10 +16,6 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let _span = tracing::trace_span!("source_text", file = %path).entered();
let mut read_error = None;
if let Some(source) = file.source_text_override(db) {
return source.clone();
}
let kind = if is_notebook(db.system(), path) {
file.read_to_notebook(db)
.unwrap_or_else(|error| {
@@ -96,45 +90,6 @@ impl SourceText {
pub fn read_error(&self) -> Option<&SourceTextError> {
self.inner.read_error.as_ref()
}
/// Returns a new instance for this file with the updated source text (Python code).
///
/// Uses the `source_map` to preserve the cell-boundaries.
#[must_use]
pub fn with_text(&self, new_text: String, source_map: &SourceMap) -> Self {
let new_kind = match &self.inner.kind {
SourceTextKind::Text(_) => SourceTextKind::Text(new_text),
SourceTextKind::Notebook { notebook } => {
let mut new_notebook = notebook.as_ref().clone();
new_notebook.update(source_map, new_text);
SourceTextKind::Notebook {
notebook: new_notebook.into(),
}
}
};
Self {
inner: Arc::new(SourceTextInner {
kind: new_kind,
read_error: self.inner.read_error.clone(),
}),
}
}
pub fn to_bytes(&self) -> Cow<'_, [u8]> {
match &self.inner.kind {
SourceTextKind::Text(source) => Cow::Borrowed(source.as_bytes()),
SourceTextKind::Notebook { notebook } => {
let mut output: Vec<u8> = Vec::new();
notebook
.write(&mut output)
.expect("writing to a Vec should never fail");
Cow::Owned(output)
}
}
}
}
impl Deref for SourceText {
@@ -162,13 +117,13 @@ impl std::fmt::Debug for SourceText {
}
}
#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
#[derive(Eq, PartialEq, get_size2::GetSize)]
struct SourceTextInner {
kind: SourceTextKind,
read_error: Option<SourceTextError>,
}
#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
#[derive(Eq, PartialEq, get_size2::GetSize)]
enum SourceTextKind {
Text(String),
Notebook {

View File

@@ -271,12 +271,7 @@ pub trait WritableSystem: System {
fn create_new_file(&self, path: &SystemPath) -> Result<()>;
/// Writes the given content to the file at the given path.
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
self.write_file_bytes(path, content.as_bytes())
}
/// Writes the given content to the file at the given path.
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()>;
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()>;
/// Creates a directory at `path` as well as any intermediate directories.
fn create_directory_all(&self, path: &SystemPath) -> Result<()>;
@@ -316,8 +311,6 @@ pub trait WritableSystem: System {
Ok(Some(cache_path))
}
fn dyn_clone(&self) -> Box<dyn WritableSystem>;
}
#[derive(Clone, Debug, Eq, PartialEq)]

View File

@@ -122,9 +122,7 @@ impl MemoryFileSystem {
let entry = by_path.get(&normalized).ok_or_else(not_found)?;
match entry {
Entry::File(file) => {
String::from_utf8(file.content.to_vec()).map_err(|_| invalid_utf8())
}
Entry::File(file) => Ok(file.content.clone()),
Entry::Directory(_) => Err(is_a_directory()),
}
}
@@ -141,7 +139,7 @@ impl MemoryFileSystem {
.get(&path.as_ref().to_path_buf())
.ok_or_else(not_found)?;
String::from_utf8(file.content.to_vec()).map_err(|_| invalid_utf8())
Ok(file.content.clone())
}
pub fn exists(&self, path: &SystemPath) -> bool {
@@ -163,7 +161,7 @@ impl MemoryFileSystem {
match by_path.entry(normalized) {
btree_map::Entry::Vacant(entry) => {
entry.insert(Entry::File(File {
content: Box::default(),
content: String::new(),
last_modified: file_time_now(),
}));
@@ -179,17 +177,13 @@ impl MemoryFileSystem {
/// Stores a new file in the file system.
///
/// The operation overrides the content for an existing file with the same normalized `path`.
pub fn write_file(
&self,
path: impl AsRef<SystemPath>,
content: impl AsRef<[u8]>,
) -> Result<()> {
pub fn write_file(&self, path: impl AsRef<SystemPath>, content: impl ToString) -> Result<()> {
let mut by_path = self.inner.by_path.write().unwrap();
let normalized = self.normalize_path(path.as_ref());
let file = get_or_create_file(&mut by_path, &normalized)?;
file.content = content.as_ref().to_vec().into_boxed_slice();
file.content = content.to_string();
file.last_modified = file_time_now();
Ok(())
@@ -220,7 +214,7 @@ impl MemoryFileSystem {
pub fn write_file_all(
&self,
path: impl AsRef<SystemPath>,
content: impl AsRef<[u8]>,
content: impl ToString,
) -> Result<()> {
let path = path.as_ref();
@@ -234,24 +228,19 @@ impl MemoryFileSystem {
/// Stores a new virtual file in the file system.
///
/// The operation overrides the content for an existing virtual file with the same `path`.
pub fn write_virtual_file(
&self,
path: impl AsRef<SystemVirtualPath>,
content: impl AsRef<[u8]>,
) {
pub fn write_virtual_file(&self, path: impl AsRef<SystemVirtualPath>, content: impl ToString) {
let path = path.as_ref();
let mut virtual_files = self.inner.virtual_files.write().unwrap();
let content = content.as_ref().to_vec().into_boxed_slice();
match virtual_files.entry(path.to_path_buf()) {
std::collections::hash_map::Entry::Vacant(entry) => {
entry.insert(File {
content,
content: content.to_string(),
last_modified: file_time_now(),
});
}
std::collections::hash_map::Entry::Occupied(mut entry) => {
entry.get_mut().content = content;
entry.get_mut().content = content.to_string();
}
}
}
@@ -479,7 +468,7 @@ impl Entry {
#[derive(Debug)]
struct File {
content: Box<[u8]>,
content: String,
last_modified: FileTime,
}
@@ -508,13 +497,6 @@ fn directory_not_empty() -> std::io::Error {
std::io::Error::other("directory not empty")
}
fn invalid_utf8() -> std::io::Error {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
"stream did not contain valid UTF-8",
)
}
fn create_dir_all(
paths: &mut RwLockWriteGuard<BTreeMap<Utf8PathBuf, Entry>>,
normalized: &Utf8Path,
@@ -551,7 +533,7 @@ fn get_or_create_file<'a>(
let entry = paths.entry(normalized.to_path_buf()).or_insert_with(|| {
Entry::File(File {
content: Box::default(),
content: String::new(),
last_modified: file_time_now(),
})
});
@@ -862,7 +844,7 @@ mod tests {
let fs = with_files(["c.py"]);
let error = fs
.write_file(SystemPath::new("a/b.py"), "content")
.write_file(SystemPath::new("a/b.py"), "content".to_string())
.unwrap_err();
assert_eq!(error.kind(), ErrorKind::NotFound);
@@ -873,7 +855,7 @@ mod tests {
let fs = with_files(["a/b.py"]);
let error = fs
.write_file_all(SystemPath::new("a/b.py/c"), "content")
.write_file_all(SystemPath::new("a/b.py/c"), "content".to_string())
.unwrap_err();
assert_eq!(error.kind(), ErrorKind::Other);
@@ -896,7 +878,7 @@ mod tests {
let fs = MemoryFileSystem::new();
let path = SystemPath::new("a.py");
fs.write_file_all(path, "Test content")?;
fs.write_file_all(path, "Test content".to_string())?;
assert_eq!(fs.read_to_string(path)?, "Test content");
@@ -933,7 +915,9 @@ mod tests {
fs.create_directory_all("a")?;
let error = fs.write_file(SystemPath::new("a"), "content").unwrap_err();
let error = fs
.write_file(SystemPath::new("a"), "content".to_string())
.unwrap_err();
assert_eq!(error.kind(), ErrorKind::Other);

View File

@@ -361,17 +361,13 @@ impl WritableSystem for OsSystem {
std::fs::File::create_new(path).map(drop)
}
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
std::fs::write(path.as_std_path(), content)
}
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
std::fs::create_dir_all(path.as_std_path())
}
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}
impl Default for OsSystem {

View File

@@ -205,17 +205,13 @@ impl WritableSystem for TestSystem {
self.system().create_new_file(path)
}
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
self.system().write_file_bytes(path, content)
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
self.system().write_file(path, content)
}
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
self.system().create_directory_all(path)
}
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}
/// Extension trait for databases that use a [`WritableSystem`].
@@ -287,11 +283,7 @@ pub trait DbWithTestSystem: Db + Sized {
///
/// ## Panics
/// If the db isn't using the [`InMemorySystem`].
fn write_virtual_file(
&mut self,
path: impl AsRef<SystemVirtualPath>,
content: impl AsRef<[u8]>,
) {
fn write_virtual_file(&mut self, path: impl AsRef<SystemVirtualPath>, content: impl ToString) {
let path = path.as_ref();
self.test_system()
.memory_file_system()
@@ -330,23 +322,23 @@ where
}
}
#[derive(Clone, Default, Debug)]
#[derive(Default, Debug)]
pub struct InMemorySystem {
user_config_directory: Arc<Mutex<Option<SystemPathBuf>>>,
user_config_directory: Mutex<Option<SystemPathBuf>>,
memory_fs: MemoryFileSystem,
}
impl InMemorySystem {
pub fn new(cwd: SystemPathBuf) -> Self {
Self {
user_config_directory: Mutex::new(None).into(),
user_config_directory: Mutex::new(None),
memory_fs: MemoryFileSystem::with_current_directory(cwd),
}
}
pub fn from_memory_fs(memory_fs: MemoryFileSystem) -> Self {
Self {
user_config_directory: Mutex::new(None).into(),
user_config_directory: Mutex::new(None),
memory_fs,
}
}
@@ -448,7 +440,10 @@ impl System for InMemorySystem {
}
fn dyn_clone(&self) -> Box<dyn System> {
Box::new(self.clone())
Box::new(Self {
user_config_directory: Mutex::new(self.user_config_directory.lock().unwrap().clone()),
memory_fs: self.memory_fs.clone(),
})
}
}
@@ -457,15 +452,11 @@ impl WritableSystem for InMemorySystem {
self.memory_fs.create_new_file(path)
}
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
self.memory_fs.write_file(path, content)
}
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
self.memory_fs.create_directory_all(path)
}
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.14.11"
version = "0.14.10"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -26,7 +26,6 @@ use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::fix::{FixResult, fix_file};
use crate::noqa::add_noqa;
use crate::package::PackageRoot;
use crate::preview::is_py315_support_enabled;
use crate::registry::Rule;
#[cfg(any(feature = "test-rules", test))]
use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
@@ -34,7 +33,7 @@ use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Locator, directives, fs, warn_user_once};
use crate::{Locator, directives, fs};
pub(crate) mod float;
@@ -451,14 +450,6 @@ pub fn lint_only(
) -> LinterResult {
let target_version = settings.resolve_target_version(path);
if matches!(target_version.linter_version(), PythonVersion::PY315)
&& !is_py315_support_enabled(settings)
{
warn_user_once!(
"Support for Python 3.15 is under development and may be unstable. Enable `preview` to remove this warning."
);
}
let parsed = source.into_parsed(source_kind, source_type, target_version.parser_version());
// Map row and column locations to byte slices (lazily).
@@ -564,14 +555,6 @@ pub fn lint_fix<'a>(
let target_version = settings.resolve_target_version(path);
if matches!(target_version.linter_version(), PythonVersion::PY315)
&& !is_py315_support_enabled(settings)
{
warn_user_once!(
"Support for Python 3.15 is under development and may be unstable. Enable `preview` to remove this warning."
);
}
// Continuously fix until the source code stabilizes.
loop {
// Parse once.

View File

@@ -296,8 +296,3 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled(
pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/22419
pub(crate) const fn is_py315_support_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View File

@@ -52,7 +52,6 @@ impl InvalidRuleCodeKind {
pub(crate) struct InvalidRuleCode {
pub(crate) rule_code: String,
pub(crate) kind: InvalidRuleCodeKind,
pub(crate) whole_comment: bool,
}
impl AlwaysFixableViolation for InvalidRuleCode {
@@ -66,11 +65,7 @@ impl AlwaysFixableViolation for InvalidRuleCode {
}
fn fix_title(&self) -> String {
if self.whole_comment {
format!("Remove the {} comment", self.kind.as_str())
} else {
format!("Remove the rule code `{}`", self.rule_code)
}
"Remove the rule code".to_string()
}
}
@@ -127,7 +122,6 @@ fn all_codes_invalid_diagnostic(
.collect::<Vec<_>>()
.join(", "),
kind: InvalidRuleCodeKind::Noqa,
whole_comment: true,
},
directive.range(),
)
@@ -145,7 +139,6 @@ fn some_codes_are_invalid_diagnostic(
InvalidRuleCode {
rule_code: invalid_code.to_string(),
kind: InvalidRuleCodeKind::Noqa,
whole_comment: false,
},
invalid_code.range(),
)

View File

@@ -52,25 +52,6 @@ impl UnusedNOQAKind {
/// foo.bar()
/// ```
///
/// ## Conflict with other linters
/// When using `RUF100` with the `--fix` option, Ruff may remove trailing comments
/// that follow a `# noqa` directive on the same line, as it interprets the
/// remainder of the line as a description for the suppression.
///
/// To prevent Ruff from removing suppressions for other tools (like `pylint`
/// or `mypy`), separate them with a second `#` character:
///
/// ```python
/// # Bad: Ruff --fix will remove the pylint comment
/// def visit_ImportFrom(self, node): # noqa: N802, pylint: disable=invalid-name
/// pass
///
///
/// # Good: Ruff will preserve the pylint comment
/// def visit_ImportFrom(self, node): # noqa: N802 # pylint: disable=invalid-name
/// pass
/// ```
///
/// ## Options
/// - `lint.external`
///

View File

@@ -10,7 +10,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID123
3 | # External code
4 | import re # noqa: V123
|
help: Remove the `# noqa` comment
help: Remove the rule code
1 | # Invalid code
- import os # noqa: INVALID123
2 + import os
@@ -28,7 +28,7 @@ RUF102 [*] Invalid rule code in `# noqa`: V123
5 | # Valid noqa
6 | import sys # noqa: E402
|
help: Remove the `# noqa` comment
help: Remove the rule code
1 | # Invalid code
2 | import os # noqa: INVALID123
3 | # External code
@@ -48,7 +48,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID456
8 | from itertools import product # Preceeding comment # noqa: INVALID789
9 | # Succeeding comment
|
help: Remove the rule code `INVALID456`
help: Remove the rule code
4 | import re # noqa: V123
5 | # Valid noqa
6 | import sys # noqa: E402
@@ -68,7 +68,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID789
9 | # Succeeding comment
10 | import math # noqa: INVALID000 # Succeeding comment
|
help: Remove the `# noqa` comment
help: Remove the rule code
5 | # Valid noqa
6 | import sys # noqa: E402
7 | from functools import cache # Preceeding comment # noqa: F401, INVALID456
@@ -88,7 +88,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID000
11 | # Mixed valid and invalid
12 | from typing import List # noqa: F401, INVALID123
|
help: Remove the `# noqa` comment
help: Remove the rule code
7 | from functools import cache # Preceeding comment # noqa: F401, INVALID456
8 | from itertools import product # Preceeding comment # noqa: INVALID789
9 | # Succeeding comment
@@ -108,7 +108,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID123
13 | # Test for multiple invalid
14 | from collections import defaultdict # noqa: INVALID100, INVALID200, F401
|
help: Remove the rule code `INVALID123`
help: Remove the rule code
9 | # Succeeding comment
10 | import math # noqa: INVALID000 # Succeeding comment
11 | # Mixed valid and invalid
@@ -128,7 +128,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID100
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
|
help: Remove the rule code `INVALID100`
help: Remove the rule code
11 | # Mixed valid and invalid
12 | from typing import List # noqa: F401, INVALID123
13 | # Test for multiple invalid
@@ -148,7 +148,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID200
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
|
help: Remove the rule code `INVALID200`
help: Remove the rule code
11 | # Mixed valid and invalid
12 | from typing import List # noqa: F401, INVALID123
13 | # Test for multiple invalid
@@ -168,7 +168,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID300
17 | # Test for mixed code types
18 | import json # noqa: E402, INVALID400, V100
|
help: Remove the rule code `INVALID300`
help: Remove the rule code
13 | # Test for multiple invalid
14 | from collections import defaultdict # noqa: INVALID100, INVALID200, F401
15 | # Test for preserving valid codes when fixing
@@ -188,7 +188,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID400
19 | # Test for rule redirects
20 | import pandas as pd # noqa: TCH002
|
help: Remove the rule code `INVALID400`
help: Remove the rule code
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
17 | # Test for mixed code types
@@ -207,7 +207,7 @@ RUF102 [*] Invalid rule code in `# noqa`: V100
19 | # Test for rule redirects
20 | import pandas as pd # noqa: TCH002
|
help: Remove the rule code `V100`
help: Remove the rule code
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
17 | # Test for mixed code types

View File

@@ -10,7 +10,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID123
3 | # External code
4 | import re # noqa: V123
|
help: Remove the `# noqa` comment
help: Remove the rule code
1 | # Invalid code
- import os # noqa: INVALID123
2 + import os
@@ -28,7 +28,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID456
8 | from itertools import product # Preceeding comment # noqa: INVALID789
9 | # Succeeding comment
|
help: Remove the rule code `INVALID456`
help: Remove the rule code
4 | import re # noqa: V123
5 | # Valid noqa
6 | import sys # noqa: E402
@@ -48,7 +48,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID789
9 | # Succeeding comment
10 | import math # noqa: INVALID000 # Succeeding comment
|
help: Remove the `# noqa` comment
help: Remove the rule code
5 | # Valid noqa
6 | import sys # noqa: E402
7 | from functools import cache # Preceeding comment # noqa: F401, INVALID456
@@ -68,7 +68,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID000
11 | # Mixed valid and invalid
12 | from typing import List # noqa: F401, INVALID123
|
help: Remove the `# noqa` comment
help: Remove the rule code
7 | from functools import cache # Preceeding comment # noqa: F401, INVALID456
8 | from itertools import product # Preceeding comment # noqa: INVALID789
9 | # Succeeding comment
@@ -88,7 +88,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID123
13 | # Test for multiple invalid
14 | from collections import defaultdict # noqa: INVALID100, INVALID200, F401
|
help: Remove the rule code `INVALID123`
help: Remove the rule code
9 | # Succeeding comment
10 | import math # noqa: INVALID000 # Succeeding comment
11 | # Mixed valid and invalid
@@ -108,7 +108,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID100
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
|
help: Remove the rule code `INVALID100`
help: Remove the rule code
11 | # Mixed valid and invalid
12 | from typing import List # noqa: F401, INVALID123
13 | # Test for multiple invalid
@@ -128,7 +128,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID200
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
|
help: Remove the rule code `INVALID200`
help: Remove the rule code
11 | # Mixed valid and invalid
12 | from typing import List # noqa: F401, INVALID123
13 | # Test for multiple invalid
@@ -148,7 +148,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID300
17 | # Test for mixed code types
18 | import json # noqa: E402, INVALID400, V100
|
help: Remove the rule code `INVALID300`
help: Remove the rule code
13 | # Test for multiple invalid
14 | from collections import defaultdict # noqa: INVALID100, INVALID200, F401
15 | # Test for preserving valid codes when fixing
@@ -168,7 +168,7 @@ RUF102 [*] Invalid rule code in `# noqa`: INVALID400
19 | # Test for rule redirects
20 | import pandas as pd # noqa: TCH002
|
help: Remove the rule code `INVALID400`
help: Remove the rule code
15 | # Test for preserving valid codes when fixing
16 | from itertools import chain # noqa: E402, INVALID300, F401
17 | # Test for mixed code types

View File

@@ -7,7 +7,7 @@ source: crates/ruff_linter/src/rules/ruff/mod.rs
--- Summary ---
Removed: 15
Added: 20
Added: 23
--- Removed ---
E741 Ambiguous variable name: `I`
@@ -301,7 +301,6 @@ RUF100 [*] Unused suppression (non-enabled: `E501`)
| ^^^^^^^^^^^^^^^^^^^^^
47 | I = 1
48 | # ruff: enable[E501]
| --------------------
|
help: Remove unused suppression
43 | def f():
@@ -309,10 +308,26 @@ help: Remove unused suppression
45 | # logged to user
- # ruff: disable[E501]
46 | I = 1
- # ruff: enable[E501]
47 |
47 | # ruff: enable[E501]
48 |
49 | def f():
RUF100 [*] Unused suppression (non-enabled: `E501`)
--> suppressions.py:48:5
|
46 | # ruff: disable[E501]
47 | I = 1
48 | # ruff: enable[E501]
| ^^^^^^^^^^^^^^^^^^^^
|
help: Remove unused suppression
45 | # logged to user
46 | # ruff: disable[E501]
47 | I = 1
- # ruff: enable[E501]
48 |
49 |
50 | def f():
RUF100 [*] Unused `noqa` directive (unused: `E741`, `F841`)
@@ -548,11 +563,8 @@ RUF102 [*] Invalid rule code in suppression: YF829
| ^^^^^
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
| -----
|
help: Remove the suppression comment
help: Remove the rule code
90 |
91 | def f():
92 | # Unknown rule codes
@@ -560,10 +572,6 @@ help: Remove the suppression comment
93 | # ruff: disable[F841, RQW320]
94 | value = 0
95 | # ruff: enable[F841, RQW320]
- # ruff: enable[YF829]
96 |
97 |
98 | def f():
RUF102 [*] Invalid rule code in suppression: RQW320
@@ -575,15 +583,30 @@ RUF102 [*] Invalid rule code in suppression: RQW320
| ^^^^^^
95 | value = 0
96 | # ruff: enable[F841, RQW320]
| ------
97 | # ruff: enable[YF829]
|
help: Remove the rule code `RQW320`
help: Remove the rule code
91 | def f():
92 | # Unknown rule codes
93 | # ruff: disable[YF829]
- # ruff: disable[F841, RQW320]
94 + # ruff: disable[F841]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
RUF102 [*] Invalid rule code in suppression: RQW320
--> suppressions.py:96:26
|
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
| ^^^^^^
97 | # ruff: enable[YF829]
|
help: Remove the rule code
93 | # ruff: disable[YF829]
94 | # ruff: disable[F841, RQW320]
95 | value = 0
- # ruff: enable[F841, RQW320]
96 + # ruff: enable[F841]
@@ -592,6 +615,24 @@ help: Remove the rule code `RQW320`
99 |
RUF102 [*] Invalid rule code in suppression: YF829
--> suppressions.py:97:20
|
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
| ^^^^^
|
help: Remove the rule code
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
- # ruff: enable[YF829]
97 |
98 |
99 | def f():
RUF103 [*] Invalid suppression comment: missing suppression codes like `[E501, ...]`
--> suppressions.py:109:5
|

View File

@@ -36,7 +36,6 @@ pub enum PythonVersion {
Py312,
Py313,
Py314,
Py315,
}
impl Default for PythonVersion {
@@ -59,7 +58,6 @@ impl TryFrom<ast::PythonVersion> for PythonVersion {
ast::PythonVersion::PY312 => Ok(Self::Py312),
ast::PythonVersion::PY313 => Ok(Self::Py313),
ast::PythonVersion::PY314 => Ok(Self::Py314),
ast::PythonVersion::PY315 => Ok(Self::Py315),
_ => Err(format!("unrecognized python version {value}")),
}
}
@@ -90,7 +88,6 @@ impl PythonVersion {
Self::Py312 => (3, 12),
Self::Py313 => (3, 13),
Self::Py314 => (3, 14),
Self::Py315 => (3, 15),
}
}
}
@@ -607,21 +604,13 @@ impl TryFrom<String> for RequiredVersion {
type Error = pep440_rs::VersionSpecifiersParseError;
fn try_from(value: String) -> Result<Self, Self::Error> {
value.parse()
}
}
impl FromStr for RequiredVersion {
type Err = pep440_rs::VersionSpecifiersParseError;
fn from_str(value: &str) -> Result<Self, Self::Err> {
// Treat `0.3.1` as `==0.3.1`, for backwards compatibility.
if let Ok(version) = pep440_rs::Version::from_str(value) {
if let Ok(version) = pep440_rs::Version::from_str(&value) {
Ok(Self(VersionSpecifiers::from(
VersionSpecifier::equals_version(version),
)))
} else {
Ok(Self(VersionSpecifiers::from_str(value)?))
Ok(Self(VersionSpecifiers::from_str(&value)?))
}
}
}

View File

@@ -13,6 +13,7 @@ use ruff_python_trivia::Cursor;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize, TextSlice};
use smallvec::{SmallVec, smallvec};
use crate::Locator;
use crate::checkers::ast::LintContext;
use crate::codes::Rule;
use crate::fix::edits::delete_comment;
@@ -23,7 +24,6 @@ use crate::rules::ruff::rules::{
UnmatchedSuppressionComment, UnusedCodes, UnusedNOQA, UnusedNOQAKind, code_is_valid,
};
use crate::settings::LinterSettings;
use crate::{Locator, Violation};
#[derive(Clone, Debug, Eq, PartialEq)]
enum SuppressionAction {
@@ -85,39 +85,11 @@ pub(crate) struct Suppression {
/// Range for which the suppression applies
range: TextRange,
/// Any comments associated with the suppression
comments: SmallVec<[SuppressionComment; 2]>,
/// Whether this suppression actually suppressed a diagnostic
used: Cell<bool>,
comments: DisableEnableComments,
}
impl Suppression {
fn codes(&self) -> &[TextRange] {
&self.comments.disable_comment().codes
}
}
#[derive(Debug)]
pub(crate) enum DisableEnableComments {
/// An implicitly closed disable comment without a matching enable comment.
Disable(SuppressionComment),
/// A matching pair of disable and enable comments.
DisableEnable(SuppressionComment, SuppressionComment),
}
impl DisableEnableComments {
pub(crate) fn disable_comment(&self) -> &SuppressionComment {
match self {
DisableEnableComments::Disable(comment) => comment,
DisableEnableComments::DisableEnable(disable, _) => disable,
}
}
pub(crate) fn enable_comment(&self) -> Option<&SuppressionComment> {
match self {
DisableEnableComments::Disable(_) => None,
DisableEnableComments::DisableEnable(_, enable) => Some(enable),
}
}
}
#[derive(Copy, Clone, Debug)]
@@ -199,17 +171,23 @@ impl Suppressions {
if !code_is_valid(&suppression.code, &context.settings().external) {
// InvalidRuleCode
if context.is_rule_enabled(Rule::InvalidRuleCode) {
Suppressions::report_suppression(
context,
locator,
suppression,
true,
InvalidRuleCode {
rule_code: suppression.code.to_string(),
kind: InvalidRuleCodeKind::Suppression,
whole_comment: suppression.codes().len() == 1,
},
);
for comment in &suppression.comments {
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
comment,
true,
);
context
.report_diagnostic(
InvalidRuleCode {
rule_code: suppression.code.to_string(),
kind: InvalidRuleCodeKind::Suppression,
},
range,
)
.set_fix(Fix::safe_edit(edit));
}
}
} else if !suppression.used.get() {
// UnusedNOQA
@@ -219,37 +197,42 @@ impl Suppressions {
) else {
continue; // "external" lint code, don't treat it as unused
};
for comment in &suppression.comments {
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
comment,
false,
);
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
Suppressions::report_suppression(
context,
locator,
suppression,
false,
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
);
context
.report_diagnostic(
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
range,
)
.set_fix(Fix::safe_edit(edit));
}
}
} else if let DisableEnableComments::Disable(comment) = &suppression.comments {
} else if suppression.comments.len() == 1 {
// UnmatchedSuppressionComment
if unmatched_ranges.insert(comment.range) {
context.report_diagnostic_if_enabled(
UnmatchedSuppressionComment {},
comment.range,
);
let range = suppression.comments[0].range;
if unmatched_ranges.insert(range) {
context.report_diagnostic_if_enabled(UnmatchedSuppressionComment {}, range);
}
}
}
@@ -284,35 +267,6 @@ impl Suppressions {
}
}
fn report_suppression<T: Violation>(
context: &LintContext,
locator: &Locator,
suppression: &Suppression,
highlight_only_code: bool,
kind: T,
) {
let disable_comment = suppression.comments.disable_comment();
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
disable_comment,
highlight_only_code,
);
let mut diagnostic = context.report_diagnostic(kind, range);
if let Some(enable_comment) = suppression.comments.enable_comment() {
let (enable_range, enable_range_edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
enable_comment,
highlight_only_code,
);
diagnostic.secondary_annotation("", enable_range);
diagnostic.set_fix(Fix::safe_edits(edit, [enable_range_edit]));
} else {
diagnostic.set_fix(Fix::safe_edit(edit));
}
}
fn delete_code_or_comment(
locator: &Locator<'_>,
suppression: &Suppression,
@@ -470,10 +424,7 @@ impl<'a> SuppressionsBuilder<'a> {
self.valid.push(Suppression {
code: code.into(),
range: combined_range,
comments: DisableEnableComments::DisableEnable(
comment.comment.clone(),
other.comment.clone(),
),
comments: smallvec![comment.comment.clone(), other.comment.clone()],
used: false.into(),
});
}
@@ -490,7 +441,7 @@ impl<'a> SuppressionsBuilder<'a> {
self.valid.push(Suppression {
code: code.into(),
range: implicit_range,
comments: DisableEnableComments::Disable(comment.comment.clone()),
comments: smallvec![comment.comment.clone()],
used: false.into(),
});
}
@@ -692,7 +643,7 @@ mod tests {
use insta::assert_debug_snapshot;
use itertools::Itertools;
use ruff_python_parser::{Mode, ParseOptions, parse};
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_text_size::{TextRange, TextSize};
use similar::DiffableStr;
use crate::{
@@ -754,22 +705,24 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo]\nprint('hello')\n# ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [],
@@ -798,28 +751,30 @@ def foo():
Suppression {
covered_source: "# ruff: disable[bar]\n print('hello')\n\n",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo]\nprint('hello')\n\ndef foo():\n # ruff: disable[bar]\n print('hello')\n\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [],
@@ -848,42 +803,46 @@ class Foo:
Suppression {
covered_source: "# ruff: disable[bar]\n print('hello')\n # ruff: enable[bar]",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo]\n def bar(self):\n # ruff: disable[bar]\n print('hello')\n # ruff: enable[bar]\n # ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [],
@@ -913,42 +872,46 @@ def foo():
Suppression {
covered_source: "# ruff: disable[foo]\n print('hello')\n # ruff: disable[bar]\n print('hello')\n # ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[bar]\n print('hello')\n # ruff: enable[foo]\n print('hello')\n # ruff: enable[bar]",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
],
},
],
invalid: [],
@@ -973,46 +936,50 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[foo, bar]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[foo, bar]",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
],
invalid: [],
@@ -1038,15 +1005,16 @@ print('world')
Suppression {
covered_source: "# ruff: disable[foo]\nprint('hello')\n# ruff: enable[bar]\nprint('world')\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [
@@ -1083,30 +1051,32 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[bar, foo]\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[bar, foo]\n",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
],
invalid: [
@@ -1146,35 +1116,38 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo] first\nprint('hello')\n# ruff: disable[foo] second\nprint('hello')\n# ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo] first",
action: Disable,
codes: [
"foo",
],
reason: "first",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo] first",
action: Disable,
codes: [
"foo",
],
reason: "first",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo] second\nprint('hello')\n# ruff: enable[foo]\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo] second",
action: Disable,
codes: [
"foo",
],
reason: "second",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo] second",
action: Disable,
codes: [
"foo",
],
reason: "second",
},
],
},
],
invalid: [],
@@ -1216,92 +1189,100 @@ def bar():
Suppression {
covered_source: "# ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]\n# ruff: enable[alpha]\n\n# ruff: disable # parse error!\n",
code: "delta",
disable_comment: SuppressionComment {
text: "# ruff: disable[delta] unmatched",
action: Disable,
codes: [
"delta",
],
reason: "unmatched",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[delta] unmatched",
action: Disable,
codes: [
"delta",
],
reason: "unmatched",
},
],
},
Suppression {
covered_source: "# ruff: disable[beta,gamma]\n if True:\n # ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]",
code: "beta",
disable_comment: SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[beta,gamma]\n if True:\n # ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]",
code: "gamma",
disable_comment: SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[zeta] unmatched\n pass\n# ruff: enable[zeta] underindented\n pass\n",
code: "zeta",
disable_comment: SuppressionComment {
text: "# ruff: disable[zeta] unmatched",
action: Disable,
codes: [
"zeta",
],
reason: "unmatched",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[zeta] unmatched",
action: Disable,
codes: [
"zeta",
],
reason: "unmatched",
},
],
},
Suppression {
covered_source: "# ruff: disable[alpha]\ndef foo():\n # ruff: disable[beta,gamma]\n if True:\n # ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]\n# ruff: enable[alpha]",
code: "alpha",
disable_comment: SuppressionComment {
text: "# ruff: disable[alpha]",
action: Disable,
codes: [
"alpha",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[alpha]",
action: Enable,
codes: [
"alpha",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[alpha]",
action: Disable,
codes: [
"alpha",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[alpha]",
action: Enable,
codes: [
"alpha",
],
reason: "",
},
],
},
],
invalid: [
@@ -1551,8 +1532,10 @@ def bar():
#[test]
fn comment_attributes() {
let source = "# ruff: disable[foo, bar] hello world";
let mut parser =
SuppressionParser::new(source, TextRange::new(0.into(), source.text_len()));
let mut parser = SuppressionParser::new(
source,
TextRange::new(0.into(), TextSize::try_from(source.len()).unwrap()),
);
let comment = parser.parse_comment().unwrap();
assert_eq!(comment.action, SuppressionAction::Disable);
assert_eq!(
@@ -1571,12 +1554,12 @@ def bar():
source: &'_ str,
) -> Result<DebugSuppressionComment<'_>, ParseError> {
let offset = TextSize::new(source.find('#').unwrap_or(0).try_into().unwrap());
let mut parser = SuppressionParser::new(source, TextRange::new(offset, source.text_len()));
let mut parser = SuppressionParser::new(
source,
TextRange::new(offset, TextSize::try_from(source.len()).unwrap()),
);
match parser.parse_comment() {
Ok(comment) => Ok(DebugSuppressionComment {
source,
comment: Some(comment),
}),
Ok(comment) => Ok(DebugSuppressionComment { source, comment }),
Err(error) => Err(error),
}
}
@@ -1656,18 +1639,16 @@ def bar():
.field("covered_source", &&self.source[self.suppression.range])
.field("code", &self.suppression.code)
.field(
"disable_comment",
&DebugSuppressionComment {
source: self.source,
comment: Some(self.suppression.comments.disable_comment().clone()),
},
)
.field(
"enable_comment",
&DebugSuppressionComment {
source: self.source,
comment: self.suppression.comments.enable_comment().cloned(),
},
"comments",
&self
.suppression
.comments
.iter()
.map(|comment| DebugSuppressionComment {
source: self.source,
comment: comment.clone(),
})
.collect_vec(),
)
.finish()
}
@@ -1686,7 +1667,7 @@ def bar():
"comment",
&DebugSuppressionComment {
source: self.source,
comment: Some(self.invalid.comment.clone()),
comment: self.invalid.comment.clone(),
},
)
.finish()
@@ -1709,27 +1690,23 @@ def bar():
struct DebugSuppressionComment<'a> {
source: &'a str,
comment: Option<SuppressionComment>,
comment: SuppressionComment,
}
impl fmt::Debug for DebugSuppressionComment<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match &self.comment {
Some(comment) => f
.debug_struct("SuppressionComment")
.field("text", &&self.source[comment.range])
.field("action", &comment.action)
.field(
"codes",
&DebugCodes {
source: self.source,
codes: &comment.codes,
},
)
.field("reason", &&self.source[comment.reason])
.finish(),
None => f.debug_tuple("None").finish(),
}
f.debug_struct("SuppressionComment")
.field("text", &&self.source[self.comment.range])
.field("action", &self.comment.action)
.field(
"codes",
&DebugCodes {
source: self.source,
codes: &self.comment.codes,
},
)
.field("reason", &&self.source[self.comment.reason])
.finish()
}
}

View File

@@ -35,10 +35,6 @@ impl PythonVersion {
major: 3,
minor: 14,
};
pub const PY315: PythonVersion = PythonVersion {
major: 3,
minor: 15,
};
pub fn iter() -> impl Iterator<Item = PythonVersion> {
[
@@ -50,7 +46,6 @@ impl PythonVersion {
PythonVersion::PY312,
PythonVersion::PY313,
PythonVersion::PY314,
PythonVersion::PY315,
]
.into_iter()
}
@@ -66,7 +61,7 @@ impl PythonVersion {
/// The latest Python version supported in preview
pub fn latest_preview() -> Self {
let latest_preview = Self::PY315;
let latest_preview = Self::PY314;
debug_assert!(latest_preview >= Self::latest());
latest_preview
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.14.11"
version = "0.14.10"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -7,6 +7,7 @@ use std::collections::BTreeMap;
use std::env::VarError;
use std::num::{NonZeroU8, NonZeroU16};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::{Context, Result, anyhow};
use glob::{GlobError, Paths, PatternError, glob};
@@ -35,7 +36,8 @@ use ruff_linter::settings::{
DEFAULT_SELECTORS, DUMMY_VARIABLE_RGX, LinterSettings, TASK_TAGS, TargetVersion,
};
use ruff_linter::{
RuleSelector, fs, warn_user_once, warn_user_once_by_id, warn_user_once_by_message,
RUFF_PKG_VERSION, RuleSelector, fs, warn_user_once, warn_user_once_by_id,
warn_user_once_by_message,
};
use ruff_python_ast as ast;
use ruff_python_formatter::{
@@ -51,7 +53,6 @@ use crate::options::{
Flake8UnusedArgumentsOptions, FormatOptions, IsortOptions, LintCommonOptions, LintOptions,
McCabeOptions, Options, Pep8NamingOptions, PyUpgradeOptions, PycodestyleOptions,
PydoclintOptions, PydocstyleOptions, PyflakesOptions, PylintOptions, RuffOptions,
validate_required_version,
};
use crate::settings::{
EXCLUDE, FileResolverSettings, FormatterSettings, INCLUDE, INCLUDE_PREVIEW, LineEnding,
@@ -154,7 +155,13 @@ pub struct Configuration {
impl Configuration {
pub fn into_settings(self, project_root: &Path) -> Result<Settings> {
if let Some(required_version) = &self.required_version {
validate_required_version(required_version)?;
let ruff_pkg_version = pep440_rs::Version::from_str(RUFF_PKG_VERSION)
.expect("RUFF_PKG_VERSION is not a valid PEP 440 version specifier");
if !required_version.contains(&ruff_pkg_version) {
return Err(anyhow!(
"Required version `{required_version}` does not match the running version `{RUFF_PKG_VERSION}`"
));
}
}
let linter_target_version = TargetVersion(self.target_version);

View File

@@ -1,19 +1,15 @@
use anyhow::Result;
use regex::Regex;
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use serde::de::{self};
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::{BTreeMap, BTreeSet};
use std::path::PathBuf;
use std::str::FromStr;
use strum::IntoEnumIterator;
use unicode_normalization::UnicodeNormalization;
use crate::settings::LineEnding;
use ruff_formatter::IndentStyle;
use ruff_graph::Direction;
use ruff_linter::RUFF_PKG_VERSION;
use ruff_linter::line_width::{IndentWidth, LineLength};
use ruff_linter::rules::flake8_import_conventions::settings::BannedAliases;
use ruff_linter::rules::flake8_pytest_style::settings::SettingsError;
@@ -560,17 +556,6 @@ pub struct LintOptions {
pub future_annotations: Option<bool>,
}
pub fn validate_required_version(required_version: &RequiredVersion) -> anyhow::Result<()> {
let ruff_pkg_version = pep440_rs::Version::from_str(RUFF_PKG_VERSION)
.expect("RUFF_PKG_VERSION is not a valid PEP 440 version specifier");
if !required_version.contains(&ruff_pkg_version) {
return Err(anyhow::anyhow!(
"Required version `{required_version}` does not match the running version `{RUFF_PKG_VERSION}`"
));
}
Ok(())
}
/// Newtype wrapper for [`LintCommonOptions`] that allows customizing the JSON schema and omitting the fields from the [`OptionsMetadata`].
#[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)]
#[serde(transparent)]

View File

@@ -5,13 +5,12 @@ use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use log::debug;
use pep440_rs::{Operator, Version, VersionSpecifiers};
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use ruff_linter::settings::types::{PythonVersion, RequiredVersion};
use ruff_linter::settings::types::PythonVersion;
use crate::options::{Options, validate_required_version};
use crate::options::Options;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
struct Tools {
@@ -41,38 +40,20 @@ impl Pyproject {
}
}
fn parse_toml<P: AsRef<Path>, T: DeserializeOwned>(path: P, table_path: &[&str]) -> Result<T> {
/// Parse a `ruff.toml` file.
fn parse_ruff_toml<P: AsRef<Path>>(path: P) -> Result<Options> {
let path = path.as_ref();
let contents = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
// Parse the TOML document once into a spanned representation so we can:
// - Inspect `required-version` without triggering strict deserialization errors.
// - Deserialize with precise spans (line/column and excerpt) on errors.
let root = toml::de::DeTable::parse(&contents)
.with_context(|| format!("Failed to parse {}", path.display()))?;
check_required_version(root.get_ref(), table_path)?;
let deserializer = toml::de::Deserializer::from(root);
T::deserialize(deserializer)
.map_err(|mut err| {
// `Deserializer::from` doesn't have access to the original input, but we do.
// Attach it so TOML errors include line/column and a source excerpt.
err.set_input(Some(&contents));
err
})
.with_context(|| format!("Failed to parse {}", path.display()))
}
/// Parse a `ruff.toml` file.
fn parse_ruff_toml<P: AsRef<Path>>(path: P) -> Result<Options> {
parse_toml(path, &[])
toml::from_str(&contents).with_context(|| format!("Failed to parse {}", path.display()))
}
/// Parse a `pyproject.toml` file.
fn parse_pyproject_toml<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
parse_toml(path, &["tool", "ruff"])
let path = path.as_ref();
let contents = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
toml::from_str(&contents).with_context(|| format!("Failed to parse {}", path.display()))
}
/// Return `true` if a `pyproject.toml` contains a `[tool.ruff]` section.
@@ -117,33 +98,6 @@ pub fn find_settings_toml<P: AsRef<Path>>(path: P) -> Result<Option<PathBuf>> {
Ok(None)
}
fn check_required_version(value: &toml::de::DeTable, table_path: &[&str]) -> Result<()> {
let mut current = value;
for key in table_path {
let Some(next) = current.get(*key) else {
return Ok(());
};
let toml::de::DeValue::Table(next) = next.get_ref() else {
return Ok(());
};
current = next;
}
let required_version = current
.get("required-version")
.and_then(|value| value.get_ref().as_str());
let Some(required_version) = required_version else {
return Ok(());
};
// If it doesn't parse, we just fall through to normal parsing; it will give a nicer error message.
if let Ok(required_version) = required_version.parse::<RequiredVersion>() {
validate_required_version(&required_version)?;
}
Ok(())
}
/// Derive target version from `required-version` in `pyproject.toml`, if
/// such a file exists in an ancestor directory.
pub fn find_fallback_target_version<P: AsRef<Path>>(path: P) -> Option<PythonVersion> {

View File

@@ -102,8 +102,8 @@ impl Relativity {
#[derive(Debug)]
pub struct Resolver<'a> {
pyproject_config: &'a PyprojectConfig,
/// All [`Settings`] that have been added to the resolver, along with their config file paths.
settings: Vec<(Settings, PathBuf)>,
/// All [`Settings`] that have been added to the resolver.
settings: Vec<Settings>,
/// A router from path to index into the `settings` vector.
router: Router<usize>,
}
@@ -146,8 +146,8 @@ impl<'a> Resolver<'a> {
}
/// Add a resolved [`Settings`] under a given [`PathBuf`] scope.
fn add(&mut self, path: &Path, settings: Settings, config_path: PathBuf) {
self.settings.push((settings, config_path));
fn add(&mut self, path: &Path, settings: Settings) {
self.settings.push(settings);
// Normalize the path to use `/` separators and escape the '{' and '}' characters,
// which matchit uses for routing parameters.
@@ -172,27 +172,13 @@ impl<'a> Resolver<'a> {
/// Return the appropriate [`Settings`] for a given [`Path`].
pub fn resolve(&self, path: &Path) -> &Settings {
self.resolve_with_path(path).0
}
/// Return the appropriate [`Settings`] and config file path for a given [`Path`].
pub fn resolve_with_path(&self, path: &Path) -> (&Settings, Option<&Path>) {
match self.pyproject_config.strategy {
PyprojectDiscoveryStrategy::Fixed => (
&self.pyproject_config.settings,
self.pyproject_config.path.as_deref(),
),
PyprojectDiscoveryStrategy::Fixed => &self.pyproject_config.settings,
PyprojectDiscoveryStrategy::Hierarchical => self
.router
.at(path.to_slash_lossy().as_ref())
.map(|Match { value, .. }| {
let (settings, config_path) = &self.settings[*value];
(settings, Some(config_path.as_path()))
})
.unwrap_or((
&self.pyproject_config.settings,
self.pyproject_config.path.as_deref(),
)),
.map(|Match { value, .. }| &self.settings[*value])
.unwrap_or(&self.pyproject_config.settings),
}
}
@@ -269,8 +255,7 @@ impl<'a> Resolver<'a> {
/// Return an iterator over the resolved [`Settings`] in this [`Resolver`].
pub fn settings(&self) -> impl Iterator<Item = &Settings> {
std::iter::once(&self.pyproject_config.settings)
.chain(self.settings.iter().map(|(settings, _)| settings))
std::iter::once(&self.pyproject_config.settings).chain(&self.settings)
}
}
@@ -394,17 +379,17 @@ pub fn resolve_configuration(
/// Extract the project root (scope) and [`Settings`] from a given
/// `pyproject.toml`.
fn resolve_scoped_settings(
pyproject: &Path,
fn resolve_scoped_settings<'a>(
pyproject: &'a Path,
transformer: &dyn ConfigurationTransformer,
origin: ConfigurationOrigin,
) -> Result<(PathBuf, Settings)> {
) -> Result<(&'a Path, Settings)> {
let relativity = Relativity::from(origin);
let configuration = resolve_configuration(pyproject, transformer, origin)?;
let project_root = relativity.resolve(pyproject);
let settings = configuration.into_settings(project_root)?;
Ok((project_root.to_path_buf(), settings))
Ok((project_root, settings))
}
/// Extract the [`Settings`] from a given `pyproject.toml` and process the
@@ -470,7 +455,7 @@ pub fn python_files_in_path<'a>(
transformer,
ConfigurationOrigin::Ancestor,
)?;
resolver.add(&root, settings, pyproject);
resolver.add(root, settings);
// We found the closest configuration.
break;
}
@@ -662,11 +647,7 @@ impl ParallelVisitor for PythonFilesVisitor<'_, '_> {
ConfigurationOrigin::Ancestor,
) {
Ok((root, settings)) => {
self.global
.resolver
.write()
.unwrap()
.add(&root, settings, pyproject);
self.global.resolver.write().unwrap().add(root, settings);
}
Err(err) => {
self.local_error = Err(err);
@@ -786,7 +767,7 @@ pub fn python_file_at_path(
if let Some(pyproject) = settings_toml(ancestor)? {
let (root, settings) =
resolve_scoped_settings(&pyproject, transformer, ConfigurationOrigin::Unknown)?;
resolver.add(&root, settings, pyproject);
resolver.add(root, settings);
break;
}
}

3
crates/ty/docs/cli.md generated
View File

@@ -37,8 +37,7 @@ ty check [OPTIONS] [PATH]...
<h3 class="cli-reference">Options</h3>
<dl class="cli-reference"><dt id="ty-check--add-ignore"><a href="#ty-check--add-ignore"><code>--add-ignore</code></a></dt><dd><p>Adds <code>ty: ignore</code> comments to suppress all rule diagnostics</p>
</dd><dt id="ty-check--color"><a href="#ty-check--color"><code>--color</code></a> <i>when</i></dt><dd><p>Control when colored output is used</p>
<dl class="cli-reference"><dt id="ty-check--color"><a href="#ty-check--color"><code>--color</code></a> <i>when</i></dt><dd><p>Control when colored output is used</p>
<p>Possible values:</p>
<ul>
<li><code>auto</code>: Display colors if the output goes to an interactive terminal</li>

217
crates/ty/docs/rules.md generated
View File

@@ -8,7 +8,7 @@
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20ambiguous-protocol-member" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L540" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L541" target="_blank">View source</a>
</small>
@@ -80,7 +80,7 @@ def test(): -> "int":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-non-callable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L139" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L140" target="_blank">View source</a>
</small>
@@ -104,7 +104,7 @@ Calling a non-callable object will raise a `TypeError` at runtime.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.7">0.0.7</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-top-callable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L157" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L158" target="_blank">View source</a>
</small>
@@ -135,7 +135,7 @@ def f(x: object):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-argument-forms" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L208" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L209" target="_blank">View source</a>
</small>
@@ -167,7 +167,7 @@ f(int) # error
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-declarations" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L234" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L235" target="_blank">View source</a>
</small>
@@ -198,7 +198,7 @@ a = 1
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-metaclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L259" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L260" target="_blank">View source</a>
</small>
@@ -230,7 +230,7 @@ class C(A, B): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-class-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L285" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L286" target="_blank">View source</a>
</small>
@@ -262,7 +262,7 @@ class B(A): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-type-alias-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L311" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L312" target="_blank">View source</a>
</small>
@@ -290,7 +290,7 @@ type B = A
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.16">0.0.1-alpha.16</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20deprecated" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L355" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L356" target="_blank">View source</a>
</small>
@@ -317,7 +317,7 @@ old_func() # emits [deprecated] diagnostic
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20division-by-zero" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L333" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L334" target="_blank">View source</a>
</small>
@@ -346,7 +346,7 @@ false positives it can produce.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L376" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L377" target="_blank">View source</a>
</small>
@@ -373,7 +373,7 @@ class B(A, A): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-kw-only" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L397" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L398" target="_blank">View source</a>
</small>
@@ -467,7 +467,7 @@ def test(): -> "int":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20ignore-comment-unknown-rule" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Fsuppression.rs#L54" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Fsuppression.rs#L50" target="_blank">View source</a>
</small>
@@ -529,7 +529,7 @@ def test(): -> "Literal[5]":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20inconsistent-mro" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L623" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L624" target="_blank">View source</a>
</small>
@@ -559,7 +559,7 @@ class C(A, B): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20index-out-of-bounds" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L647" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L648" target="_blank">View source</a>
</small>
@@ -585,7 +585,7 @@ t[3] # IndexError: tuple index out of range
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20instance-layout-conflict" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L429" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L430" target="_blank">View source</a>
</small>
@@ -674,7 +674,7 @@ an atypical memory layout.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-argument-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L701" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L702" target="_blank">View source</a>
</small>
@@ -701,7 +701,7 @@ func("foo") # error: [invalid-argument-type]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-assignment" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L741" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L742" target="_blank">View source</a>
</small>
@@ -729,7 +729,7 @@ a: int = ''
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-attribute-access" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2044" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2085" target="_blank">View source</a>
</small>
@@ -763,7 +763,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-await" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L763" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L764" target="_blank">View source</a>
</small>
@@ -799,7 +799,7 @@ asyncio.run(main())
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L793" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L794" target="_blank">View source</a>
</small>
@@ -823,7 +823,7 @@ class A(42): ... # error: [invalid-base]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-context-manager" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L844" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L845" target="_blank">View source</a>
</small>
@@ -850,7 +850,7 @@ with 1:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-declaration" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L865" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L866" target="_blank">View source</a>
</small>
@@ -879,7 +879,7 @@ a: str
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-exception-caught" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L888" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L889" target="_blank">View source</a>
</small>
@@ -923,7 +923,7 @@ except ZeroDivisionError:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.28">0.0.1-alpha.28</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-explicit-override" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1714" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1755" target="_blank">View source</a>
</small>
@@ -965,7 +965,7 @@ class D(A):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.35">0.0.1-alpha.35</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-frozen-dataclass-subclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2295" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2336" target="_blank">View source</a>
</small>
@@ -1009,7 +1009,7 @@ class NonFrozenChild(FrozenBase): # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-generic-class" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L924" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L925" target="_blank">View source</a>
</small>
@@ -1047,7 +1047,7 @@ class D(Generic[U, T]): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-ignore-comment" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Fsuppression.rs#L79" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Fsuppression.rs#L75" target="_blank">View source</a>
</small>
@@ -1077,7 +1077,7 @@ a = 20 / 0 # type: ignore
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.17">0.0.1-alpha.17</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-key" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L668" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L669" target="_blank">View source</a>
</small>
@@ -1116,7 +1116,7 @@ carol = Person(name="Carol", age=25) # typo!
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-legacy-type-variable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L955" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L956" target="_blank">View source</a>
</small>
@@ -1151,7 +1151,7 @@ def f(t: TypeVar("U")): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-metaclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1052" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1053" target="_blank">View source</a>
</small>
@@ -1185,7 +1185,7 @@ class B(metaclass=f): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-method-override" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2197" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2238" target="_blank">View source</a>
</small>
@@ -1292,7 +1292,7 @@ Correct use of `@override` is enforced by ty's `invalid-explicit-override` rule.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-named-tuple" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L575" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L576" target="_blank">View source</a>
</small>
@@ -1346,7 +1346,7 @@ AttributeError: Cannot overwrite NamedTuple attribute _asdict
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-newtype" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1028" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1029" target="_blank">View source</a>
</small>
@@ -1376,7 +1376,7 @@ Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType`
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-overload" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1079" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1080" target="_blank">View source</a>
</small>
@@ -1426,7 +1426,7 @@ def foo(x: int) -> int: ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-parameter-default" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1178" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1179" target="_blank">View source</a>
</small>
@@ -1452,7 +1452,7 @@ def f(a: int = ''): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-paramspec" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L983" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L984" target="_blank">View source</a>
</small>
@@ -1483,7 +1483,7 @@ P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assig
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-protocol" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L511" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L512" target="_blank">View source</a>
</small>
@@ -1517,7 +1517,7 @@ TypeError: Protocols can only inherit from other protocols, got <class 'int'>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-raise" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1198" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1199" target="_blank">View source</a>
</small>
@@ -1566,7 +1566,7 @@ def g():
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-return-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L722" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L723" target="_blank">View source</a>
</small>
@@ -1591,7 +1591,7 @@ def func() -> int:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-super-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1241" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1242" target="_blank">View source</a>
</small>
@@ -1687,7 +1687,7 @@ class C: ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.10">0.0.10</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-total-ordering" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2333" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2374" target="_blank">View source</a>
</small>
@@ -1733,7 +1733,7 @@ class MyClass:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.6">0.0.1-alpha.6</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-alias-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1007" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1008" target="_blank">View source</a>
</small>
@@ -1760,7 +1760,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1473" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1474" target="_blank">View source</a>
</small>
@@ -1807,7 +1807,7 @@ Bar[int] # error: too few arguments
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-checking-constant" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1280" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1281" target="_blank">View source</a>
</small>
@@ -1837,7 +1837,7 @@ TYPE_CHECKING = ''
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-form" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1304" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1305" target="_blank">View source</a>
</small>
@@ -1867,7 +1867,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-call" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1356" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1357" target="_blank">View source</a>
</small>
@@ -1901,7 +1901,7 @@ f(10) # Error
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1328" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1329" target="_blank">View source</a>
</small>
@@ -1935,7 +1935,7 @@ class C:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-variable-constraints" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1384" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1385" target="_blank">View source</a>
</small>
@@ -1970,7 +1970,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.9">0.0.9</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-typed-dict-statement" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2172" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2213" target="_blank">View source</a>
</small>
@@ -2001,7 +2001,7 @@ class Foo(TypedDict):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1413" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1414" target="_blank">View source</a>
</small>
@@ -2026,7 +2026,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-typed-dict-key" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2145" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2186" target="_blank">View source</a>
</small>
@@ -2059,7 +2059,7 @@ alice["age"] # KeyError
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20no-matching-overload" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1432" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1433" target="_blank">View source</a>
</small>
@@ -2088,7 +2088,7 @@ func("string") # error: [no-matching-overload]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-iterable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1514" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1515" target="_blank">View source</a>
</small>
@@ -2114,7 +2114,7 @@ for i in 34: # TypeError: 'int' object is not iterable
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-subscriptable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1455" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1456" target="_blank">View source</a>
</small>
@@ -2138,7 +2138,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20override-of-final-method" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1687" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1728" target="_blank">View source</a>
</small>
@@ -2171,7 +2171,7 @@ class B(A):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20parameter-already-assigned" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1565" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1566" target="_blank">View source</a>
</small>
@@ -2198,7 +2198,7 @@ f(1, x=2) # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20positional-only-parameter-as-kwarg" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1898" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1939" target="_blank">View source</a>
</small>
@@ -2225,7 +2225,7 @@ f(x=1) # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-attribute" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1586" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1587" target="_blank">View source</a>
</small>
@@ -2253,7 +2253,7 @@ A.c # AttributeError: type object 'A' has no attribute 'c'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-implicit-call" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L182" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L183" target="_blank">View source</a>
</small>
@@ -2285,7 +2285,7 @@ A()[0] # TypeError: 'A' object is not subscriptable
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-import" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1608" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1609" target="_blank">View source</a>
</small>
@@ -2322,7 +2322,7 @@ from module import a # ImportError: cannot import name 'a' from 'module'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unresolved-reference" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1638" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1639" target="_blank">View source</a>
</small>
@@ -2386,7 +2386,7 @@ def test(): -> "int":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20redundant-cast" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2072" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2113" target="_blank">View source</a>
</small>
@@ -2413,7 +2413,7 @@ cast(int, f()) # Redundant
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20static-assert-error" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2020" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2061" target="_blank">View source</a>
</small>
@@ -2437,13 +2437,59 @@ static_assert(1 + 1 == 3) # error: evaluates to `False`
static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known truthiness
```
## `subclass-of-dataclass-with-order`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.11">0.0.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-dataclass-with-order" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1688" target="_blank">View source</a>
</small>
**What it does**
Checks for classes that inherit from a dataclass with `order=True`.
**Why is this bad?**
When a dataclass has `order=True`, comparison methods (`__lt__`, `__le__`, `__gt__`, `__ge__`)
are generated that compare instances as tuples of their fields. These methods raise a
`TypeError` at runtime when comparing instances of different classes in the inheritance
hierarchy, even if one is a subclass of the other.
This violates the [Liskov Substitution Principle] because child class instances cannot be
used in all contexts where parent class instances are expected.
**Example**
```python
from dataclasses import dataclass
@dataclass(order=True)
class Parent:
value: int
class Child(Parent): # ty emits a warning here
pass
# At runtime, this raises TypeError:
# Child(1) < Parent(2)
```
Consider using [`functools.total_ordering`] instead, which does not have this limitation.
[Liskov Substitution Principle]: https://en.wikipedia.org/wiki/Liskov_substitution_principle
[`functools.total_ordering`]: https://docs.python.org/3/library/functools.html#functools.total_ordering
## `subclass-of-final-class`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-final-class" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1664" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1665" target="_blank">View source</a>
</small>
@@ -2472,7 +2518,7 @@ class B(A): ... # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.30">0.0.1-alpha.30</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20super-call-in-named-tuple-method" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1832" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1873" target="_blank">View source</a>
</small>
@@ -2506,7 +2552,7 @@ class F(NamedTuple):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20too-many-positional-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1772" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1813" target="_blank">View source</a>
</small>
@@ -2533,7 +2579,7 @@ f("foo") # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20type-assertion-failure" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1750" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1791" target="_blank">View source</a>
</small>
@@ -2561,7 +2607,7 @@ def _(x: int):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unavailable-implicit-super-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1793" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1834" target="_blank">View source</a>
</small>
@@ -2607,7 +2653,7 @@ class A:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20undefined-reveal" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1859" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1900" target="_blank">View source</a>
</small>
@@ -2631,7 +2677,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unknown-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1877" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1918" target="_blank">View source</a>
</small>
@@ -2658,7 +2704,7 @@ f(x=1, y=2) # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-attribute" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1919" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1960" target="_blank">View source</a>
</small>
@@ -2686,7 +2732,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.15">0.0.1-alpha.15</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-global" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2093" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2134" target="_blank">View source</a>
</small>
@@ -2744,7 +2790,7 @@ def g():
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-import" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1941" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1982" target="_blank">View source</a>
</small>
@@ -2769,7 +2815,7 @@ import foo # ModuleNotFoundError: No module named 'foo'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-reference" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1960" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2001" target="_blank">View source</a>
</small>
@@ -2794,7 +2840,7 @@ print(x) # NameError: name 'x' is not defined
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.7">0.0.1-alpha.7</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L811" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L812" target="_blank">View source</a>
</small>
@@ -2833,7 +2879,7 @@ class D(C): ... # error: [unsupported-base]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-bool-conversion" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1534" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1535" target="_blank">View source</a>
</small>
@@ -2870,7 +2916,7 @@ b1 < b2 < b1 # exception raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-operator" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1979" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2020" target="_blank">View source</a>
</small>
@@ -2895,7 +2941,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A'
## `unused-ignore-comment`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unused-ignore-comment" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Fsuppression.rs#L25" target="_blank">View source</a>
@@ -2904,11 +2950,11 @@ Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.
**What it does**
Checks for `ty: ignore` or `type: ignore` directives that are no longer applicable.
Checks for `type: ignore` or `ty: ignore` directives that are no longer applicable.
**Why is this bad?**
A `ty: ignore` directive that no longer matches any diagnostic violations is likely
A `type: ignore` directive that no longer matches any diagnostic violations is likely
included by mistake, and should be removed to avoid confusion.
**Examples**
@@ -2923,18 +2969,13 @@ Use instead:
a = 20 / 2
```
**Options**
Set [`analysis.respect-type-ignore-comments`](https://docs.astral.sh/ty/reference/configuration/#respect-type-ignore-comments)
to `false` to prevent this rule from reporting unused `type: ignore` comments.
## `useless-overload-body`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20useless-overload-body" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1122" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1123" target="_blank">View source</a>
</small>
@@ -2997,7 +3038,7 @@ def foo(x: int | str) -> int | str:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20zero-stepsize-in-slice" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2001" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2042" target="_blank">View source</a>
</small>

View File

@@ -54,10 +54,6 @@ pub(crate) struct CheckCommand {
)]
pub paths: Vec<SystemPathBuf>,
/// Adds `ty: ignore` comments to suppress all rule diagnostics.
#[arg(long)]
pub(crate) add_ignore: bool,
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,

View File

@@ -4,36 +4,37 @@ mod printer;
mod python_version;
mod version;
pub use args::Cli;
use ty_project::metadata::settings::TerminalSettings;
use ty_static::EnvVars;
use std::fmt::Write;
use std::process::{ExitCode, Termination};
use std::sync::Mutex;
use anyhow::Result;
use crate::args::{CheckCommand, Command, TerminalColor};
use crate::logging::{VerbosityLevel, setup_tracing};
use crate::printer::Printer;
use anyhow::{Context, anyhow};
use clap::{CommandFactory, Parser};
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use rayon::ThreadPoolBuilder;
use ruff_db::cancellation::{Canceled, CancellationToken, CancellationTokenSource};
use ruff_db::cancellation::{CancellationToken, CancellationTokenSource};
use ruff_db::diagnostic::{
Diagnostic, DiagnosticId, DisplayDiagnosticConfig, DisplayDiagnostics, Severity,
};
use ruff_db::files::File;
use ruff_db::max_parallelism;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::{STACK_SIZE, max_parallelism};
use salsa::Database;
use ty_project::metadata::options::ProjectOptionsOverrides;
use ty_project::metadata::settings::TerminalSettings;
use ty_project::watch::ProjectWatcher;
use ty_project::{CollectReporter, Db, suppress_all_diagnostics, watch};
use ty_project::{CollectReporter, Db, watch};
use ty_project::{ProjectDatabase, ProjectMetadata};
use ty_server::run_server;
use ty_static::EnvVars;
use crate::args::{CheckCommand, Command, TerminalColor};
use crate::logging::{VerbosityLevel, setup_tracing};
use crate::printer::Printer;
pub use args::Cli;
pub fn run() -> anyhow::Result<ExitStatus> {
setup_rayon();
@@ -111,12 +112,6 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
.map(|path| SystemPath::absolute(path, &cwd))
.collect();
let mode = if args.add_ignore {
MainLoopMode::AddIgnore
} else {
MainLoopMode::Check
};
let system = OsSystem::new(&cwd);
let watch = args.watch;
let exit_zero = args.exit_zero;
@@ -149,7 +144,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
}
let (main_loop, main_loop_cancellation_token) =
MainLoop::new(mode, project_options_overrides, printer);
MainLoop::new(project_options_overrides, printer);
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -220,8 +215,6 @@ impl Termination for ExitStatus {
}
struct MainLoop {
mode: MainLoopMode,
/// Sender that can be used to send messages to the main loop.
sender: crossbeam_channel::Sender<MainLoopMessage>,
@@ -244,7 +237,6 @@ struct MainLoop {
impl MainLoop {
fn new(
mode: MainLoopMode,
project_options_overrides: ProjectOptionsOverrides,
printer: Printer,
) -> (Self, MainLoopCancellationToken) {
@@ -255,7 +247,6 @@ impl MainLoop {
(
Self {
mode,
sender: sender.clone(),
receiver,
watcher: None,
@@ -334,78 +325,80 @@ impl MainLoop {
result,
revision: check_revision,
} => {
if check_revision != revision {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
continue;
}
let terminal_settings = db.project().settings(db).terminal();
let display_config = DisplayDiagnosticConfig::default()
.format(terminal_settings.output_format.into())
.color(colored::control::SHOULD_COLORIZE.should_colorize())
.with_cancellation_token(Some(self.cancellation_token.clone()))
.show_fix_diff(true);
if db.project().files(db).is_empty() {
tracing::warn!("No python files found under the given path(s)");
}
if check_revision == revision {
if db.project().files(db).is_empty() {
tracing::warn!("No python files found under the given path(s)");
}
let result = match self.mode {
MainLoopMode::Check => {
// TODO: We should have an official flag to silence workspace diagnostics.
if std::env::var("TY_MEMORY_REPORT").as_deref() == Ok("mypy_primer") {
// TODO: We should have an official flag to silence workspace diagnostics.
if std::env::var("TY_MEMORY_REPORT").as_deref() == Ok("mypy_primer") {
return Ok(ExitStatus::Success);
}
let is_human_readable = terminal_settings.output_format.is_human_readable();
if result.is_empty() {
if is_human_readable {
writeln!(
self.printer.stream_for_success_summary(),
"{}",
"All checks passed!".green().bold()
)?;
}
if self.watcher.is_none() {
return Ok(ExitStatus::Success);
}
} else {
let diagnostics_count = result.len();
self.write_diagnostics(db, &result)?;
let mut stdout = self.printer.stream_for_details().lock();
let exit_status =
exit_status_from_diagnostics(&result, terminal_settings);
if self.cancellation_token.is_cancelled() {
Err(Canceled)
} else {
Ok(result)
// Only render diagnostics if they're going to be displayed, since doing
// so is expensive.
if stdout.is_enabled() {
write!(
stdout,
"{}",
DisplayDiagnostics::new(db, &display_config, &result)
)?;
}
}
MainLoopMode::AddIgnore => {
if let Ok(result) =
suppress_all_diagnostics(db, result, &self.cancellation_token)
{
self.write_diagnostics(db, &result.diagnostics)?;
let terminal_settings = db.project().settings(db).terminal();
let is_human_readable =
terminal_settings.output_format.is_human_readable();
if !self.cancellation_token.is_cancelled() {
if is_human_readable {
writeln!(
self.printer.stream_for_failure_summary(),
"Added {} ignore comment{}",
result.count,
if result.count > 1 { "s" } else { "" }
"Found {} diagnostic{}",
diagnostics_count,
if diagnostics_count > 1 { "s" } else { "" }
)?;
}
Ok(result.diagnostics)
} else {
Err(Canceled)
if exit_status.is_internal_error() {
tracing::warn!(
"A fatal error occurred while checking some files. Not all project files were analyzed. See the diagnostics list above for details."
);
}
}
if self.watcher.is_none() {
return Ok(exit_status);
}
}
};
let exit_status = match result.as_deref() {
Ok([]) => ExitStatus::Success,
Ok(diagnostics) => {
let terminal_settings = db.project().settings(db).terminal();
exit_status_from_diagnostics(diagnostics, terminal_settings)
}
Err(Canceled) => ExitStatus::Success,
};
if exit_status.is_internal_error() {
tracing::warn!(
"A fatal error occurred while checking some files. Not all project files were analyzed. See the diagnostics list above for details."
} else {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
}
if self.watcher.is_some() {
continue;
}
return Ok(exit_status);
}
MainLoopMessage::ApplyChanges(changes) => {
@@ -432,65 +425,6 @@ impl MainLoop {
Ok(ExitStatus::Success)
}
fn write_diagnostics(
&self,
db: &ProjectDatabase,
diagnostics: &[Diagnostic],
) -> anyhow::Result<()> {
let terminal_settings = db.project().settings(db).terminal();
let is_human_readable = terminal_settings.output_format.is_human_readable();
match diagnostics {
[] => {
if is_human_readable {
writeln!(
self.printer.stream_for_success_summary(),
"{}",
"All checks passed!".green().bold()
)?;
}
}
diagnostics => {
let diagnostics_count = diagnostics.len();
let mut stdout = self.printer.stream_for_details().lock();
// Only render diagnostics if they're going to be displayed, since doing
// so is expensive.
if stdout.is_enabled() {
let display_config = DisplayDiagnosticConfig::default()
.format(terminal_settings.output_format.into())
.color(colored::control::SHOULD_COLORIZE.should_colorize())
.with_cancellation_token(Some(self.cancellation_token.clone()))
.show_fix_diff(true);
write!(
stdout,
"{}",
DisplayDiagnostics::new(db, &display_config, diagnostics)
)?;
}
if !self.cancellation_token.is_cancelled() && is_human_readable {
writeln!(
self.printer.stream_for_failure_summary(),
"Found {} diagnostic{}",
diagnostics_count,
if diagnostics_count > 1 { "s" } else { "" }
)?;
}
}
}
Ok(())
}
}
#[derive(Copy, Clone, Debug)]
enum MainLoopMode {
Check,
AddIgnore,
}
fn exit_status_from_diagnostics(
@@ -625,7 +559,12 @@ fn set_colored_override(color: Option<TerminalColor>) {
fn setup_rayon() {
ThreadPoolBuilder::default()
.num_threads(max_parallelism().get())
.stack_size(STACK_SIZE)
// Use a reasonably large stack size to avoid running into stack overflows too easily. The
// size was chosen in such a way as to still be able to handle large expressions involving
// binary operators (x + x + … + x) both during the AST walk in semantic index building as
// well as during type checking. Using this stack size, we can handle handle expressions
// that are several times larger than the corresponding limits in existing type checkers.
.stack_size(16 * 1024 * 1024)
.build_global()
.unwrap();
}

View File

@@ -160,65 +160,6 @@ fn configuration_include() -> anyhow::Result<()> {
Ok(())
}
/// Files without extensions can be included by adding a literal glob to `include` that matches
/// the path exactly. A literal glob is a glob without any meta characters.
#[test]
fn configuration_include_no_extension() -> anyhow::Result<()> {
let case = CliTest::with_files([(
"src/main",
r#"
print(undefined_var) # error: unresolved-reference
"#,
)])?;
// By default, `src/main` is excluded because the file has no supported extension.
case.write_file(
"ty.toml",
r#"
[src]
include = ["src"]
"#,
)?;
assert_cmd_snapshot!(case.command(), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
WARN No python files found under the given path(s)
");
// The file can be included by adding an exactly matching pattern
case.write_file(
"ty.toml",
r#"
[src]
include = ["src", "src/main"]
"#,
)?;
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-reference]: Name `undefined_var` used when not defined
--> src/main:2:7
|
2 | print(undefined_var) # error: unresolved-reference
| ^^^^^^^^^^^^^
|
info: rule `unresolved-reference` is enabled by default
Found 1 diagnostic
----- stderr -----
");
Ok(())
}
/// Test configuration file exclude functionality
#[test]
fn configuration_exclude() -> anyhow::Result<()> {

View File

@@ -1,114 +0,0 @@
use insta_cmd::assert_cmd_snapshot;
use crate::CliTest;
#[test]
fn add_ignore() -> anyhow::Result<()> {
let case = CliTest::with_file(
"different_violations.py",
r#"
import sys
x = 1 + a
if sys.does_not_exist:
...
def test(a, b): ...
test(x = 10, b = 12)
"#,
)?;
assert_cmd_snapshot!(case.command().arg("--add-ignore"), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
Added 4 ignore comments
----- stderr -----
");
// There should be no diagnostics when running ty again
assert_cmd_snapshot!(case.command(), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
");
Ok(())
}
#[test]
fn add_ignore_unfixable() -> anyhow::Result<()> {
let case = CliTest::with_files([
("has_syntax_error.py", r"print(x # [unresolved-reference]"),
(
"different_violations.py",
r#"
import sys
x = 1 + a
reveal_type(x)
if sys.does_not_exist:
...
"#,
),
(
"repeated_violations.py",
r#"
x = (
1 +
a * b
)
y = y # ty: ignore[unresolved-reference]
"#,
),
])?;
assert_cmd_snapshot!(case.command().arg("--add-ignore").env("RUST_BACKTRACE", "1"), @r"
success: false
exit_code: 1
----- stdout -----
info[revealed-type]: Revealed type
--> different_violations.py:6:13
|
4 | x = 1 + a # ty:ignore[unresolved-reference]
5 |
6 | reveal_type(x) # ty:ignore[undefined-reveal]
| ^ `Unknown`
7 |
8 | if sys.does_not_exist: # ty:ignore[unresolved-attribute]
|
error[unresolved-reference]: Name `x` used when not defined
--> has_syntax_error.py:1:7
|
1 | print(x # [unresolved-reference]
| ^
|
info: rule `unresolved-reference` is enabled by default
error[invalid-syntax]: unexpected EOF while parsing
--> has_syntax_error.py:1:34
|
1 | print(x # [unresolved-reference]
| ^
|
Found 3 diagnostics
Added 5 ignore comments
----- stderr -----
WARN Skipping file `<temp_dir>/has_syntax_error.py` with syntax errors
");
Ok(())
}

View File

@@ -2,7 +2,6 @@ mod analysis_options;
mod config_option;
mod exit_code;
mod file_selection;
mod fixes;
mod python_environment;
mod rule_selection;

View File

@@ -1,10 +1,9 @@
name,file,index,rank
auto-import-includes-modules,main.py,0,1
auto-import-includes-modules,main.py,1,2
auto-import-includes-modules,main.py,1,7
auto-import-includes-modules,main.py,2,1
auto-import-skips-current-module,main.py,0,1
class-arg-completion,main.py,0,1
exact-over-fuzzy,main.py,0,1
fstring-completions,main.py,0,1
higher-level-symbols-preferred,main.py,0,
higher-level-symbols-preferred,main.py,1,1
@@ -17,10 +16,8 @@ import-deprioritizes-type_check_only,main.py,3,2
import-deprioritizes-type_check_only,main.py,4,3
import-keyword-completion,main.py,0,1
internal-typeshed-hidden,main.py,0,2
local-over-auto-import,main.py,0,1
modules-over-other-symbols,main.py,0,1
none-completion,main.py,0,1
numpy-array,main.py,0,57
numpy-array,main.py,0,159
numpy-array,main.py,1,1
object-attr-instance-methods,main.py,0,1
object-attr-instance-methods,main.py,1,1
@@ -29,12 +26,7 @@ raise-uses-base-exception,main.py,0,1
scope-existing-over-new-import,main.py,0,1
scope-prioritize-closer,main.py,0,2
scope-simple-long-identifier,main.py,0,1
third-party-over-stdlib,main.py,0,1
tighter-over-looser-scope,main.py,0,3
tstring-completions,main.py,0,1
ty-extensions-lower-stdlib,main.py,0,1
typing-gets-priority,main.py,0,1
typing-gets-priority,main.py,1,1
typing-gets-priority,main.py,2,1
typing-gets-priority,main.py,3,1
typing-gets-priority,main.py,4,1
ty-extensions-lower-stdlib,main.py,0,9
type-var-typing-over-ast,main.py,0,3
type-var-typing-over-ast,main.py,1,253
1 name file index rank
2 auto-import-includes-modules main.py 0 1
3 auto-import-includes-modules main.py 1 2 7
4 auto-import-includes-modules main.py 2 1
5 auto-import-skips-current-module main.py 0 1
6 class-arg-completion main.py 0 1
exact-over-fuzzy main.py 0 1
7 fstring-completions main.py 0 1
8 higher-level-symbols-preferred main.py 0
9 higher-level-symbols-preferred main.py 1 1
16 import-deprioritizes-type_check_only main.py 4 3
17 import-keyword-completion main.py 0 1
18 internal-typeshed-hidden main.py 0 2
local-over-auto-import main.py 0 1
modules-over-other-symbols main.py 0 1
19 none-completion main.py 0 1
20 numpy-array main.py 0 57 159
21 numpy-array main.py 1 1
22 object-attr-instance-methods main.py 0 1
23 object-attr-instance-methods main.py 1 1
26 scope-existing-over-new-import main.py 0 1
27 scope-prioritize-closer main.py 0 2
28 scope-simple-long-identifier main.py 0 1
third-party-over-stdlib main.py 0 1
tighter-over-looser-scope main.py 0 3
29 tstring-completions main.py 0 1
30 ty-extensions-lower-stdlib main.py 0 1 9
31 typing-gets-priority type-var-typing-over-ast main.py 0 1 3
32 typing-gets-priority type-var-typing-over-ast main.py 1 1 253
typing-gets-priority main.py 2 1
typing-gets-priority main.py 3 1
typing-gets-priority main.py 4 1

View File

@@ -540,17 +540,16 @@ fn copy_project(src_dir: &SystemPath, dst_dir: &SystemPath) -> anyhow::Result<Ve
std::fs::create_dir_all(dst_dir).with_context(|| dst_dir.to_string())?;
let mut cursors = vec![];
let it = walkdir::WalkDir::new(src_dir.as_std_path())
.into_iter()
.filter_entry(|dent| {
!dent
.file_name()
.to_str()
.is_some_and(|name| name.starts_with('.'))
});
for result in it {
for result in walkdir::WalkDir::new(src_dir.as_std_path()) {
let dent =
result.with_context(|| format!("failed to get directory entry from {src_dir}"))?;
if dent
.file_name()
.to_str()
.is_some_and(|name| name.starts_with('.'))
{
continue;
}
let src = SystemPath::from_std_path(dent.path()).ok_or_else(|| {
anyhow::anyhow!("path `{}` is not valid UTF-8", dent.path().display())

View File

@@ -1,3 +0,0 @@
pattern = 1
pttn = 1
pttn<CURSOR: pttn>

View File

@@ -1,2 +0,0 @@
[settings]
auto-import = true

View File

@@ -1,11 +0,0 @@
def foo(x):
# We specifically want the local `x` to be
# suggested first here, and NOT an `x` or an
# `X` from some other module (via auto-import).
# We'd also like this to come before `except`,
# which is a keyword that contains `x`, but is
# not an exact match (where as `x` is). `except`
# also isn't legal in this context, although
# that sort of context sensitivity is a bit
# trickier.
return x<CURSOR: x>

View File

@@ -1,5 +0,0 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View File

@@ -1,8 +0,0 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

View File

@@ -1,2 +0,0 @@
[settings]
auto-import = true

View File

@@ -1 +0,0 @@
os<CURSOR: os>

View File

@@ -1,5 +0,0 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View File

@@ -1,78 +0,0 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "regex"
version = "2025.11.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" },
{ url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" },
{ url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" },
{ url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" },
{ url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" },
{ url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" },
{ url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" },
{ url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" },
{ url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" },
{ url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" },
{ url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" },
{ url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" },
{ url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" },
{ url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" },
{ url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" },
{ url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" },
{ url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" },
{ url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" },
{ url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" },
{ url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" },
{ url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" },
{ url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" },
{ url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" },
{ url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" },
{ url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" },
{ url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" },
{ url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" },
{ url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" },
{ url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" },
{ url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" },
{ url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" },
{ url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" },
{ url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" },
{ url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = "2025-11-03T21:32:59.274Z" },
{ url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" },
{ url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958, upload-time = "2025-11-03T21:33:03.379Z" },
{ url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289, upload-time = "2025-11-03T21:33:05.374Z" },
{ url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" },
{ url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" },
{ url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" },
{ url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" },
{ url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" },
{ url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" },
{ url = "https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" },
{ url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" },
{ url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" },
{ url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" },
{ url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = "2025-11-03T21:33:25.999Z" },
{ url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852, upload-time = "2025-11-03T21:33:27.852Z" },
{ url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" },
{ url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" },
{ url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = "2025-11-03T21:33:36.793Z" },
{ url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" },
{ url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" },
{ url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" },
{ url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" },
]
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "regex" },
]
[package.metadata]
requires-dist = [{ name = "regex", specifier = ">=2025.11.3" }]

View File

@@ -1,2 +0,0 @@
[settings]
auto-import = true

View File

@@ -1,12 +0,0 @@
# This test was originally written to
# check that a third party dependency
# gets priority over stdlib. But it was
# not clearly the right choice[1].
#
# 2026-01-09: We stuck with it for now,
# but it seems likely that we'll want
# to regress this task in favor of
# another.
#
# [1]: https://github.com/astral-sh/ruff/pull/22460#discussion_r2676343225
fullma<CURSOR: regex.fullmatch>

View File

@@ -1,7 +0,0 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = [
"regex>=2025.11.3",
]

View File

@@ -1,78 +0,0 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "regex"
version = "2025.11.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" },
{ url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" },
{ url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" },
{ url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" },
{ url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" },
{ url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" },
{ url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" },
{ url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" },
{ url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" },
{ url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" },
{ url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" },
{ url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" },
{ url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" },
{ url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" },
{ url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" },
{ url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" },
{ url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" },
{ url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" },
{ url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" },
{ url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" },
{ url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" },
{ url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" },
{ url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" },
{ url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" },
{ url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" },
{ url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" },
{ url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" },
{ url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" },
{ url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" },
{ url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" },
{ url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" },
{ url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" },
{ url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" },
{ url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = "2025-11-03T21:32:59.274Z" },
{ url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" },
{ url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958, upload-time = "2025-11-03T21:33:03.379Z" },
{ url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289, upload-time = "2025-11-03T21:33:05.374Z" },
{ url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" },
{ url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" },
{ url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" },
{ url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" },
{ url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" },
{ url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" },
{ url = "https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" },
{ url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" },
{ url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" },
{ url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" },
{ url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = "2025-11-03T21:33:25.999Z" },
{ url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852, upload-time = "2025-11-03T21:33:27.852Z" },
{ url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" },
{ url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" },
{ url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = "2025-11-03T21:33:36.793Z" },
{ url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" },
{ url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" },
{ url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" },
{ url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" },
]
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "regex" },
]
[package.metadata]
requires-dist = [{ name = "regex", specifier = ">=2025.11.3" }]

View File

@@ -1,2 +0,0 @@
[settings]
auto-import = true

View File

@@ -1,10 +0,0 @@
scope1 = 1
def x():
scope2 = 1
def xx():
scope3 = 1
def xxx():
# We specifically want `scope3` to be
# suggested first here, since that's in
# the "tighter" scope.
scope<CURSOR: scope3>

View File

@@ -1,5 +0,0 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View File

@@ -1,8 +0,0 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

View File

@@ -0,0 +1,12 @@
# This one demands that `TypeVa` complete to `typing.TypeVar`
# even though there is also an `ast.TypeVar`. Getting this one
# right seems tricky, and probably requires module-specific
# heuristics.
#
# ref: https://github.com/astral-sh/ty/issues/1274#issuecomment-3345884227
TypeVa<CURSOR: typing.TypeVar>
# This is a similar case of `ctypes.cast` being preferred over
# `typing.cast`. Maybe `typing` should just get a slightly higher
# weight than most other stdlib modules?
cas<CURSOR: typing.cast>

View File

@@ -1,2 +0,0 @@
[settings]
auto-import = true

View File

@@ -1,19 +0,0 @@
# Many of these came from discussion in:
# <https://github.com/astral-sh/ty/issues/1274>
# We should prefer `typing` over `asyncio` here.
class Foo(Protoco<CURSOR: typing.Protocol>): ...
# We should prefer `typing` over `ty_extensions`
# or `typing_extensions`.
reveal_<CURSOR: typing.reveal_type>
# We should prefer `typing` over `ast`.
TypeVa<CURSOR: typing.TypeVar>
# We should prefer `typing` over `ctypes`.
cast<CURSOR: typing.cast>
# We should prefer a non-stdlib project import
# over a stdlib `typing` import.
NoRetur<CURSOR: sub1.NoReturn>

View File

@@ -1,5 +0,0 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View File

@@ -1 +0,0 @@
NoReturn = 1

View File

@@ -1,8 +0,0 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

View File

@@ -27,7 +27,6 @@ ty_project = { workspace = true, features = ["testing"] }
ty_python_semantic = { workspace = true }
ty_vendored = { workspace = true }
compact_str = { workspace = true }
get-size2 = { workspace = true }
itertools = { workspace = true }
rayon = { workspace = true }

View File

@@ -64,11 +64,7 @@ pub fn all_symbols<'db>(
continue;
}
s.spawn(move |_| {
let symbols_for_file_span = tracing::debug_span!(
parent: all_symbols_span,
"symbols_for_file_global_only",
path = %file.path(&*db),
);
let symbols_for_file_span = tracing::debug_span!(parent: all_symbols_span, "symbols_for_file_global_only", ?file);
let _entered = symbols_for_file_span.entered();
if query.is_match_symbol_name(module.name(&*db)) {
@@ -98,13 +94,11 @@ pub fn all_symbols<'db>(
let key1 = (
s1.name_in_file()
.unwrap_or_else(|| s1.module().name(db).as_str()),
s1.module().name(db).as_str(),
s1.file.path(db).as_str(),
);
let key2 = (
s2.name_in_file()
.unwrap_or_else(|| s2.module().name(db).as_str()),
s2.module().name(db).as_str(),
s2.file.path(db).as_str(),
);
key1.cmp(&key2)

View File

@@ -5,8 +5,8 @@ use ruff_diagnostics::Edit;
use ruff_python_ast::find_node::covering_node;
use ruff_text_size::TextRange;
use ty_project::Db;
use ty_python_semantic::create_suppression_fix;
use ty_python_semantic::lint::LintId;
use ty_python_semantic::suppress_single;
use ty_python_semantic::types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE};
/// A `QuickFix` Code Action
@@ -42,7 +42,7 @@ pub fn code_actions(
// Suggest just suppressing the lint (always a valid option, but never ideal)
actions.push(QuickFix {
title: format!("Ignore '{}' for this line", lint_id.name()),
edits: suppress_single(db, file, lint_id, diagnostic_range).into_edits(),
edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(),
preferred: false,
});
@@ -437,38 +437,6 @@ mod tests {
"#);
}
#[test]
fn add_ignore_line_continuation_empty_lines() {
let test = CodeActionTest::with_source(
r#"b = bbbbb \
[ ccc # test
+ <START>ddd<END> \
] # test
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:4:11
|
2 | [ ccc # test
3 |
4 | + ddd \
| ^^^
5 |
6 | ] # test
|
2 | [ ccc # test
3 |
4 | + ddd \
-
5 + # ty:ignore[unresolved-reference]
6 | ] # test
");
}
#[test]
fn undefined_reveal_type() {
let test = CodeActionTest::with_source(

File diff suppressed because it is too large Load Diff

View File

@@ -34,8 +34,8 @@ pub struct ParameterDetails<'db> {
pub name: String,
/// The parameter label in the signature (e.g., "param1: str")
pub label: String,
/// The annotated type of the parameter. If no annotation was provided, this is `Unknown`.
pub ty: Type<'db>,
/// The annotated type of the parameter, if any
pub ty: Option<Type<'db>>,
/// Documentation specific to the parameter, typically extracted from the
/// function's docstring
pub documentation: Option<String>,
@@ -237,7 +237,7 @@ fn create_parameters_from_offsets<'db>(
docstring: Option<&Docstring>,
parameter_names: &[String],
parameter_kinds: &[ParameterKind],
parameter_types: &[Type<'db>],
parameter_types: &[Option<Type<'db>>],
) -> Vec<ParameterDetails<'db>> {
// Extract parameter documentation from the function's docstring if available.
let param_docs = if let Some(docstring) = docstring {
@@ -264,11 +264,12 @@ fn create_parameters_from_offsets<'db>(
parameter_kinds.get(i),
Some(ParameterKind::PositionalOnly { .. })
);
let ty = parameter_types.get(i).copied().flatten();
ParameterDetails {
name: param_name.to_string(),
label,
ty: parameter_types[i],
ty,
documentation: param_docs.get(param_name).cloned(),
is_positional_only,
}

View File

@@ -554,17 +554,10 @@ impl SearchPath {
)
}
/// Is this search path in "first party" code? i.e., The
/// end user's project code.
pub fn is_first_party(&self) -> bool {
matches!(&*self.0, SearchPathInner::FirstParty(_))
}
/// Is the module in a site-packages directory?
pub fn is_site_packages(&self) -> bool {
matches!(&*self.0, SearchPathInner::SitePackages(_))
}
fn is_valid_extension(&self, extension: &str) -> bool {
if self.is_standard_library() {
extension == "pyi"

View File

@@ -14,7 +14,6 @@ license.workspace = true
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["cache", "serde"] }
ruff_diagnostics = { workspace = true }
ruff_macros = { workspace = true }
ruff_memory_usage = { workspace = true }
ruff_options_metadata = { workspace = true }
@@ -31,7 +30,7 @@ anyhow = { workspace = true }
camino = { workspace = true }
colored = { workspace = true }
crossbeam = { workspace = true }
get-size2 = { workspace = true, features = ["ordermap"] }
get-size2 = { workspace = true }
globset = { workspace = true }
notify = { workspace = true }
ordermap = { workspace = true, features = ["serde"] }
@@ -49,10 +48,8 @@ toml = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
insta = { workspace = true, features = ["redactions", "ron"] }
ruff_db = { workspace = true, features = ["testing"] }
[features]
default = ["zstd"]

View File

@@ -1,794 +0,0 @@
use ruff_db::cancellation::{Canceled, CancellationToken};
use ruff_db::diagnostic::{DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_db::parsed::parsed_module;
use ruff_db::source::SourceText;
use ruff_db::system::{SystemPath, WritableSystem};
use ruff_db::{
diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span},
files::File,
source::source_text,
};
use ruff_diagnostics::{Fix, IsolationLevel, SourceMap};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::FxHashSet;
use salsa::Setter as _;
use std::collections::BTreeMap;
use thiserror::Error;
use ty_python_semantic::{UNUSED_IGNORE_COMMENT, suppress_all};
use crate::Db;
pub struct SuppressAllResult {
/// The non-lint diagnostics that can't be suppressed or the diagnostics of files
/// that couldn't be suppressed (because ty failed to write the result back to disk,
/// or the file contains syntax errors).
pub diagnostics: Vec<Diagnostic>,
/// The number of diagnostics that were suppressed.
pub count: usize,
}
/// Adds suppressions to all lint diagnostics and writes the changed files back to disk.
///
/// Returns how many diagnostics were suppressed along the remaining, non-suppressed diagnostics.
///
/// ## Panics
/// If the `db`'s system isn't [writable](WritableSystem).
pub fn suppress_all_diagnostics(
db: &mut dyn Db,
mut diagnostics: Vec<Diagnostic>,
cancellation_token: &CancellationToken,
) -> Result<SuppressAllResult, Canceled> {
let system = WritableSystem::dyn_clone(
db.system()
.as_writable()
.expect("System should be writable"),
);
let has_fixable = diagnostics.iter().any(|diagnostic| {
diagnostic
.primary_span()
.and_then(|span| span.range())
.is_some()
&& diagnostic.id().is_lint()
&& diagnostic.id() != DiagnosticId::Lint(UNUSED_IGNORE_COMMENT.name())
});
// Early return if there are no diagnostics that can be suppressed to avoid all the heavy work below.
if !has_fixable {
return Ok(SuppressAllResult {
diagnostics,
count: 0,
});
}
let mut by_file: BTreeMap<File, Vec<_>> = BTreeMap::new();
// Group the diagnostics by file, leave the file-agnostic diagnostics in `diagnostics`.
for diagnostic in diagnostics.extract_if(.., |diagnostic| diagnostic.primary_span().is_some()) {
let span = diagnostic
.primary_span()
.expect("should be set because `extract_if` only yields elements with a primary_span");
by_file
.entry(span.expect_ty_file())
.or_default()
.push(diagnostic);
}
let mut fixed_count = 0usize;
let project = db.project();
// Try to suppress all lint-diagnostics in the given file.
for (&file, file_diagnostics) in &mut by_file {
if cancellation_token.is_cancelled() {
return Err(Canceled);
}
let Some(path) = file.path(db).as_system_path() else {
tracing::debug!(
"Skipping file `{}` with non-system path because vendored and system virtual file paths are read-only",
file.path(db)
);
continue;
};
let parsed = parsed_module(db, file);
if parsed.load(db).has_syntax_errors() {
tracing::warn!("Skipping file `{path}` with syntax errors",);
continue;
}
let fixable_diagnostics: Vec<_> = file_diagnostics
.iter()
.filter_map(|diagnostic| {
let lint_id = diagnostic.id().as_lint()?;
// Don't suppress unused ignore comments.
if lint_id == UNUSED_IGNORE_COMMENT.name() {
return None;
}
// We can't suppress diagnostics without a corresponding file or range.
let span = diagnostic.primary_span()?;
let range = span.range()?;
Some((lint_id, range))
})
.collect();
if fixable_diagnostics.is_empty() {
tracing::debug!(
"Skipping file `{path}` because it contains no suppressable diagnostics"
);
continue;
}
tracing::debug!(
"Suppressing {} diagnostics in `{path}`.",
fixable_diagnostics.len()
);
// Required to work around borrow checker issues.
let path = path.to_path_buf();
let fixes = suppress_all(db, file, &fixable_diagnostics);
let source = source_text(db, file);
// TODO: Handle overlapping fixes when adding support for `--fix` by iterating until all fixes
// were successfully applied. We don't need to do that for suppressions because suppression fixes
// should never overlap (and, if they were, the worst outcome is that some suppressions are missing).
let FixedCode {
source: new_source,
source_map,
} = apply_fixes(&source, fixes).unwrap_or_else(|fixed| fixed);
let new_source = source.with_text(new_source, &source_map);
// Verify that the fix didn't introduce any syntax errors by overriding
// the source text for `file`.
let mut source_guard = WithUpdatedSourceGuard::new(db, file, &source, new_source.clone());
let db = source_guard.db();
let new_parsed = parsed_module(db, file);
let new_parsed = new_parsed.load(db);
if new_parsed.has_syntax_errors() {
let mut diag = Diagnostic::new(
DiagnosticId::InternalError,
Severity::Fatal,
format_args!(
"Adding suppressions introduced a syntax error. Reverting all changes."
),
);
let mut file_annotation = Annotation::primary(Span::from(file));
file_annotation.hide_snippet(true);
diag.annotate(file_annotation);
let parse_diagnostics: Vec<_> = new_parsed
.errors()
.iter()
.map(|error| {
Diagnostic::invalid_syntax(Span::from(file), &error.error, error.location)
})
.collect();
diag.add_bug_sub_diagnostics("%5BFix%20error%5D");
let file_db: &dyn ruff_db::Db = db;
diag.info(format_args!(
"Introduced syntax errors:\n\n{}",
DisplayDiagnostics::new(
&file_db,
&DisplayDiagnosticConfig::default(),
&parse_diagnostics
)
));
file_diagnostics.push(diag);
continue;
}
// Write the changes back to disk.
if let Err(err) = write_changes(db, &*system, file, &path, &new_source) {
let mut diag = Diagnostic::new(
DiagnosticId::Io,
Severity::Error,
format_args!("Failed to write fixes to file: {err}"),
);
diag.annotate(Annotation::primary(Span::from(file)));
diagnostics.push(diag);
continue;
}
// If we got here then we've been successful. Re-check to get the diagnostics with the
// update source, update the fix count.
if fixable_diagnostics.len() == file_diagnostics.len() {
file_diagnostics.clear();
} else {
// If there are any other file level diagnostics, call `check_file` to re-compute them
// with updated ranges.
let diagnostics = project.check_file(db, file);
*file_diagnostics = diagnostics;
}
fixed_count += fixable_diagnostics.len();
// Don't restore the source text or we risk a panic when rendering the diagnostics
// if reading any of the fixed files fails (for whatever reason).
// The override will get removed on the next `File::sync_path` call.
source_guard.defuse();
}
// Stitch the remaining diagnostics back together.
diagnostics.extend(by_file.into_values().flatten());
diagnostics.sort_by(|left, right| {
left.rendering_sort_key(db)
.cmp(&right.rendering_sort_key(db))
});
Ok(SuppressAllResult {
diagnostics,
count: fixed_count,
})
}
fn write_changes(
db: &dyn Db,
system: &dyn WritableSystem,
file: File,
path: &SystemPath,
new_source: &SourceText,
) -> Result<(), WriteChangesError> {
let metadata = system.path_metadata(path)?;
if metadata.revision() != file.revision(db) {
return Err(WriteChangesError::FileWasModified);
}
system.write_file_bytes(path, &new_source.to_bytes())?;
Ok(())
}
#[derive(Debug, Error)]
enum WriteChangesError {
#[error("failed to write changes to disk: {0}")]
Io(#[from] std::io::Error),
#[error("the file has been modified")]
FileWasModified,
}
/// Apply a series of fixes to `File` and returns the updated source code along with the source map.
///
/// Returns an error if not all fixes were applied because some fixes are overlapping.
fn apply_fixes(source: &str, mut fixes: Vec<Fix>) -> Result<FixedCode, FixedCode> {
let mut output = String::with_capacity(source.len());
let mut last_pos: Option<TextSize> = None;
let mut has_overlapping_fixes = false;
let mut isolated: FxHashSet<u32> = FxHashSet::default();
let mut source_map = SourceMap::default();
fixes.sort_unstable_by_key(Fix::min_start);
for fix in fixes {
let mut edits = fix.edits().iter().peekable();
// If the fix contains at least one new edit, enforce isolation and positional requirements.
if let Some(first) = edits.peek() {
// If this fix requires isolation, and we've already applied another fix in the
// same isolation group, skip it.
if let IsolationLevel::Group(id) = fix.isolation() {
if !isolated.insert(id) {
has_overlapping_fixes = true;
continue;
}
}
// If this fix overlaps with a fix we've already applied, skip it.
if last_pos.is_some_and(|last_pos| last_pos >= first.start()) {
has_overlapping_fixes = true;
continue;
}
}
let mut applied_edits = Vec::with_capacity(fix.edits().len());
for edit in edits {
// Add all contents from `last_pos` to `fix.location`.
let slice = &source[TextRange::new(last_pos.unwrap_or_default(), edit.start())];
output.push_str(slice);
// Add the start source marker for the patch.
source_map.push_start_marker(edit, output.text_len());
// Add the patch itself.
output.push_str(edit.content().unwrap_or_default());
// Add the end source marker for the added patch.
source_map.push_end_marker(edit, output.text_len());
// Track that the edit was applied.
last_pos = Some(edit.end());
applied_edits.push(edit);
}
}
// Add the remaining content.
let slice = &source[last_pos.unwrap_or_default().to_usize()..];
output.push_str(slice);
let fixed = FixedCode {
source: output,
source_map,
};
if has_overlapping_fixes {
Err(fixed)
} else {
Ok(fixed)
}
}
struct FixedCode {
/// Source map that allows mapping positions in the fixed code back to positions in the original
/// source code (useful for mapping fixed lines back to their original notebook cells).
source_map: SourceMap,
/// The fixed source code
source: String,
}
/// Guard that sets [`File::set_source_text_override`] and guarantees to restore the original source
/// text unless the guard is explicitly defused.
struct WithUpdatedSourceGuard<'db> {
db: &'db mut dyn Db,
file: File,
old_source: Option<SourceText>,
}
impl<'db> WithUpdatedSourceGuard<'db> {
fn new(
db: &'db mut dyn Db,
file: File,
old_source: &SourceText,
new_source: SourceText,
) -> Self {
file.set_source_text_override(db).to(Some(new_source));
Self {
db,
file,
old_source: Some(old_source.clone()),
}
}
fn defuse(&mut self) {
self.old_source = None;
}
fn db(&mut self) -> &mut dyn Db {
self.db
}
}
impl Drop for WithUpdatedSourceGuard<'_> {
fn drop(&mut self) {
if let Some(old_source) = self.old_source.take() {
// We don't set `source_text_override` to `None` here because setting the value
// invalidates the `source_text` query and there's the chance that reading the file's content
// will fail this time (e.g. because the file was deleted), resulting in ty panicking
// when trying to render any diagnostic for that file (because all offsets now point nowhere).
// The override will be cleared by `File::sync_path`, the next time the revision changes.
self.file
.set_source_text_override(self.db)
.to(Some(old_source));
}
}
}
#[cfg(test)]
mod tests {
use std::collections::hash_map::Entry;
use std::hash::{DefaultHasher, Hash, Hasher};
use insta::assert_snapshot;
use ruff_db::cancellation::CancellationTokenSource;
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_db::files::{File, system_path_to_file};
use ruff_db::parsed::parsed_module;
use ruff_db::source::source_text;
use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use rustc_hash::FxHashMap;
use ty_python_semantic::UNUSED_IGNORE_COMMENT;
use ty_python_semantic::lint::Level;
use crate::db::tests::TestDb;
use crate::metadata::options::Rules;
use crate::metadata::value::RangedValue;
use crate::{Db, ProjectMetadata, suppress_all_diagnostics};
#[test]
fn simple_suppression() {
assert_snapshot!(
suppress_all_in(r#"
a = b + 10"#
),
@r"
Added 1 suppressions
## Fixed source
```py
a = b + 10 # ty:ignore[unresolved-reference]
```
");
}
#[test]
fn multiple_suppressions_same_code() {
assert_snapshot!(
suppress_all_in(r#"
a = b + 10 + c"#
),
@r"
Added 2 suppressions
## Fixed source
```py
a = b + 10 + c # ty:ignore[unresolved-reference]
```
");
}
#[test]
fn multiple_suppressions_different_codes() {
assert_snapshot!(
suppress_all_in(r#"
import sys
a = b + 10 + sys.veeersion"#
),
@r"
Added 2 suppressions
## Fixed source
```py
import sys
a = b + 10 + sys.veeersion # ty:ignore[unresolved-attribute, unresolved-reference]
```
");
}
#[test]
fn dont_fix_unused_ignore() {
assert_snapshot!(
suppress_all_in(r#"
import sys
a = 5 + 10 # ty: ignore[unresolved-reference]"#
),
@r"
Added 0 suppressions
## Fixed source
```py
import sys
a = 5 + 10 # ty: ignore[unresolved-reference]
```
## Diagnostics after applying fixes
warning[unused-ignore-comment]: Unused `ty: ignore` directive
--> test.py:2:13
|
1 | import sys
2 | a = 5 + 10 # ty: ignore[unresolved-reference]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Remove the unused suppression comment
");
}
#[test]
fn dont_fix_files_containing_syntax_errors() {
assert_snapshot!(
suppress_all_in(r#"
import sys
a = x +
"#
),
@r"
Added 0 suppressions
## Fixed source
```py
import sys
a = x +
```
## Diagnostics after applying fixes
error[unresolved-reference]: Name `x` used when not defined
--> test.py:2:5
|
1 | import sys
2 | a = x +
| ^
|
info: rule `unresolved-reference` is enabled by default
error[invalid-syntax]: Expected an expression
--> test.py:2:8
|
1 | import sys
2 | a = x +
| ^
|
");
}
#[test]
fn arguments() {
assert_snapshot!(
suppress_all_in(r#"
def test(a, b):
pass
test(
a = 10,
c = "unknown"
)
"#
),
@r#"
Added 2 suppressions
## Fixed source
```py
def test(a, b):
pass
test(
a = 10,
c = "unknown" # ty:ignore[unknown-argument]
) # ty:ignore[missing-argument]
```
"#);
}
#[test]
fn return_type() {
assert_snapshot!(
suppress_all_in(r#"class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test(
self,
b: str
) -> A.b:
pass"#
),
@r#"
Added 2 suppressions
## Fixed source
```py
class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test(
self,
b: str
) -> A.b: # ty:ignore[invalid-method-override, unresolved-attribute]
pass
```
"#);
}
#[test]
fn existing_ty_ignore() {
assert_snapshot!(
suppress_all_in(r#"class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test( # ty:ignore[unresolved-reference]
self,
b: str
) -> A.b:
pass"#
),
@r#"
Added 2 suppressions
## Fixed source
```py
class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test( # ty:ignore[unresolved-reference, invalid-method-override]
self,
b: str
) -> A.b: # ty:ignore[unresolved-attribute]
pass
```
## Diagnostics after applying fixes
warning[unused-ignore-comment]: Unused `ty: ignore` directive: 'unresolved-reference'
--> test.py:7:28
|
6 | class B(A):
7 | def test( # ty:ignore[unresolved-reference, invalid-method-override]
| ^^^^^^^^^^^^^^^^^^^^
8 | self,
9 | b: str
|
help: Remove the unused suppression code
"#);
}
#[track_caller]
fn suppress_all_in(source: &str) -> String {
use std::fmt::Write as _;
let mut metadata = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("."));
metadata.options.rules = Some(Rules::from_iter([(
RangedValue::cli(UNUSED_IGNORE_COMMENT.name.to_string()),
RangedValue::cli(Level::Warn),
)]));
let mut db = TestDb::new(metadata);
db.init_program().unwrap();
db.write_file(
"test.py",
ruff_python_trivia::textwrap::dedent(source).trim(),
)
.unwrap();
let file = system_path_to_file(&db, "test.py").unwrap();
let parsed_before = parsed_module(&db, file);
let had_syntax_errors = parsed_before.load(&db).has_syntax_errors();
let diagnostics = db.project().check_file(&db, file);
let total_diagnostics = diagnostics.len();
let cancellation_token_source = CancellationTokenSource::new();
let fixes =
suppress_all_diagnostics(&mut db, diagnostics, &cancellation_token_source.token())
.expect("operation never gets cancelled");
assert_eq!(fixes.count, total_diagnostics - fixes.diagnostics.len());
File::sync_path(&mut db, SystemPath::new("test.py"));
let fixed = source_text(&db, file);
let parsed = parsed_module(&db, file);
let parsed = parsed.load(&db);
let diagnostics_after_applying_fixes = db.project().check_file(&db, file);
let mut output = String::new();
writeln!(
output,
"Added {} suppressions\n\n## Fixed source\n\n```py\n{}\n```\n",
fixes.count,
fixed.as_str()
)
.unwrap();
if !fixes.diagnostics.is_empty() {
writeln!(
output,
"## Diagnostics after applying fixes\n\n{diagnostics}\n",
diagnostics = DisplayDiagnostics::new(
&db,
&DisplayDiagnosticConfig::default(),
&fixes.diagnostics
)
)
.unwrap();
}
assert!(
!parsed.has_syntax_errors() || had_syntax_errors,
"Fixed introduced syntax errors\n\n{output}"
);
let new_diagnostics =
diff_diagnostics(&fixes.diagnostics, &diagnostics_after_applying_fixes);
if !new_diagnostics.is_empty() {
writeln!(
&mut output,
"## New diagnostics after re-checking file\n\n{diagnostics}\n",
diagnostics = DisplayDiagnostics::new(
&db,
&DisplayDiagnosticConfig::default(),
&new_diagnostics
)
)
.unwrap();
}
output
}
fn diff_diagnostics<'a>(before: &'a [Diagnostic], after: &'a [Diagnostic]) -> Vec<Diagnostic> {
let before = DiagnosticFingerprint::group_diagnostics(before);
let after = DiagnosticFingerprint::group_diagnostics(after);
after
.into_iter()
.filter(|(key, _)| !before.contains_key(key))
.map(|(_, diagnostic)| diagnostic.clone())
.collect()
}
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
struct DiagnosticFingerprint(u64);
impl DiagnosticFingerprint {
fn group_diagnostics(diagnostics: &[Diagnostic]) -> FxHashMap<Self, &Diagnostic> {
let mut result = FxHashMap::default();
for diagnostic in diagnostics {
Self::from_diagnostic(diagnostic, &mut result);
}
result
}
fn from_diagnostic<'a>(
diagnostic: &'a Diagnostic,
seen: &mut FxHashMap<DiagnosticFingerprint, &'a Diagnostic>,
) -> DiagnosticFingerprint {
let mut disambiguator = 0u64;
loop {
let mut h = DefaultHasher::default();
disambiguator.hash(&mut h);
diagnostic.id().hash(&mut h);
let key = DiagnosticFingerprint(h.finish());
match seen.entry(key) {
Entry::Occupied(_) => {
disambiguator += 1;
}
Entry::Vacant(entry) => {
entry.insert(diagnostic);
return key;
}
}
}
}
}
}

View File

@@ -1,6 +1,5 @@
use ruff_db::system::SystemPath;
use crate::glob::include::MatchFile;
pub(crate) use exclude::{ExcludeFilter, ExcludeFilterBuilder};
pub(crate) use include::{IncludeFilter, IncludeFilterBuilder};
pub(crate) use portable::{
@@ -40,9 +39,7 @@ impl IncludeExcludeFilter {
if self.exclude.match_directory(path, mode) {
IncludeResult::Excluded
} else if self.include.match_directory(path) {
IncludeResult::Included {
literal_match: None,
}
IncludeResult::Included
} else {
IncludeResult::NotIncluded
}
@@ -55,16 +52,10 @@ impl IncludeExcludeFilter {
) -> IncludeResult {
if self.exclude.match_file(path, mode) {
IncludeResult::Excluded
} else if self.include.match_file(path) {
IncludeResult::Included
} else {
match self.include.match_file(path) {
MatchFile::Literal => IncludeResult::Included {
literal_match: Some(true),
},
MatchFile::Pattern => IncludeResult::Included {
literal_match: Some(false),
},
MatchFile::No => IncludeResult::NotIncluded,
}
IncludeResult::NotIncluded
}
}
}
@@ -95,7 +86,7 @@ pub(crate) enum IncludeResult {
///
/// For directories: This isn't a guarantee that any file in this directory gets included
/// but we need to traverse it to make this decision.
Included { literal_match: Option<bool> },
Included,
/// The path matches an exclude pattern.
Excluded,

View File

@@ -40,22 +40,25 @@ impl ExcludeFilter {
}
fn matches(&self, path: &SystemPath, mode: GlobFilterCheckMode, directory: bool) -> bool {
// If the path is excluded, return `ignore`
if self.ignore.matched(path, directory).is_ignore() {
return true;
}
match mode {
GlobFilterCheckMode::TopDown => {
// No hit or an allow hit means the file or directory is not excluded.
false
match self.ignore.matched(path, directory) {
// No hit or an allow hit means the file or directory is not excluded.
Match::None | Match::Allow => false,
Match::Ignore => true,
}
}
GlobFilterCheckMode::Adhoc => {
// If the path is allowlisted or there's no hit, try the parent to ensure we don't return false
// for a folder where there's an exclude for a parent.
path.ancestors()
.skip(1)
.any(|ancestor| self.ignore.matched(ancestor, true).is_ignore())
for ancestor in path.ancestors() {
match self.ignore.matched(ancestor, directory) {
// If the path is allowlisted or there's no hit, try the parent to ensure we don't return false
// for a folder where there's an exclude for a parent.
Match::None | Match::Allow => {}
Match::Ignore => return true,
}
}
false
}
}
}
@@ -121,7 +124,7 @@ struct Gitignore {
set: GlobSet,
globs: Vec<IgnoreGlob>,
#[get_size(ignore)]
matches: Arc<Pool<Vec<usize>>>,
matches: Option<Arc<Pool<Vec<usize>>>>,
}
impl Gitignore {
@@ -137,7 +140,7 @@ impl Gitignore {
return Match::None;
}
let mut matches = self.matches.get();
let mut matches = self.matches.as_ref().unwrap().get();
let candidate = Candidate::new(path);
self.set.matches_candidate_into(&candidate, &mut matches);
for &i in matches.iter().rev() {
@@ -184,12 +187,6 @@ enum Match {
Allow,
}
impl Match {
const fn is_ignore(self) -> bool {
matches!(self, Match::Ignore)
}
}
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
struct IgnoreGlob {
/// The pattern that was originally parsed.
@@ -235,7 +232,7 @@ impl GitignoreBuilder {
Ok(Gitignore {
set,
globs: self.globs.clone(),
matches: Arc::new(Pool::new(Vec::new)),
matches: Some(Arc::new(Pool::new(Vec::new))),
})
}

View File

@@ -1,11 +1,9 @@
use globset::{Glob, GlobBuilder, GlobSet, GlobSetBuilder};
use regex_automata::dfa;
use regex_automata::dfa::Automaton;
use regex_automata::util::pool::Pool;
use ruff_db::system::SystemPath;
use std::fmt::Formatter;
use std::path::{MAIN_SEPARATOR, MAIN_SEPARATOR_STR};
use std::sync::Arc;
use tracing::warn;
use crate::glob::portable::AbsolutePortableGlobPattern;
@@ -35,12 +33,9 @@ const DFA_SIZE_LIMIT: usize = 1_000_000;
pub(crate) struct IncludeFilter {
#[get_size(ignore)]
glob_set: GlobSet,
original_patterns: Box<[Box<str>]>,
literal_pattern_indices: Box<[usize]>,
original_patterns: Box<[String]>,
#[get_size(size_fn = dfa_memory_usage)]
dfa: Option<dfa::dense::DFA<Vec<u32>>>,
#[get_size(ignore)]
matches: Arc<Pool<Vec<usize>>>,
}
#[allow(clippy::ref_option)]
@@ -50,30 +45,10 @@ fn dfa_memory_usage(dfa: &Option<dfa::dense::DFA<Vec<u32>>>) -> usize {
impl IncludeFilter {
/// Whether the file matches any of the globs.
pub(crate) fn match_file(&self, path: impl AsRef<SystemPath>) -> MatchFile {
pub(crate) fn match_file(&self, path: impl AsRef<SystemPath>) -> bool {
let path = path.as_ref();
if self.literal_pattern_indices.is_empty() {
return if self.glob_set.is_match(path) {
MatchFile::Pattern
} else {
MatchFile::No
};
}
let mut matches = self.matches.get();
self.glob_set.matches_into(path, &mut matches);
if matches.is_empty() {
MatchFile::No
} else {
for match_index in matches.iter() {
if self.literal_pattern_indices.contains(match_index) {
return MatchFile::Literal;
}
}
MatchFile::Pattern
}
self.glob_set.is_match(path)
}
/// Check whether a directory or any of its children can be matched by any of the globs.
@@ -145,36 +120,18 @@ impl PartialEq for IncludeFilter {
impl Eq for IncludeFilter {}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum MatchFile {
No,
/// The file path matches the glob literally exactly. This is only the case for globs
/// that don't use any wildcards.
Literal,
/// The file path matches the glob pattern.
Pattern,
}
impl MatchFile {}
#[derive(Debug)]
pub(crate) struct IncludeFilterBuilder {
set: GlobSetBuilder,
set_len: usize,
original_patterns: Vec<Box<str>>,
original_pattern: Vec<String>,
regexes: Vec<String>,
/// Indices of literal patterns (contain no meta characters).
literal_pattern_indices: Vec<usize>,
}
impl IncludeFilterBuilder {
pub(crate) fn new() -> Self {
Self {
literal_pattern_indices: Vec::new(),
set: GlobSetBuilder::new(),
set_len: 0,
original_patterns: Vec::new(),
original_pattern: Vec::new(),
regexes: Vec::new(),
}
}
@@ -204,16 +161,13 @@ impl IncludeFilterBuilder {
// No need to support Windows-style paths, so the backslash can be used a escape.
.backslash_escape(true)
.build()?;
let is_literal_pattern = globset::escape(glob_pattern) == glob_pattern;
self.original_patterns.push(input.relative().into());
self.original_pattern.push(input.relative().to_string());
// `lib` is the same as `lib/**`
// Add a glob that matches `lib` exactly, change the glob to `lib/**`.
if glob_pattern.ends_with("**") {
self.push_prefix_regex(&glob);
self.add_glob(glob);
self.set.add(glob);
} else {
let prefix_glob = GlobBuilder::new(&format!("{glob_pattern}/**"))
.literal_separator(true)
@@ -222,28 +176,19 @@ impl IncludeFilterBuilder {
.build()?;
self.push_prefix_regex(&prefix_glob);
self.add_glob(prefix_glob);
self.set.add(prefix_glob);
// The reason we add the exact glob, e.g. `src` when the original pattern was `src/` is
// so that `match_file` returns true when matching against a file. However, we don't
// need to do this if this is a pattern that should only match a directory (specifically, its contents).
if !only_directory {
if is_literal_pattern {
self.literal_pattern_indices.push(self.set_len);
}
self.add_glob(glob);
self.set.add(glob);
}
}
Ok(self)
}
fn add_glob(&mut self, glob: Glob) {
self.set.add(glob);
self.set_len += 1;
}
fn push_prefix_regex(&mut self, glob: &Glob) {
let main_separator = regex::escape(MAIN_SEPARATOR_STR);
@@ -294,9 +239,7 @@ impl IncludeFilterBuilder {
Ok(IncludeFilter {
glob_set,
dfa,
literal_pattern_indices: self.literal_pattern_indices.into(),
original_patterns: self.original_patterns.into(),
matches: Arc::new(Pool::new(Vec::new)),
original_patterns: self.original_pattern.into(),
})
}
}
@@ -305,7 +248,7 @@ impl IncludeFilterBuilder {
mod tests {
use std::path::{MAIN_SEPARATOR, MAIN_SEPARATOR_STR};
use crate::glob::include::{IncludeFilter, IncludeFilterBuilder, MatchFile};
use crate::glob::include::{IncludeFilter, IncludeFilterBuilder};
use crate::glob::{PortableGlobKind, PortableGlobPattern};
use ruff_db::system::{MemoryFileSystem, walk_directory::WalkState};
@@ -383,33 +326,33 @@ mod tests {
"files/*.py",
]);
assert_eq!(filter.match_file("lib"), MatchFile::Literal);
assert_eq!(filter.match_file("lib/more/test"), MatchFile::Pattern);
assert!(filter.match_file("lib"));
assert!(filter.match_file("lib/more/test"));
// Unlike `directory`, `directory/` only includes a directory with the given name and its contents
assert_eq!(filter.match_file("directory"), MatchFile::No);
assert_eq!(filter.match_file("directory/more/test"), MatchFile::Pattern);
assert!(!filter.match_file("directory"));
assert!(filter.match_file("directory/more/test"));
// Unlike `src`, `src/*` only includes a directory with the given name.
assert_eq!(filter.match_file("src"), MatchFile::No);
assert_eq!(filter.match_file("src/more/test"), MatchFile::Pattern);
assert!(!filter.match_file("src"));
assert!(filter.match_file("src/more/test"));
// Unlike `tests`, `tests/**` only includes files under `tests`, but not a file named tests
assert_eq!(filter.match_file("tests"), MatchFile::No);
assert_eq!(filter.match_file("tests/more/test"), MatchFile::Pattern);
assert!(!filter.match_file("tests"));
assert!(filter.match_file("tests/more/test"));
// Unlike `match_directory`, prefixes should not be included.
assert_eq!(filter.match_file("a"), MatchFile::No);
assert_eq!(filter.match_file("a/test-b"), MatchFile::No);
assert!(!filter.match_file("a"));
assert!(!filter.match_file("a/test-b"));
assert_eq!(filter.match_file("a/test-b/x"), MatchFile::No);
assert_eq!(filter.match_file("a/test"), MatchFile::No);
assert!(!filter.match_file("a/test-b/x"));
assert!(!filter.match_file("a/test"));
assert_eq!(filter.match_file("files/a.py"), MatchFile::Pattern);
assert_eq!(filter.match_file("files/a.py/bcd"), MatchFile::Pattern);
assert!(filter.match_file("files/a.py"));
assert!(filter.match_file("files/a.py/bcd"));
assert_eq!(filter.match_file("not_included"), MatchFile::No);
assert_eq!(filter.match_file("files/a.pi"), MatchFile::No);
assert!(!filter.match_file("not_included"));
assert!(!filter.match_file("files/a.pi"));
}
/// Check that we skip directories that can never match.

View File

@@ -9,7 +9,6 @@ use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
pub use db::tests::TestDb;
pub use db::{ChangeResult, CheckMode, Db, ProjectDatabase, SalsaMemoryDump};
use files::{Index, Indexed, IndexedFiles};
pub use fixes::suppress_all_diagnostics;
use metadata::settings::Settings;
pub use metadata::{ProjectMetadata, ProjectMetadataError};
use ruff_db::diagnostic::{
@@ -34,7 +33,6 @@ use ty_python_semantic::types::check_types;
mod db;
mod files;
mod fixes;
mod glob;
pub mod metadata;
mod walk;
@@ -216,19 +214,15 @@ impl Project {
/// This means, that this method is an over-approximation of `Self::files` and may return `true` for paths
/// that won't be included when checking the project because they're ignored in a `.gitignore` file.
pub fn is_file_included(self, db: &dyn Db, path: &SystemPath) -> bool {
matches!(
ProjectFilesFilter::from_project(db, self)
.is_file_included(path, GlobFilterCheckMode::Adhoc),
IncludeResult::Included { .. }
)
ProjectFilesFilter::from_project(db, self)
.is_file_included(path, GlobFilterCheckMode::Adhoc)
== IncludeResult::Included
}
pub fn is_directory_included(self, db: &dyn Db, path: &SystemPath) -> bool {
matches!(
ProjectFilesFilter::from_project(db, self)
.is_directory_included(path, GlobFilterCheckMode::Adhoc),
IncludeResult::Included { .. }
)
ProjectFilesFilter::from_project(db, self)
.is_directory_included(path, GlobFilterCheckMode::Adhoc)
== IncludeResult::Included
}
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
@@ -700,7 +694,38 @@ where
Err(error) => {
let message = error.to_diagnostic_message(Some(file.path(db)));
let mut diagnostic = Diagnostic::new(DiagnosticId::Panic, Severity::Fatal, message);
diagnostic.add_bug_sub_diagnostics("%5Bpanic%5D");
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"This indicates a bug in ty.",
));
let report_message = "If you could open an issue at https://github.com/astral-sh/ty/issues/new?title=%5Bpanic%5D, we'd be very appreciative!";
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
report_message,
));
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Platform: {os} {arch}",
os = std::env::consts::OS,
arch = std::env::consts::ARCH
),
));
if let Some(version) = ruff_db::program_version() {
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("Version: {version}"),
));
}
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Args: {args:?}",
args = std::env::args().collect::<Vec<_>>()
),
));
if let Some(backtrace) = error.backtrace {
match backtrace.status() {

View File

@@ -850,6 +850,7 @@ impl SrcOptions {
)]
#[serde(rename_all = "kebab-case", transparent)]
pub struct Rules {
#[get_size(ignore)] // TODO: Add `GetSize` support for `OrderMap`.
inner: OrderMap<RangedValue<String>, RangedValue<Level>, BuildHasherDefault<FxHasher>>,
}
@@ -1292,7 +1293,7 @@ pub struct AnalysisOptions {
respect-type-ignore-comments = false
"#
)]
pub respect_type_ignore_comments: Option<bool>,
respect_type_ignore_comments: Option<bool>,
}
impl AnalysisOptions {

View File

@@ -97,7 +97,7 @@ impl Override {
matches!(
self.files
.is_file_included(path, GlobFilterCheckMode::Adhoc),
IncludeResult::Included { .. }
IncludeResult::Included
)
}
}

View File

@@ -79,9 +79,7 @@ impl<'a> ProjectFilesFilter<'a> {
match self.match_included_paths(path, mode) {
None => IncludeResult::NotIncluded,
Some(CheckPathMatch::Partial) => self.src_filter.is_file_included(path, mode),
Some(CheckPathMatch::Full) => IncludeResult::Included {
literal_match: Some(true),
},
Some(CheckPathMatch::Full) => IncludeResult::Included,
}
}
@@ -95,9 +93,7 @@ impl<'a> ProjectFilesFilter<'a> {
Some(CheckPathMatch::Partial) => {
self.src_filter.is_directory_maybe_included(path, mode)
}
Some(CheckPathMatch::Full) => IncludeResult::Included {
literal_match: Some(true),
},
Some(CheckPathMatch::Full) => IncludeResult::Included,
}
}
}
@@ -193,59 +189,60 @@ impl<'a> ProjectFilesWalker<'a> {
let directory_included = filter
.is_directory_included(entry.path(), GlobFilterCheckMode::TopDown);
return match directory_included {
IncludeResult::Included { .. } => WalkState::Continue,
IncludeResult::Included => WalkState::Continue,
IncludeResult::Excluded => {
tracing::debug!(
"Skipping directory '{path}' because it is excluded by a default or `src.exclude` pattern",
path=entry.path()
);
WalkState::Skip
}
},
IncludeResult::NotIncluded => {
tracing::debug!(
"Skipping directory `{path}` because it doesn't match any `src.include` pattern or path specified on the CLI",
path=entry.path()
);
WalkState::Skip
}
},
};
}
} else {
// Ignore any non python files to avoid creating too many entries in `Files`.
// Unless the file is explicitly passed, we then always assume it's a python file.
let source_type = entry.path().extension().and_then(PySourceType::try_from_extension).or_else(|| {
if entry.depth() == 0 {
Some(PySourceType::Python)
} else {
db.system().source_type(entry.path())
}
});
if source_type.is_none()
{
return WalkState::Continue;
}
// For all files, except the ones that were explicitly passed to the walker (CLI),
// check if they're included in the project.
if entry.depth() > 0 || self.force_exclude {
match filter
.is_file_included(entry.path(), GlobFilterCheckMode::TopDown)
{
IncludeResult::Included { literal_match } => {
// Ignore any non python files to avoid creating too many entries in `Files`.
// Unless the file is explicitly passed on the CLI or a literal match in the `include`, we then always assume it's a file ty can analyze
let source_type = if literal_match == Some(true) || entry.depth() == 0 {
Some(PySourceType::Python)
} else {
entry.path().extension().and_then(PySourceType::try_from_extension).or_else(|| db.system().source_type(entry.path()))
};
if source_type.is_none()
{
return WalkState::Continue;
}
}
IncludeResult::Included => {},
IncludeResult::Excluded => {
tracing::debug!(
"Ignoring file `{path}` because it is excluded by a default or `src.exclude` pattern.",
path=entry.path()
);
return WalkState::Continue;
}
},
IncludeResult::NotIncluded => {
tracing::debug!(
"Ignoring file `{path}` because it doesn't match any `src.include` pattern or path specified on the CLI.",
path=entry.path()
);
return WalkState::Continue;
}
},
}
}

View File

@@ -168,56 +168,6 @@ on top of that:
Foo = NewType("Foo", 42)
```
## `NewType`s in arithmetic and comparison expressions might or might not act as their base
These expressions are valid because `Foo` acts as its base type, `int`:
```py
from typing import NewType
Foo = NewType("Foo", int)
reveal_type(Foo(42) + 1) # revealed: int
reveal_type(1 + Foo(42)) # revealed: int
reveal_type(Foo(42) + Foo(42)) # revealed: int
reveal_type(Foo(42) == 42) # revealed: bool
reveal_type(42 == Foo(42)) # revealed: bool
reveal_type(Foo(42) == Foo(42)) # revealed: bool
```
However, we can't always substitute `int` for `Foo` to evaluate expressions like these. In the
following cases, only `Foo` itself is valid:
```py
class Bar:
def __add__(self, other: Foo) -> Foo:
return other
def __radd__(self, other: Foo) -> Foo:
return other
def __lt__(self, other: Foo) -> bool:
return True
def __gt__(self, other: Foo) -> bool:
return True
def __contains__(self, key: Foo) -> bool:
return True
reveal_type(Foo(42) + Bar()) # revealed: Foo
reveal_type(Bar() + Foo(42)) # revealed: Foo
reveal_type(Foo(42) < Bar()) # revealed: bool
reveal_type(Bar() < Foo(42)) # revealed: bool
reveal_type(Foo(42) in Bar()) # revealed: bool
42 + Bar() # error: [unsupported-operator]
Bar() + 42 # error: [unsupported-operator]
42 < Bar() # error: [unsupported-operator]
Bar() < 42 # error: [unsupported-operator]
42 in Bar() # error: [unsupported-operator]
```
## `float` and `complex` special cases
`float` and `complex` are subject to a special case in the typing spec, which we currently interpret
@@ -228,7 +178,6 @@ and we accept the unions they expand into.
```py
from typing import NewType
from ty_extensions import static_assert, is_assignable_to
Foo = NewType("Foo", float)
Foo(3.14)
@@ -237,15 +186,6 @@ Foo("hello") # error: [invalid-argument-type] "Argument is incorrect: Expected
reveal_type(Foo(3.14).__class__) # revealed: type[int] | type[float]
reveal_type(Foo(42).__class__) # revealed: type[int] | type[float]
static_assert(is_assignable_to(Foo, float))
static_assert(is_assignable_to(Foo, int | float))
static_assert(is_assignable_to(Foo, int | float | None))
# The assignments above require treating `Foo` as its underlying union type. Each of its members is
# assignable to the union on the right, so `Foo` is assignable to the union, even though `Foo` as a
# whole isn't assignable to any one member. However, as in the previous section, we need to be sure
# that this treatment doesn't break cases like the assignment below, where `Foo` *is* assignable to
# the union on the right, even though its members *aren't*.
static_assert(is_assignable_to(Foo, Foo | None))
Bar = NewType("Bar", complex)
Bar(1 + 2j)
@@ -256,11 +196,6 @@ Bar("goodbye") # error: [invalid-argument-type]
reveal_type(Bar(1 + 2j).__class__) # revealed: type[int] | type[float] | type[complex]
reveal_type(Bar(3.14).__class__) # revealed: type[int] | type[float] | type[complex]
reveal_type(Bar(42).__class__) # revealed: type[int] | type[float] | type[complex]
static_assert(is_assignable_to(Bar, complex))
static_assert(is_assignable_to(Bar, int | float | complex))
static_assert(is_assignable_to(Bar, int | float | complex | None))
# See the `Foo | None` case above.
static_assert(is_assignable_to(Bar, Bar | None))
```
We don't currently try to distinguish between an implicit union (e.g. `float`) and the equivalent
@@ -288,52 +223,6 @@ def g(_: Callable[[int | float | complex], Bar]): ...
g(Bar)
```
The arithmetic and comparison test cases in the previous section used a `NewType` of `int`, but
`NewType`s of `float` and `complex` are more complicated, because their base type is a union, and
that union needs special handling in binary expressions. In these examples, we we need to lower
`Foo` to `int | float` and then check each member of that union _individually_, as we would with an
explicit `Union` on the left side:
```py
reveal_type(Foo(3.14) < Foo(42)) # revealed: bool
reveal_type(Foo(3.14) == Foo(42)) # revealed: bool
reveal_type(Foo(3.14) + Foo(42)) # revealed: int | float
reveal_type(Foo(3.14) / Foo(42)) # revealed: int | float
```
But again as above, we can't _always_ lower `Foo` to `int | float`, because there are also binary
expressions where only `Foo` itself is valid:
```py
class Bing:
def __add__(self, other: Foo) -> Foo:
return other
def __radd__(self, other: Foo) -> Foo:
return other
def __lt__(self, other: Foo) -> bool:
return True
def __gt__(self, other: Foo) -> bool:
return True
def __contains__(self, key: Foo) -> bool:
return True
reveal_type(Foo(3.14) + Bing()) # revealed: Foo
reveal_type(Bing() + Foo(42)) # revealed: Foo
reveal_type(Foo(3.14) < Bing()) # revealed: bool
reveal_type(Bing() < Foo(42)) # revealed: bool
reveal_type(Foo(3.14) in Bing()) # revealed: bool
3.14 + Bing() # error: [unsupported-operator]
Bing() + 3.14 # error: [unsupported-operator]
3.14 < Bing() # error: [unsupported-operator]
Bing() < 3.14 # error: [unsupported-operator]
3.14 in Bing() # error: [unsupported-operator]
```
## A `NewType` definition must be a simple variable assignment
```py

View File

@@ -359,51 +359,6 @@ reveal_type(GenericCircle[int].bar()) # revealed: GenericCircle[int]
reveal_type(GenericCircle.baz(1)) # revealed: GenericShape[Literal[1]]
```
### Calling `super()` in overridden methods with `Self` return type
This is a regression test for <https://github.com/astral-sh/ty/issues/2122>.
When a child class overrides a parent method with a `Self` return type and calls `super().method()`,
the return type should be the child's `Self` type variable, not the concrete child class type.
```py
from typing import Self
class Parent:
def copy(self) -> Self:
return self
class Child(Parent):
def copy(self) -> Self:
result = super().copy()
reveal_type(result) # revealed: Self@copy
return result
# When called on concrete types, Self is substituted correctly.
reveal_type(Child().copy()) # revealed: Child
```
The same applies to classmethods with `Self` return types:
```py
from typing import Self
class Parent:
@classmethod
def create(cls) -> Self:
return cls()
class Child(Parent):
@classmethod
def create(cls) -> Self:
result = super().create()
reveal_type(result) # revealed: Self@create
return result
# When called on concrete types, Self is substituted correctly.
reveal_type(Child.create()) # revealed: Child
```
## Attributes
TODO: The use of `Self` to annotate the `next_node` attribute should be

Some files were not shown because too many files have changed in this diff Show More