Compare commits

...

1 Commits

Author SHA1 Message Date
Charlie Marsh
400732a655 Use FoldHash 2024-08-16 18:24:38 -04:00
107 changed files with 421 additions and 425 deletions

30
Cargo.lock generated
View File

@@ -846,6 +846,12 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4deb59dd6330afa472c000b86c0c9ada26274836eb59563506c3e34e4bb9a819"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
@@ -1916,6 +1922,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"crossbeam",
"foldhash",
"jod-thread",
"libc",
"lsp-server",
@@ -1928,7 +1935,6 @@ dependencies = [
"ruff_python_ast",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"serde",
"serde_json",
"shellexpand",
@@ -1958,13 +1964,13 @@ version = "0.0.0"
dependencies = [
"anyhow",
"crossbeam",
"foldhash",
"notify",
"red_knot_python_semantic",
"ruff_cache",
"ruff_db",
"ruff_python_ast",
"ruff_text_size",
"rustc-hash 2.0.0",
"salsa",
"thiserror",
"tracing",
@@ -2073,6 +2079,7 @@ dependencies = [
"clearscreen",
"colored",
"filetime",
"foldhash",
"ignore",
"insta",
"insta-cmd",
@@ -2095,7 +2102,6 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash 2.0.0",
"serde",
"serde_json",
"shellexpand",
@@ -2154,6 +2160,7 @@ dependencies = [
"countme",
"dashmap 6.0.1",
"filetime",
"foldhash",
"ignore",
"insta",
"matchit",
@@ -2165,7 +2172,6 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"salsa",
"tempfile",
"thiserror",
@@ -2231,10 +2237,10 @@ name = "ruff_formatter"
version = "0.0.0"
dependencies = [
"drop_bomb",
"foldhash",
"ruff_cache",
"ruff_macros",
"ruff_text_size",
"rustc-hash 2.0.0",
"schemars",
"serde",
"static_assertions",
@@ -2262,6 +2268,7 @@ dependencies = [
"clap",
"colored",
"fern",
"foldhash",
"glob",
"globset",
"imperative",
@@ -2294,7 +2301,6 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"schemars",
"serde",
"serde_json",
@@ -2349,6 +2355,7 @@ dependencies = [
"aho-corasick",
"bitflags 2.6.0",
"compact_str",
"foldhash",
"is-macro",
"itertools 0.13.0",
"once_cell",
@@ -2357,7 +2364,6 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"schemars",
"serde",
]
@@ -2392,6 +2398,7 @@ dependencies = [
"anyhow",
"clap",
"countme",
"foldhash",
"insta",
"itertools 0.13.0",
"memchr",
@@ -2405,7 +2412,6 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"schemars",
"serde",
"serde_json",
@@ -2446,13 +2452,13 @@ dependencies = [
"bitflags 2.6.0",
"bstr",
"compact_str",
"foldhash",
"insta",
"memchr",
"ruff_python_ast",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"static_assertions",
"unicode-ident",
"unicode-normalization",
@@ -2475,6 +2481,7 @@ name = "ruff_python_semantic"
version = "0.0.0"
dependencies = [
"bitflags 2.6.0",
"foldhash",
"is-macro",
"ruff_cache",
"ruff_index",
@@ -2484,7 +2491,6 @@ dependencies = [
"ruff_python_stdlib",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"schemars",
"serde",
]
@@ -2523,6 +2529,7 @@ version = "0.2.2"
dependencies = [
"anyhow",
"crossbeam",
"foldhash",
"ignore",
"insta",
"jod-thread",
@@ -2542,7 +2549,6 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash 2.0.0",
"serde",
"serde_json",
"shellexpand",
@@ -2602,6 +2608,7 @@ dependencies = [
"anyhow",
"colored",
"etcetera",
"foldhash",
"glob",
"globset",
"ignore",
@@ -2621,7 +2628,6 @@ dependencies = [
"ruff_python_formatter",
"ruff_python_semantic",
"ruff_source_file",
"rustc-hash 2.0.0",
"schemars",
"serde",
"shellexpand",

View File

@@ -105,9 +105,10 @@ pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rustc-hash = { version = "2.0.0" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
foldhash = { version = "0.1.0" }
salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }

View File

@@ -25,7 +25,7 @@ crossbeam = { workspace = true }
jod-thread = { workspace = true }
lsp-server = { workspace = true }
lsp-types = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }

View File

@@ -1,7 +1,7 @@
use anyhow::Ok;
use foldhash::{HashMap, HashMapExt};
use lsp_types::NotebookCellKind;
use ruff_notebook::CellMetadata;
use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{PositionEncoding, TextDocument};
@@ -17,7 +17,7 @@ pub struct NotebookDocument {
metadata: ruff_notebook::RawNotebookMetadata,
version: DocumentVersion,
// Used to quickly find the index of a cell for a given URL.
cell_index: FxHashMap<lsp_types::Url, CellId>,
cell_index: HashMap<lsp_types::Url, CellId>,
}
/// A single cell within a notebook, which has text contents represented as a `TextDocument`.
@@ -35,7 +35,7 @@ impl NotebookDocument {
metadata: serde_json::Map<String, serde_json::Value>,
cell_documents: Vec<lsp_types::TextDocumentItem>,
) -> crate::Result<Self> {
let mut cell_contents: FxHashMap<_, _> = cell_documents
let mut cell_contents: HashMap<_, _> = cell_documents
.into_iter()
.map(|document| (document.uri, document.text))
.collect();
@@ -122,7 +122,7 @@ impl NotebookDocument {
// Instead, it only provides that (a) these cell URIs were removed, and (b) these
// cell URIs were added.
// https://github.com/astral-sh/ruff/issues/12573
let mut deleted_cells = FxHashMap::default();
let mut deleted_cells = HashMap::default();
// First, delete the cells and remove them from the index.
if delete > 0 {
@@ -216,8 +216,8 @@ impl NotebookDocument {
self.cells.get_mut(*self.cell_index.get(uri)?)
}
fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap<lsp_types::Url, CellId> {
let mut index = FxHashMap::with_capacity_and_hasher(cells.len(), FxBuildHasher);
fn make_cell_index(cells: &[NotebookCell]) -> HashMap<lsp_types::Url, CellId> {
let mut index = HashMap::with_capacity(cells.len());
for (i, cell) in cells.iter().enumerate() {
index.insert(cell.url.clone(), i);
}

View File

@@ -1,7 +1,7 @@
use std::any::TypeId;
use foldhash::HashMap;
use lsp_server::{Notification, RequestId};
use rustc_hash::FxHashMap;
use serde_json::Value;
use super::{schedule::Task, ClientSender};
@@ -23,7 +23,7 @@ pub(crate) struct Responder(ClientSender);
pub(crate) struct Requester<'s> {
sender: ClientSender,
next_request_id: i32,
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
response_handlers: HashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
}
impl<'s> Client<'s> {
@@ -34,7 +34,7 @@ impl<'s> Client<'s> {
requester: Requester {
sender,
next_request_id: 1,
response_handlers: FxHashMap::default(),
response_handlers: HashMap::default(),
},
}
}

View File

@@ -2,8 +2,8 @@ use std::borrow::Cow;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use foldhash::HashMap;
use lsp_types::Url;
use rustc_hash::FxHashMap;
use crate::{
edit::{DocumentKey, DocumentVersion, NotebookDocument},
@@ -16,10 +16,10 @@ use super::ClientSettings;
#[derive(Default, Debug)]
pub(crate) struct Index {
/// Maps all document file URLs to the associated document controller
documents: FxHashMap<Url, DocumentController>,
documents: HashMap<Url, DocumentController>,
/// Maps opaque cell URLs to a notebook URL (document)
notebook_cells: FxHashMap<Url, Url>,
notebook_cells: HashMap<Url, Url>,
/// Global settings provided by the client.
global_settings: ClientSettings,
@@ -28,8 +28,8 @@ pub(crate) struct Index {
impl Index {
pub(super) fn new(global_settings: ClientSettings) -> Self {
Self {
documents: FxHashMap::default(),
notebook_cells: FxHashMap::default(),
documents: HashMap::default(),
notebook_cells: HashMap::default(),
global_settings,
}
}

View File

@@ -1,11 +1,11 @@
use std::path::PathBuf;
use foldhash::HashMap;
use lsp_types::Url;
use rustc_hash::FxHashMap;
use serde::Deserialize;
/// Maps a workspace URI to its associated client settings. Used during server initialization.
pub(crate) type WorkspaceSettingsMap = FxHashMap<Url, ClientSettings>;
pub(crate) type WorkspaceSettingsMap = HashMap<Url, ClientSettings>;
/// This is a direct representation of the settings schema sent by the client.
#[derive(Debug, Deserialize, Default)]

View File

@@ -22,7 +22,7 @@ ruff_text_size = { workspace = true }
anyhow = { workspace = true }
crossbeam = { workspace = true }
notify = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
@@ -18,13 +18,13 @@ impl RootDatabase {
let mut workspace_change = false;
// Packages that need reloading
let mut changed_packages = FxHashSet::default();
let mut changed_packages = HashSet::default();
// Paths that were added
let mut added_paths = FxHashSet::default();
let mut added_paths = HashSet::default();
// Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
let mut synced_files = FxHashSet::default();
let mut synced_recursively = FxHashSet::default();
let mut synced_files = HashSet::default();
let mut synced_recursively = HashSet::default();
let mut sync_path = |db: &mut RootDatabase, path: &SystemPath| {
if synced_files.insert(path.to_path_buf()) {

View File

@@ -1,6 +1,6 @@
use std::{collections::BTreeMap, sync::Arc};
use rustc_hash::{FxBuildHasher, FxHashSet};
use foldhash::{HashMapExt, HashSet, HashSetExt};
use salsa::{Durability, Setter as _};
pub use metadata::{PackageMetadata, WorkspaceMetadata};
@@ -74,7 +74,7 @@ pub struct Workspace {
/// open files rather than all files in the workspace.
#[return_ref]
#[default]
open_fileset: Option<Arc<FxHashSet<File>>>,
open_fileset: Option<Arc<HashSet<File>>>,
/// The (first-party) packages in this workspace.
#[return_ref]
@@ -219,7 +219,7 @@ impl Workspace {
}
/// Returns the open files in the workspace or `None` if the entire workspace should be checked.
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
pub fn open_files(self, db: &dyn Db) -> Option<&HashSet<File>> {
self.open_fileset(db).as_deref()
}
@@ -227,7 +227,7 @@ impl Workspace {
///
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
#[tracing::instrument(level = "debug", skip(self, db))]
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
pub fn set_open_files(self, db: &mut dyn Db, open_files: HashSet<File>) {
tracing::debug!("Set open workspace files (count: {})", open_files.len());
self.set_open_fileset(db).to(Some(Arc::new(open_files)));
@@ -236,7 +236,7 @@ impl Workspace {
/// This takes the open files from the workspace and returns them.
///
/// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
pub fn take_open_files(self, db: &mut dyn Db) -> HashSet<File> {
tracing::debug!("Take open workspace files");
// Salsa will cancel any pending queries and remove its own reference to `open_files`
@@ -246,7 +246,7 @@ impl Workspace {
if let Some(open_files) = open_files {
Arc::try_unwrap(open_files).unwrap()
} else {
FxHashSet::default()
HashSet::default()
}
}
}
@@ -372,7 +372,7 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
Diagnostics::from(diagnostics)
}
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> HashSet<File> {
let paths = std::sync::Mutex::new(Vec::new());
db.system().walk_directory(path).run(|| {
@@ -402,7 +402,7 @@ fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
});
let paths = paths.into_inner().unwrap();
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
let mut files = HashSet::with_capacity(paths.len());
for path in paths {
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.

View File

@@ -2,7 +2,7 @@ use std::iter::FusedIterator;
use std::ops::Deref;
use std::sync::Arc;
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use salsa::Setter;
use ruff_db::files::File;
@@ -105,7 +105,7 @@ pub struct LazyFiles<'a> {
impl<'a> LazyFiles<'a> {
/// Sets the indexed files of a package to `files`.
pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
pub fn set(mut self, files: HashSet<File>) -> IndexedFiles {
let files = IndexedFiles::new(files);
*self.files = State::Indexed(files.clone());
files
@@ -127,11 +127,11 @@ impl<'a> LazyFiles<'a> {
#[derive(Debug, Clone)]
pub struct IndexedFiles {
revision: u64,
files: Arc<std::sync::Mutex<FxHashSet<File>>>,
files: Arc<std::sync::Mutex<HashSet<File>>>,
}
impl IndexedFiles {
fn new(files: FxHashSet<File>) -> Self {
fn new(files: HashSet<File>) -> Self {
Self {
files: Arc::new(std::sync::Mutex::new(files)),
revision: 0,
@@ -155,11 +155,11 @@ impl PartialEq for IndexedFiles {
impl Eq for IndexedFiles {}
pub struct IndexedFilesGuard<'a> {
guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
guard: std::sync::MutexGuard<'a, HashSet<File>>,
}
impl Deref for IndexedFilesGuard<'_> {
type Target = FxHashSet<File>;
type Target = HashSet<File>;
fn deref(&self) -> &Self::Target {
&self.guard

View File

@@ -44,7 +44,7 @@ notify = { workspace = true }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
rayon = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }

View File

@@ -9,9 +9,9 @@ use anyhow::{anyhow, bail};
use clap::builder::{TypedValueParser, ValueParserFactory};
use clap::{command, Parser};
use colored::Colorize;
use foldhash::HashMap;
use path_absolutize::path_dedot;
use regex::Regex;
use rustc_hash::FxHashMap;
use toml;
use ruff_linter::line_width::LineLength;
@@ -1278,7 +1278,7 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
let mut per_file_ignores: HashMap<String, Vec<RuleSelector>> = HashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)

View File

@@ -9,11 +9,11 @@ use std::time::{Duration, SystemTime};
use anyhow::{Context, Result};
use filetime::FileTime;
use foldhash::HashMap;
use itertools::Itertools;
use log::{debug, error};
use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::NamedTempFile;
@@ -140,7 +140,7 @@ impl Cache {
fn empty(path: PathBuf, package_root: PathBuf) -> Self {
let package = PackageCache {
package_root,
files: FxHashMap::default(),
files: HashMap::default(),
};
Cache::new(path, package)
}
@@ -318,7 +318,7 @@ struct PackageCache {
/// single file "packages", e.g. scripts.
package_root: PathBuf,
/// Mapping of source file path to its cached data.
files: FxHashMap<RelativePathBuf, FileCache>,
files: HashMap<RelativePathBuf, FileCache>,
}
/// On disk representation of the cache per source file.
@@ -357,9 +357,9 @@ impl FileCache {
.collect()
};
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
HashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
} else {
FxHashMap::default()
HashMap::default()
};
Diagnostics::new(messages, notebook_indexes)
})
@@ -493,11 +493,11 @@ where
}
}
pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);
pub(crate) struct PackageCacheMap<'a>(HashMap<&'a Path, Cache>);
impl<'a> PackageCacheMap<'a> {
pub(crate) fn init(
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
package_roots: &HashMap<&'a Path, Option<&'a Path>>,
resolver: &Resolver,
) -> Self {
fn init_cache(path: &Path) {

View File

@@ -5,11 +5,11 @@ use std::time::Instant;
use anyhow::Result;
use colored::Colorize;
use foldhash::HashMap;
use ignore::Error;
use log::{debug, error, warn};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic;
use ruff_linter::message::Message;
@@ -133,7 +133,7 @@ pub(crate) fn check(
dummy,
TextSize::default(),
)],
FxHashMap::default(),
HashMap::default(),
)
} else {
warn!(
@@ -221,7 +221,7 @@ mod test {
use std::os::unix::fs::OpenOptionsExt;
use anyhow::Result;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use tempfile::TempDir;
use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
@@ -284,7 +284,7 @@ mod test {
.emit(
&mut output,
&diagnostics.messages,
&EmitterContext::new(&FxHashMap::default()),
&EmitterContext::new(&HashMap::default()),
)
.unwrap();

View File

@@ -7,11 +7,11 @@ use std::time::Instant;
use anyhow::Result;
use colored::Colorize;
use foldhash::HashSet;
use itertools::Itertools;
use log::{error, warn};
use rayon::iter::Either::{Left, Right};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::debug;
@@ -782,7 +782,7 @@ impl Display for FormatCommandError {
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
// First, collect all rules that are incompatible regardless of the linter-specific settings.
let mut incompatible_rules = FxHashSet::default();
let mut incompatible_rules = HashSet::default();
for setting in resolver.settings() {
for rule in [
// The formatter might collapse implicit string concatenation on a single line.

View File

@@ -9,8 +9,8 @@ use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use foldhash::HashMap;
use log::{debug, warn};
use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic;
use ruff_linter::codes::Rule;
@@ -33,13 +33,13 @@ use crate::cache::{Cache, FileCacheKey, LintCacheData};
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
pub(crate) fixed: FixMap,
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
pub(crate) notebook_indexes: HashMap<String, NotebookIndex>,
}
impl Diagnostics {
pub(crate) fn new(
messages: Vec<Message>,
notebook_indexes: FxHashMap<String, NotebookIndex>,
notebook_indexes: HashMap<String, NotebookIndex>,
) -> Self {
Self {
messages,
@@ -72,7 +72,7 @@ impl Diagnostics {
source_file,
TextSize::default(),
)],
FxHashMap::default(),
HashMap::default(),
)
} else {
match path {
@@ -106,7 +106,7 @@ impl Diagnostics {
range: TextRange::default(),
file: dummy,
})],
FxHashMap::default(),
HashMap::default(),
)
}
}
@@ -132,7 +132,7 @@ impl AddAssign for Diagnostics {
/// A collection of fixes indexed by file path.
#[derive(Debug, Default, PartialEq)]
pub(crate) struct FixMap(FxHashMap<String, FixTable>);
pub(crate) struct FixMap(HashMap<String, FixTable>);
impl FixMap {
/// Returns `true` if there are no fixes in the map.
@@ -314,7 +314,7 @@ pub(crate) fn lint_path(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
let fixed = HashMap::default();
(result, transformed, fixed)
}
} else {
@@ -328,7 +328,7 @@ pub(crate) fn lint_path(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
let fixed = HashMap::default();
(result, transformed, fixed)
};
@@ -357,9 +357,9 @@ pub(crate) fn lint_path(
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
HashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
} else {
FxHashMap::default()
HashMap::default()
};
Ok(Diagnostics {
@@ -456,7 +456,7 @@ pub(crate) fn lint_stdin(
}
let transformed = source_kind;
let fixed = FxHashMap::default();
let fixed = HashMap::default();
(result, transformed, fixed)
}
} else {
@@ -470,17 +470,17 @@ pub(crate) fn lint_stdin(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
let fixed = HashMap::default();
(result, transformed, fixed)
};
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
FxHashMap::from_iter([(
HashMap::from_iter([(
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
notebook.into_index(),
)])
} else {
FxHashMap::default()
HashMap::default()
};
Ok(Diagnostics {

View File

@@ -31,7 +31,7 @@ thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
tracing-tree = { workspace = true, optional = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
[target.'cfg(not(target_arch="wasm32"))'.dependencies]
zip = { workspace = true, features = ["zstd"] }

View File

@@ -6,6 +6,7 @@ use dashmap::mapref::entry::Entry;
use salsa::{Durability, Setter};
pub use file_root::{FileRoot, FileRootKind};
use foldhash::{HashMapExt, HashSetExt};
pub use path::FilePath;
use ruff_notebook::{Notebook, NotebookError};

View File

@@ -1,6 +1,4 @@
use std::hash::BuildHasherDefault;
use rustc_hash::FxHasher;
use foldhash::fast::RandomState;
use crate::files::Files;
use crate::system::System;
@@ -15,8 +13,8 @@ pub mod system;
pub mod testing;
pub mod vendored;
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, RandomState>;
pub type FxDashSet<K> = dashmap::DashSet<K, RandomState>;
/// Most basic database that gives access to files, the host system, source code, and parsed AST.
#[salsa::db]

View File

@@ -4,7 +4,7 @@ use std::sync::{Arc, RwLock, RwLockWriteGuard};
use camino::{Utf8Path, Utf8PathBuf};
use filetime::FileTime;
use rustc_hash::FxHashMap;
use foldhash::{HashMap, HashMapExt, HashSetExt};
use ruff_notebook::{Notebook, NotebookError};
@@ -54,7 +54,7 @@ impl MemoryFileSystem {
let fs = Self {
inner: Arc::new(MemoryFileSystemInner {
by_path: RwLock::new(BTreeMap::default()),
virtual_files: RwLock::new(FxHashMap::default()),
virtual_files: RwLock::new(HashMap::default()),
cwd: cwd.clone(),
}),
};
@@ -385,7 +385,7 @@ impl std::fmt::Debug for MemoryFileSystem {
struct MemoryFileSystemInner {
by_path: RwLock<BTreeMap<Utf8PathBuf, Entry>>,
virtual_files: RwLock<FxHashMap<SystemVirtualPathBuf, File>>,
virtual_files: RwLock<HashMap<SystemVirtualPathBuf, File>>,
cwd: SystemPathBuf,
}

View File

@@ -16,7 +16,7 @@ ruff_macros = { workspace = true }
ruff_text_size = { workspace = true }
drop_bomb = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
static_assertions = { workspace = true }

View File

@@ -2,7 +2,7 @@ use super::{write, Arguments, FormatElement};
use crate::format_element::Interned;
use crate::prelude::LineMode;
use crate::{FormatResult, FormatState};
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use std::any::{Any, TypeId};
use std::fmt::Debug;
use std::ops::{Deref, DerefMut};
@@ -349,7 +349,7 @@ pub struct RemoveSoftLinesBuffer<'a, Context> {
///
/// It's fine to not snapshot the cache. The worst that can happen is that it holds on to interned
/// elements that are now unused. But there's little harm in that and the cache is cleaned when dropping the buffer.
interned_cache: FxHashMap<Interned, Interned>,
interned_cache: HashMap<Interned, Interned>,
}
impl<'a, Context> RemoveSoftLinesBuffer<'a, Context> {
@@ -357,7 +357,7 @@ impl<'a, Context> RemoveSoftLinesBuffer<'a, Context> {
pub fn new(inner: &'a mut dyn Buffer<Context = Context>) -> Self {
Self {
inner,
interned_cache: FxHashMap::default(),
interned_cache: HashMap::default(),
}
}
@@ -370,7 +370,7 @@ impl<'a, Context> RemoveSoftLinesBuffer<'a, Context> {
// Extracted to function to avoid monomorphization
fn clean_interned(
interned: &Interned,
interned_cache: &mut FxHashMap<Interned, Interned>,
interned_cache: &mut HashMap<Interned, Interned>,
) -> Interned {
if let Some(cleaned) = interned_cache.get(interned) {
cleaned.clone()

View File

@@ -1,7 +1,6 @@
use std::collections::HashMap;
use std::ops::Deref;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use crate::format_element::tag::{Condition, DedentMode};
use crate::prelude::tag::GroupMode;
@@ -57,7 +56,7 @@ impl Document {
fn propagate_expands<'a>(
elements: &'a [FormatElement],
enclosing: &mut Vec<Enclosing<'a>>,
checked_interned: &mut FxHashMap<&'a Interned, bool>,
checked_interned: &mut HashMap<&'a Interned, bool>,
) -> bool {
let mut expands = false;
for element in elements {
@@ -147,7 +146,7 @@ impl Document {
} else {
self.len().ilog2() as usize
});
let mut interned = FxHashMap::default();
let mut interned = HashMap::default();
propagate_expands(self, &mut enclosing, &mut interned);
}
@@ -210,7 +209,7 @@ impl<'a> IrFormatContext<'a> {
fn new(source_code: SourceCode<'a>) -> Self {
Self {
source_code,
printed_interned_elements: HashMap::new(),
printed_interned_elements: HashMap::default(),
}
}
}

View File

@@ -56,7 +56,7 @@ pep440_rs = { workspace = true, features = ["serde"] }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }

View File

@@ -2,8 +2,8 @@
use std::path::Path;
use foldhash::HashSet;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_trivia::CommentRanges;
@@ -132,7 +132,7 @@ pub(crate) fn check_noqa(
let mut unknown_codes = vec![];
let mut unmatched_codes = vec![];
let mut valid_codes = vec![];
let mut seen_codes = FxHashSet::default();
let mut seen_codes = HashSet::default();
let mut self_ignore = false;
for original_code in directive.iter().map(Code::as_str) {
let code = get_redirect_target(original_code).unwrap_or(original_code);

View File

@@ -1,8 +1,8 @@
use itertools::Itertools;
use std::collections::BTreeSet;
use foldhash::{HashMap, HashMapExt, HashSet};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel, SourceMap};
use ruff_source_file::Locator;
@@ -57,8 +57,8 @@ fn apply_fixes<'a>(
let mut output = String::with_capacity(locator.len());
let mut last_pos: Option<TextSize> = None;
let mut applied: BTreeSet<&Edit> = BTreeSet::default();
let mut isolated: FxHashSet<u32> = FxHashSet::default();
let mut fixed = FxHashMap::default();
let mut isolated: HashSet<u32> = HashSet::default();
let mut fixed = HashMap::default();
let mut source_map = SourceMap::default();
for (rule, fix) in diagnostics

View File

@@ -4,8 +4,8 @@ use std::path::Path;
use anyhow::{anyhow, Result};
use colored::Colorize;
use foldhash::HashMap;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic;
use ruff_notebook::Notebook;
@@ -43,7 +43,7 @@ pub struct LinterResult {
pub has_syntax_error: bool,
}
pub type FixTable = FxHashMap<Rule, usize>;
pub type FixTable = HashMap<Rule, usize>;
pub struct FixerResult<'a> {
/// The result returned by the linter, after applying any fixes.
@@ -476,7 +476,7 @@ pub fn lint_fix<'a>(
let mut transformed = Cow::Borrowed(source_kind);
// Track the number of fixed errors across iterations.
let mut fixed = FxHashMap::default();
let mut fixed = HashMap::default();
// As an escape hatch, bail after 100 iterations.
let mut iterations = 0;

View File

@@ -5,10 +5,10 @@ use std::sync::Mutex;
use anyhow::Result;
use colored::Colorize;
use fern;
use foldhash::HashSet;
use log::Level;
use once_cell::sync::Lazy;
use ruff_python_parser::{ParseError, ParseErrorType};
use rustc_hash::FxHashSet;
use ruff_source_file::{LineIndex, OneIndexed, SourceCode, SourceLocation};
@@ -35,7 +35,7 @@ macro_rules! warn_user_once_by_id {
};
}
pub static MESSAGES: Lazy<Mutex<FxHashSet<String>>> = Lazy::new(Mutex::default);
pub static MESSAGES: Lazy<Mutex<HashSet<String>>> = Lazy::new(Mutex::default);
/// Warn a user once, if warnings are enabled, with uniqueness determined by the content of the
/// message.

View File

@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use std::io::Write;
use std::ops::Deref;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
pub use azure::AzureEmitter;
pub use github::GithubEmitter;
@@ -285,11 +285,11 @@ pub trait Emitter {
/// Context passed to [`Emitter`].
pub struct EmitterContext<'a> {
notebook_indexes: &'a FxHashMap<String, NotebookIndex>,
notebook_indexes: &'a HashMap<String, NotebookIndex>,
}
impl<'a> EmitterContext<'a> {
pub fn new(notebook_indexes: &'a FxHashMap<String, NotebookIndex>) -> Self {
pub fn new(notebook_indexes: &'a HashMap<String, NotebookIndex>) -> Self {
Self { notebook_indexes }
}
@@ -305,7 +305,7 @@ impl<'a> EmitterContext<'a> {
#[cfg(test)]
mod tests {
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix};
use ruff_notebook::NotebookIndex;
@@ -399,7 +399,7 @@ def fibonacci(n):
]
}
pub(super) fn create_notebook_messages() -> (Vec<Message>, FxHashMap<String, NotebookIndex>) {
pub(super) fn create_notebook_messages() -> (Vec<Message>, HashMap<String, NotebookIndex>) {
let notebook = r"# cell 1
import os
# cell 2
@@ -453,7 +453,7 @@ def foo():
let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();
let mut notebook_indexes = FxHashMap::default();
let mut notebook_indexes = HashMap::default();
notebook_indexes.insert(
"notebook.ipynb".to_string(),
NotebookIndex::new(
@@ -510,7 +510,7 @@ def foo():
emitter: &mut dyn Emitter,
messages: &[Message],
) -> String {
let notebook_indexes = FxHashMap::default();
let notebook_indexes = HashMap::default();
let context = EmitterContext::new(&notebook_indexes);
let mut output: Vec<u8> = Vec::new();
emitter.emit(&mut output, messages, &context).unwrap();
@@ -521,7 +521,7 @@ def foo():
pub(super) fn capture_emitter_notebook_output(
emitter: &mut dyn Emitter,
messages: &[Message],
notebook_indexes: &FxHashMap<String, NotebookIndex>,
notebook_indexes: &HashMap<String, NotebookIndex>,
) -> String {
let context = EmitterContext::new(notebook_indexes);
let mut output: Vec<u8> = Vec::new();

View File

@@ -1,5 +1,5 @@
use foldhash::HashSet;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use ruff_diagnostics::Edit;
use ruff_python_ast::helpers::{
@@ -109,7 +109,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
pub(crate) enum AutoPythonType {
Never,
Atom(PythonType),
Union(FxHashSet<PythonType>),
Union(HashSet<PythonType>),
}
impl AutoPythonType {

View File

@@ -1,5 +1,5 @@
use foldhash::{HashMap, HashMapExt, HashSet};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_diagnostics::{AlwaysFixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Edit, Fix};
@@ -118,9 +118,9 @@ fn duplicate_handler_exceptions<'a>(
checker: &mut Checker,
expr: &'a Expr,
elts: &'a [Expr],
) -> FxHashMap<UnqualifiedName<'a>, &'a Expr> {
let mut seen: FxHashMap<UnqualifiedName, &Expr> = FxHashMap::default();
let mut duplicates: FxHashSet<UnqualifiedName> = FxHashSet::default();
) -> HashMap<UnqualifiedName<'a>, &'a Expr> {
let mut seen: HashMap<UnqualifiedName, &Expr> = HashMap::default();
let mut duplicates: HashSet<UnqualifiedName> = HashSet::default();
let mut unique_elts: Vec<&Expr> = Vec::default();
for type_ in elts {
if let Some(name) = UnqualifiedName::from_expr(type_) {
@@ -171,8 +171,8 @@ fn duplicate_handler_exceptions<'a>(
/// B025
pub(crate) fn duplicate_exceptions(checker: &mut Checker, handlers: &[ExceptHandler]) {
let mut seen: FxHashSet<UnqualifiedName> = FxHashSet::default();
let mut duplicates: FxHashMap<UnqualifiedName, Vec<&Expr>> = FxHashMap::default();
let mut seen: HashSet<UnqualifiedName> = HashSet::default();
let mut duplicates: HashMap<UnqualifiedName, Vec<&Expr>> = HashMap::default();
for handler in handlers {
let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
type_: Some(type_),

View File

@@ -1,5 +1,5 @@
use anyhow::{Context, Result};
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -48,7 +48,7 @@ impl Violation for DuplicateValue {
/// B033
pub(crate) fn duplicate_value(checker: &mut Checker, set: &ast::ExprSet) {
let mut seen_values: FxHashSet<ComparableExpr> = FxHashSet::default();
let mut seen_values: HashSet<ComparableExpr> = HashSet::default();
for (index, value) in set.iter().enumerate() {
if value.is_literal_expr() {
let comparable_value = ComparableExpr::from(value);

View File

@@ -1,5 +1,5 @@
use foldhash::HashMap;
use ruff_python_ast::{self as ast, Expr, ParameterWithDefault};
use rustc_hash::FxHashMap;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -76,7 +76,7 @@ pub(crate) fn loop_variable_overrides_iterator(checker: &mut Checker, target: &E
#[derive(Default)]
struct NameFinder<'a> {
names: FxHashMap<&'a str, &'a Expr>,
names: HashMap<&'a str, &'a Expr>,
}
impl<'a> Visitor<'a> for NameFinder<'a> {

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -70,7 +70,7 @@ pub(crate) fn static_key_dict_comprehension(checker: &mut Checker, dict_comp: &a
/// Returns `true` if the given expression is a constant in the context of the dictionary
/// comprehension.
fn is_constant(key: &Expr, names: &FxHashMap<&str, &ast::ExprName>) -> bool {
fn is_constant(key: &Expr, names: &HashMap<&str, &ast::ExprName>) -> bool {
match key {
Expr::Tuple(tuple) => tuple.iter().all(|elem| is_constant(elem, names)),
Expr::Name(ast::ExprName { id, .. }) => !names.contains_key(id.as_str()),

View File

@@ -7,7 +7,7 @@ mod tests {
use std::path::Path;
use anyhow::Result;
use rustc_hash::{FxHashMap, FxHashSet};
use foldhash::{HashMap, HashMapExt, HashSet};
use crate::assert_messages;
use crate::registry::Rule;
@@ -28,7 +28,7 @@ mod tests {
#[test]
fn custom() -> Result<()> {
let mut aliases = default_aliases();
aliases.extend(FxHashMap::from_iter([
aliases.extend(HashMap::from_iter([
("dask.array".to_string(), "da".to_string()),
("dask.dataframe".to_string(), "dd".to_string()),
]));
@@ -37,8 +37,8 @@ mod tests {
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
banned_aliases: HashMap::default(),
banned_from: HashSet::default(),
},
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
},
@@ -54,7 +54,7 @@ mod tests {
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases: default_aliases(),
banned_aliases: FxHashMap::from_iter([
banned_aliases: HashMap::from_iter([
(
"typing".to_string(),
BannedAliases::from_iter(["t".to_string(), "ty".to_string()]),
@@ -72,7 +72,7 @@ mod tests {
BannedAliases::from_iter(["F".to_string()]),
),
]),
banned_from: FxHashSet::default(),
banned_from: HashSet::default(),
},
..LinterSettings::for_rule(Rule::BannedImportAlias)
},
@@ -88,8 +88,8 @@ mod tests {
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases: default_aliases(),
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::from_iter([
banned_aliases: HashMap::default(),
banned_from: HashSet::from_iter([
"logging.config".to_string(),
"typing".to_string(),
"pandas".to_string(),
@@ -108,14 +108,14 @@ mod tests {
Path::new("flake8_import_conventions/remove_default.py"),
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases: FxHashMap::from_iter([
aliases: HashMap::from_iter([
("altair".to_string(), "alt".to_string()),
("matplotlib.pyplot".to_string(), "plt".to_string()),
("pandas".to_string(), "pd".to_string()),
("seaborn".to_string(), "sns".to_string()),
]),
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
banned_aliases: HashMap::default(),
banned_from: HashSet::default(),
},
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
},
@@ -127,7 +127,7 @@ mod tests {
#[test]
fn override_defaults() -> Result<()> {
let mut aliases = default_aliases();
aliases.extend(FxHashMap::from_iter([(
aliases.extend(HashMap::from_iter([(
"numpy".to_string(),
"nmp".to_string(),
)]));
@@ -137,8 +137,8 @@ mod tests {
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
banned_aliases: HashMap::default(),
banned_from: HashSet::default(),
},
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
},
@@ -150,7 +150,7 @@ mod tests {
#[test]
fn from_imports() -> Result<()> {
let mut aliases = default_aliases();
aliases.extend(FxHashMap::from_iter([
aliases.extend(HashMap::from_iter([
("xml.dom.minidom".to_string(), "md".to_string()),
(
"xml.dom.minidom.parseString".to_string(),
@@ -163,8 +163,8 @@ mod tests {
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
banned_aliases: HashMap::default(),
banned_from: HashSet::default(),
},
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
},
@@ -186,7 +186,7 @@ mod tests {
#[test]
fn same_name() -> Result<()> {
let mut aliases = default_aliases();
aliases.extend(FxHashMap::from_iter([(
aliases.extend(HashMap::from_iter([(
"django.conf.settings".to_string(),
"settings".to_string(),
)]));
@@ -195,8 +195,8 @@ mod tests {
&LinterSettings {
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
banned_aliases: HashMap::default(),
banned_from: HashSet::default(),
},
..LinterSettings::for_rule(Rule::UnconventionalImportAlias)
},

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -51,7 +51,7 @@ pub(crate) fn banned_import_alias(
stmt: &Stmt,
name: &str,
asname: &str,
banned_conventions: &FxHashMap<String, BannedAliases>,
banned_conventions: &HashMap<String, BannedAliases>,
) -> Option<Diagnostic> {
if let Some(banned_aliases) = banned_conventions.get(name) {
if banned_aliases

View File

@@ -1,5 +1,5 @@
use foldhash::HashSet;
use ruff_python_ast::Stmt;
use rustc_hash::FxHashSet;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -48,7 +48,7 @@ impl Violation for BannedImportFrom {
pub(crate) fn banned_import_from(
stmt: &Stmt,
name: &str,
banned_conventions: &FxHashSet<String>,
banned_conventions: &HashSet<String>,
) -> Option<Diagnostic> {
if banned_conventions.contains(name) {
return Some(Diagnostic::new(

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -59,7 +59,7 @@ impl Violation for UnconventionalImportAlias {
pub(crate) fn unconventional_import_alias(
checker: &Checker,
binding: &Binding,
conventions: &FxHashMap<String, String>,
conventions: &HashMap<String, String>,
) -> Option<Diagnostic> {
let import = binding.as_any_import()?;
let qualified_name = import.qualified_name().to_string();

View File

@@ -2,7 +2,7 @@
use std::fmt::{Display, Formatter};
use rustc_hash::{FxHashMap, FxHashSet};
use foldhash::{HashMap, HashSet};
use serde::{Deserialize, Serialize};
use ruff_macros::CacheKey;
@@ -60,24 +60,24 @@ impl FromIterator<String> for BannedAliases {
#[derive(Debug, Clone, CacheKey)]
pub struct Settings {
pub aliases: FxHashMap<String, String>,
pub banned_aliases: FxHashMap<String, BannedAliases>,
pub banned_from: FxHashSet<String>,
pub aliases: HashMap<String, String>,
pub banned_aliases: HashMap<String, BannedAliases>,
pub banned_from: HashSet<String>,
}
pub fn default_aliases() -> FxHashMap<String, String> {
pub fn default_aliases() -> HashMap<String, String> {
CONVENTIONAL_ALIASES
.iter()
.map(|(k, v)| ((*k).to_string(), (*v).to_string()))
.collect::<FxHashMap<_, _>>()
.collect::<HashMap<_, _>>()
}
impl Default for Settings {
fn default() -> Self {
Self {
aliases: default_aliases(),
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
banned_aliases: HashMap::default(),
banned_from: HashSet::default(),
}
}
}

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::{AlwaysFixableViolation, Fix};
@@ -51,7 +51,7 @@ impl AlwaysFixableViolation for DuplicateClassFieldDefinition {
/// PIE794
pub(crate) fn duplicate_class_field_definition(checker: &mut Checker, body: &[Stmt]) {
let mut seen_targets: FxHashSet<&str> = FxHashSet::default();
let mut seen_targets: HashSet<&str> = HashSet::default();
for stmt in body {
// Extract the property name from the assignment statement.
let target = match stmt {

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::Violation;
@@ -68,7 +68,7 @@ pub(crate) fn non_unique_enums(checker: &mut Checker, parent: &Stmt, body: &[Stm
return;
}
let mut seen_targets: FxHashSet<ComparableExpr> = FxHashSet::default();
let mut seen_targets: HashSet<ComparableExpr> = HashSet::default();
for stmt in body {
let Stmt::Assign(ast::StmtAssign { value, .. }) = stmt else {
continue;

View File

@@ -1,5 +1,5 @@
use foldhash::{HashMapExt, HashSet, HashSetExt};
use itertools::Itertools;
use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -147,11 +147,9 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCal
/// Determine the set of keywords that appear in multiple positions (either directly, as in
/// `func(x=1)`, or indirectly, as in `func(**{"x": 1})`).
fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> {
let mut seen =
FxHashSet::with_capacity_and_hasher(call.arguments.keywords.len(), FxBuildHasher);
let mut duplicates =
FxHashSet::with_capacity_and_hasher(call.arguments.keywords.len(), FxBuildHasher);
fn duplicates(call: &ast::ExprCall) -> HashSet<&str> {
let mut seen = HashSet::with_capacity(call.arguments.keywords.len());
let mut duplicates = HashSet::with_capacity(call.arguments.keywords.len());
for keyword in &*call.arguments.keywords {
if let Some(name) = &keyword.arg {
if !seen.insert(name.as_str()) {

View File

@@ -1,6 +1,4 @@
use std::collections::HashSet;
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{Diagnostic, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -45,7 +43,7 @@ impl Violation for DuplicateLiteralMember {
/// PYI062
pub(crate) fn duplicate_literal_member<'a>(checker: &mut Checker, expr: &'a Expr) {
let mut seen_nodes: HashSet<ComparableExpr<'_>, _> = FxHashSet::default();
let mut seen_nodes = HashSet::<ComparableExpr<'_>>::default();
let mut diagnostics: Vec<Diagnostic> = Vec::new();
// Adds a member to `literal_exprs` if it is a `Literal` annotation

View File

@@ -1,6 +1,4 @@
use std::collections::HashSet;
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -52,7 +50,7 @@ impl Violation for DuplicateUnionMember {
/// PYI016
pub(crate) fn duplicate_union_member<'a>(checker: &mut Checker, expr: &'a Expr) {
let mut seen_nodes: HashSet<ComparableExpr<'_>, _> = FxHashSet::default();
let mut seen_nodes = HashSet::<ComparableExpr<'_>>::default();
let mut diagnostics: Vec<Diagnostic> = Vec::new();
// Adds a member to `literal_exprs` if it is a `Literal` annotation

View File

@@ -1,6 +1,6 @@
use std::fmt;
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -60,7 +60,7 @@ impl Violation for RedundantLiteralUnion {
/// PYI051
pub(crate) fn redundant_literal_union<'a>(checker: &mut Checker, union: &'a Expr) {
let mut typing_literal_exprs = Vec::new();
let mut builtin_types_in_union = FxHashSet::default();
let mut builtin_types_in_union = HashSet::default();
// Adds a member to `literal_exprs` for each value in a `Literal`, and any builtin types
// to `builtin_types_in_union`.

View File

@@ -1,4 +1,4 @@
use rustc_hash::{FxBuildHasher, FxHashMap};
use foldhash::{HashMap, HashMapExt, HashSetExt};
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -651,8 +651,7 @@ fn check_duplicates(checker: &mut Checker, values: &Expr) {
return;
};
let mut seen: FxHashMap<ComparableExpr, usize> =
FxHashMap::with_capacity_and_hasher(elts.len(), FxBuildHasher);
let mut seen: HashMap<ComparableExpr, usize> = HashMap::with_capacity(elts.len());
let mut prev = None;
for (index, element) in elts.iter().enumerate() {
let expr = ComparableExpr::from(element);

View File

@@ -1,10 +1,10 @@
use anyhow::{anyhow, bail, Result};
use foldhash::{HashMap, HashMapExt, HashSetExt};
use ruff_python_ast::name::Name;
use ruff_python_ast::{
self as ast, Arguments, CmpOp, Expr, ExprContext, Identifier, Keyword, Stmt, UnaryOp,
};
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashMap};
/// An enum to represent the different types of assertions present in the
/// `unittest` module. Note: any variants that can't be replaced with plain
@@ -230,7 +230,7 @@ impl UnittestAssert {
&'a self,
args: &'a [Expr],
keywords: &'a [Keyword],
) -> Result<FxHashMap<&'a str, &'a Expr>> {
) -> Result<HashMap<&'a str, &'a Expr>> {
// If we have variable-length arguments, abort.
if args.iter().any(Expr::is_starred_expr) || keywords.iter().any(|kw| kw.arg.is_none()) {
bail!("Variable-length arguments are not supported");
@@ -248,8 +248,8 @@ impl UnittestAssert {
}
// Generate a map from argument name to value.
let mut args_map: FxHashMap<&str, &Expr> =
FxHashMap::with_capacity_and_hasher(args.len() + keywords.len(), FxBuildHasher);
let mut args_map: HashMap<&str, &Expr> =
HashMap::with_capacity(args.len() + keywords.len());
// Process positional arguments.
for (arg_name, value) in arg_spec.iter().zip(args.iter()) {

View File

@@ -1,5 +1,5 @@
use foldhash::HashSet;
use ruff_python_ast::{self as ast, ElifElseClause, Expr, Identifier, Stmt};
use rustc_hash::FxHashSet;
use ruff_python_ast::visitor;
use ruff_python_ast::visitor::Visitor;
@@ -12,7 +12,7 @@ pub(super) struct Stack<'data> {
/// The `elif` or `else` statements in the current function.
pub(super) elifs_elses: Vec<(&'data [Stmt], &'data ElifElseClause)>,
/// The non-local variables in the current function.
pub(super) non_locals: FxHashSet<&'data str>,
pub(super) non_locals: HashSet<&'data str>,
/// The annotated variables in the current function.
///
/// For example, consider:
@@ -27,7 +27,7 @@ pub(super) struct Stack<'data> {
/// In this case, the annotation on `x` is used to cast the return value
/// of `foo()` to an `int`. Removing the `x = foo()` statement would
/// change the return type of the function.
pub(super) annotations: FxHashSet<&'data str>,
pub(super) annotations: HashSet<&'data str>,
/// Whether the current function is a generator.
pub(super) is_generator: bool,
/// The `assignment`-to-`return` statement pairs in the current function.

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -94,7 +94,7 @@ pub(crate) fn if_else_block_instead_of_dict_lookup(checker: &mut Checker, stmt_i
}
// The `expr` was checked to be a literal above, so this is safe.
let mut literals: FxHashSet<ComparableLiteral> = FxHashSet::default();
let mut literals: HashSet<ComparableLiteral> = HashSet::default();
literals.insert(literal_expr.into());
for clause in elif_else_clauses {

View File

@@ -8,7 +8,7 @@ mod tests {
use std::path::Path;
use anyhow::Result;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use crate::assert_messages;
use crate::registry::Rule;
@@ -23,7 +23,7 @@ mod tests {
Path::new("flake8_tidy_imports/TID251.py"),
&LinterSettings {
flake8_tidy_imports: flake8_tidy_imports::settings::Settings {
banned_api: FxHashMap::from_iter([
banned_api: HashMap::from_iter([
(
"cgi".to_string(),
ApiBan {
@@ -52,7 +52,7 @@ mod tests {
Path::new("flake8_tidy_imports/TID/my_package/sublib/api/application.py"),
&LinterSettings {
flake8_tidy_imports: flake8_tidy_imports::settings::Settings {
banned_api: FxHashMap::from_iter([
banned_api: HashMap::from_iter([
(
"attrs".to_string(),
ApiBan {

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
@@ -42,7 +42,7 @@ impl Display for Strictness {
#[derive(Debug, Clone, CacheKey, Default)]
pub struct Settings {
pub ban_relative_imports: Strictness,
pub banned_api: FxHashMap<String, ApiBan>,
pub banned_api: HashMap<String, ApiBan>,
pub banned_module_level_imports: Vec<String>,
}

View File

@@ -1,7 +1,7 @@
use std::borrow::Cow;
use anyhow::Result;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -103,7 +103,7 @@ pub(crate) fn runtime_import_in_type_checking_block(
diagnostics: &mut Vec<Diagnostic>,
) {
// Collect all runtime imports by statement.
let mut actions: FxHashMap<(NodeId, Action), Vec<ImportBinding>> = FxHashMap::default();
let mut actions: HashMap<(NodeId, Action), Vec<ImportBinding>> = HashMap::default();
for binding_id in scope.binding_ids() {
let binding = checker.semantic().binding(binding_id);

View File

@@ -1,7 +1,7 @@
use std::borrow::Cow;
use anyhow::Result;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -248,10 +248,10 @@ pub(crate) fn typing_only_runtime_import(
diagnostics: &mut Vec<Diagnostic>,
) {
// Collect all typing-only imports by statement and import type.
let mut errors_by_statement: FxHashMap<(NodeId, ImportType), Vec<ImportBinding>> =
FxHashMap::default();
let mut ignores_by_statement: FxHashMap<(NodeId, ImportType), Vec<ImportBinding>> =
FxHashMap::default();
let mut errors_by_statement: HashMap<(NodeId, ImportType), Vec<ImportBinding>> =
HashMap::default();
let mut ignores_by_statement: HashMap<(NodeId, ImportType), Vec<ImportBinding>> =
HashMap::default();
for binding_id in scope.binding_ids() {
let binding = checker.semantic().binding(binding_id);

View File

@@ -3,8 +3,8 @@ use std::fmt;
use std::path::{Path, PathBuf};
use std::{fs, iter};
use foldhash::{HashMap, HashMapExt, HashSet, HashSetExt};
use log::debug;
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use strum_macros::EnumIter;
@@ -283,7 +283,7 @@ impl KnownModules {
third_party: Vec<glob::Pattern>,
local_folder: Vec<glob::Pattern>,
standard_library: Vec<glob::Pattern>,
user_defined: FxHashMap<String, Vec<glob::Pattern>>,
user_defined: HashMap<String, Vec<glob::Pattern>>,
) -> Self {
let known: Vec<(glob::Pattern, ImportSection)> = user_defined
.into_iter()
@@ -315,7 +315,7 @@ impl KnownModules {
.collect();
// Warn in the case of duplicate modules.
let mut seen = FxHashSet::with_capacity_and_hasher(known.len(), FxBuildHasher);
let mut seen = HashSet::with_capacity(known.len());
for (module, _) in &known {
if !seen.insert(module) {
warn_user_once!("One or more modules are part of multiple import sections, including: `{module}`");
@@ -382,8 +382,8 @@ impl KnownModules {
}
/// Return the list of user-defined modules, indexed by section.
pub fn user_defined(&self) -> FxHashMap<&str, Vec<&glob::Pattern>> {
let mut user_defined: FxHashMap<&str, Vec<&glob::Pattern>> = FxHashMap::default();
pub fn user_defined(&self) -> HashMap<&str, Vec<&glob::Pattern>> {
let mut user_defined: HashMap<&str, Vec<&glob::Pattern>> = HashMap::default();
for (module, section) in &self.known {
if let ImportSection::UserDefined(section_name) = section {
user_defined

View File

@@ -282,9 +282,9 @@ mod tests {
use std::path::Path;
use anyhow::Result;
use foldhash::{HashMap, HashMapExt, HashSet, HashSetExt};
use ruff_python_semantic::{MemberNameImport, ModuleNameImport, NameImport};
use ruff_text_size::Ranged;
use rustc_hash::{FxHashMap, FxHashSet};
use test_case::test_case;
use crate::assert_messages;
@@ -378,7 +378,7 @@ mod tests {
vec![pattern("foo"), pattern("__future__")],
vec![],
vec![],
FxHashMap::default(),
HashMap::default(),
),
..super::settings::Settings::default()
},
@@ -402,7 +402,7 @@ mod tests {
vec![pattern("foo"), pattern("__future__")],
vec![],
vec![],
FxHashMap::default(),
HashMap::default(),
),
..super::settings::Settings::default()
},
@@ -426,7 +426,7 @@ mod tests {
vec![pattern("foo.bar")],
vec![],
vec![],
FxHashMap::default(),
HashMap::default(),
),
..super::settings::Settings::default()
},
@@ -465,7 +465,7 @@ mod tests {
vec![],
vec![pattern("ruff")],
vec![],
FxHashMap::default(),
HashMap::default(),
),
..super::settings::Settings::default()
},
@@ -489,7 +489,7 @@ mod tests {
vec![],
vec![pattern("ruff")],
vec![],
FxHashMap::default(),
HashMap::default(),
),
relative_imports_order: RelativeImportsOrder::ClosestToFurthest,
..super::settings::Settings::default()
@@ -527,7 +527,7 @@ mod tests {
Path::new("isort").join(path).as_path(),
&LinterSettings {
isort: super::settings::Settings {
force_to_top: FxHashSet::from_iter([
force_to_top: HashSet::from_iter([
"z".to_string(),
"lib1".to_string(),
"lib3".to_string(),
@@ -607,7 +607,7 @@ mod tests {
&LinterSettings {
isort: super::settings::Settings {
force_single_line: true,
single_line_exclusions: FxHashSet::from_iter([
single_line_exclusions: HashSet::from_iter([
"os".to_string(),
"logging.handlers".to_string(),
]),
@@ -669,7 +669,7 @@ mod tests {
&LinterSettings {
isort: super::settings::Settings {
order_by_type: true,
classes: FxHashSet::from_iter([
classes: HashSet::from_iter([
"SVC".to_string(),
"SELU".to_string(),
"N_CLASS".to_string(),
@@ -697,7 +697,7 @@ mod tests {
&LinterSettings {
isort: super::settings::Settings {
order_by_type: true,
constants: FxHashSet::from_iter([
constants: HashSet::from_iter([
"Const".to_string(),
"constant".to_string(),
"First".to_string(),
@@ -727,7 +727,7 @@ mod tests {
&LinterSettings {
isort: super::settings::Settings {
order_by_type: true,
variables: FxHashSet::from_iter([
variables: HashSet::from_iter([
"VAR".to_string(),
"Variable".to_string(),
"MyVar".to_string(),
@@ -754,7 +754,7 @@ mod tests {
&LinterSettings {
isort: super::settings::Settings {
force_sort_within_sections: true,
force_to_top: FxHashSet::from_iter(["z".to_string()]),
force_to_top: HashSet::from_iter(["z".to_string()]),
..super::settings::Settings::default()
},
src: vec![test_resource_path("fixtures/isort")],
@@ -1010,7 +1010,7 @@ mod tests {
vec![],
vec![],
vec![],
FxHashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
HashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
),
section_order: vec![
ImportSection::Known(ImportType::Future),
@@ -1061,7 +1061,7 @@ mod tests {
Path::new("isort").join(path).as_path(),
&LinterSettings {
isort: super::settings::Settings {
no_lines_before: FxHashSet::from_iter([
no_lines_before: HashSet::from_iter([
ImportSection::Known(ImportType::Future),
ImportSection::Known(ImportType::StandardLibrary),
ImportSection::Known(ImportType::ThirdParty),
@@ -1089,7 +1089,7 @@ mod tests {
Path::new("isort").join(path).as_path(),
&LinterSettings {
isort: super::settings::Settings {
no_lines_before: FxHashSet::from_iter([
no_lines_before: HashSet::from_iter([
ImportSection::Known(ImportType::StandardLibrary),
ImportSection::Known(ImportType::LocalFolder),
]),
@@ -1202,7 +1202,7 @@ mod tests {
vec![],
vec![],
vec![],
FxHashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
HashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
),
section_order: vec![
ImportSection::Known(ImportType::Future),
@@ -1235,7 +1235,7 @@ mod tests {
vec![],
vec![],
vec![],
FxHashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
HashMap::from_iter([("django".to_string(), vec![pattern("django")])]),
),
section_order: vec![
ImportSection::Known(ImportType::Future),
@@ -1267,7 +1267,7 @@ mod tests {
vec![],
vec![],
vec![],
FxHashMap::default(),
HashMap::default(),
),
..super::settings::Settings::default()
},

View File

@@ -5,7 +5,7 @@ use std::error::Error;
use std::fmt;
use std::fmt::{Display, Formatter};
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
@@ -53,17 +53,17 @@ pub struct Settings {
pub force_sort_within_sections: bool,
pub case_sensitive: bool,
pub force_wrap_aliases: bool,
pub force_to_top: FxHashSet<String>,
pub force_to_top: HashSet<String>,
pub known_modules: KnownModules,
pub detect_same_package: bool,
pub order_by_type: bool,
pub relative_imports_order: RelativeImportsOrder,
pub single_line_exclusions: FxHashSet<String>,
pub single_line_exclusions: HashSet<String>,
pub split_on_trailing_comma: bool,
pub classes: FxHashSet<String>,
pub constants: FxHashSet<String>,
pub variables: FxHashSet<String>,
pub no_lines_before: FxHashSet<ImportSection>,
pub classes: HashSet<String>,
pub constants: HashSet<String>,
pub variables: HashSet<String>,
pub no_lines_before: HashSet<ImportSection>,
pub lines_after_imports: isize,
pub lines_between_types: usize,
pub forced_separate: Vec<String>,
@@ -85,16 +85,16 @@ impl Default for Settings {
detect_same_package: true,
case_sensitive: false,
force_wrap_aliases: false,
force_to_top: FxHashSet::default(),
force_to_top: HashSet::default(),
known_modules: KnownModules::default(),
order_by_type: true,
relative_imports_order: RelativeImportsOrder::default(),
single_line_exclusions: FxHashSet::default(),
single_line_exclusions: HashSet::default(),
split_on_trailing_comma: true,
classes: FxHashSet::default(),
constants: FxHashSet::default(),
variables: FxHashSet::default(),
no_lines_before: FxHashSet::default(),
classes: HashSet::default(),
constants: HashSet::default(),
variables: HashSet::default(),
no_lines_before: HashSet::default(),
lines_after_imports: -1,
lines_between_types: 0,
forced_separate: Vec::new(),

View File

@@ -1,6 +1,6 @@
use std::borrow::Cow;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_python_ast::helpers::format_import_from;
@@ -73,7 +73,7 @@ impl<'a> Importable<'a> for ImportFromData<'a> {
#[derive(Debug, Default)]
pub(crate) struct ImportFromStatement<'a> {
pub(crate) comments: ImportFromCommentSet<'a>,
pub(crate) aliases: FxHashMap<AliasData<'a>, ImportFromCommentSet<'a>>,
pub(crate) aliases: HashMap<AliasData<'a>, ImportFromCommentSet<'a>>,
pub(crate) trailing_comma: TrailingComma,
}
@@ -81,17 +81,17 @@ pub(crate) struct ImportFromStatement<'a> {
pub(crate) struct ImportBlock<'a> {
// Set of (name, asname), used to track regular imports.
// Ex) `import module`
pub(crate) import: FxHashMap<AliasData<'a>, ImportCommentSet<'a>>,
pub(crate) import: HashMap<AliasData<'a>, ImportCommentSet<'a>>,
// Map from (module, level) to `AliasData`, used to track 'from' imports.
// Ex) `from module import member`
pub(crate) import_from: FxHashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
pub(crate) import_from: HashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
// Set of (module, level, name, asname), used to track re-exported 'from' imports.
// Ex) `from module import member as member`
pub(crate) import_from_as:
FxHashMap<(ImportFromData<'a>, AliasData<'a>), ImportFromStatement<'a>>,
HashMap<(ImportFromData<'a>, AliasData<'a>), ImportFromStatement<'a>>,
// Map from (module, level) to `AliasData`, used to track star imports.
// Ex) `from module import *`
pub(crate) import_from_star: FxHashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
pub(crate) import_from_star: HashMap<ImportFromData<'a>, ImportFromStatement<'a>>,
}
type Import<'a> = (AliasData<'a>, ImportCommentSet<'a>);

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
@@ -175,7 +175,7 @@ pub(crate) fn literal_comparisons(checker: &mut Checker, compare: &ast::ExprComp
// through the list of operators, we apply "dummy" fixes for each error,
// then replace the entire expression at the end with one "real" fix, to
// avoid conflicts.
let mut bad_ops: FxHashMap<usize, CmpOp> = FxHashMap::default();
let mut bad_ops: HashMap<usize, CmpOp> = HashMap::default();
let mut diagnostics: Vec<Diagnostic> = vec![];
// Check `left`.

View File

@@ -1,7 +1,7 @@
use foldhash::HashSet;
use itertools::Itertools;
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashSet;
use std::ops::Add;
use ruff_diagnostics::{AlwaysFixableViolation, Violation};
@@ -1779,13 +1779,13 @@ fn common_section(
blanks_and_section_underline(checker, docstring, context);
}
fn missing_args(checker: &mut Checker, docstring: &Docstring, docstrings_args: &FxHashSet<String>) {
fn missing_args(checker: &mut Checker, docstring: &Docstring, docstrings_args: &HashSet<String>) {
let Some(function) = docstring.definition.as_function_def() else {
return;
};
// Look for arguments that weren't included in the docstring.
let mut missing_arg_names: FxHashSet<String> = FxHashSet::default();
let mut missing_arg_names: HashSet<String> = HashSet::default();
// If this is a non-static method, skip `cls` or `self`.
for ParameterWithDefault {
@@ -1847,10 +1847,10 @@ fn missing_args(checker: &mut Checker, docstring: &Docstring, docstrings_args: &
static GOOGLE_ARGS_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap());
fn args_section(context: &SectionContext) -> FxHashSet<String> {
fn args_section(context: &SectionContext) -> HashSet<String> {
let mut following_lines = context.following_lines().peekable();
let Some(first_line) = following_lines.next() else {
return FxHashSet::default();
return HashSet::default();
};
// Normalize leading whitespace, by removing any lines with less indentation
@@ -1896,12 +1896,12 @@ fn args_section(context: &SectionContext) -> FxHashSet<String> {
matches
.iter()
.filter_map(|captures| captures.get(1).map(|arg_name| arg_name.as_str().to_owned()))
.collect::<FxHashSet<String>>()
.collect::<HashSet<String>>()
}
fn parameters_section(checker: &mut Checker, docstring: &Docstring, context: &SectionContext) {
// Collect the list of arguments documented in the docstring.
let mut docstring_args: FxHashSet<String> = FxHashSet::default();
let mut docstring_args: HashSet<String> = HashSet::default();
let section_level_indent = leading_space(context.summary_line());
// Join line continuations, then resplit by line.
@@ -2026,7 +2026,7 @@ fn parse_google_sections(
if checker.enabled(Rule::UndocumentedParam) {
let mut has_args = false;
let mut documented_args: FxHashSet<String> = FxHashSet::default();
let mut documented_args: HashSet<String> = HashSet::default();
for section_context in section_contexts {
// Checks occur at the section level. Since two sections (args/keyword args and their
// variants) can list arguments, we need to unify the sets of arguments mentioned in both

View File

@@ -2,22 +2,22 @@
use std::convert::TryFrom;
use std::str::FromStr;
use foldhash::HashSet;
use ruff_python_literal::cformat::{
CFormatError, CFormatPart, CFormatPrecision, CFormatQuantity, CFormatSpec, CFormatString,
};
use rustc_hash::FxHashSet;
pub(crate) struct CFormatSummary {
pub(crate) starred: bool,
pub(crate) num_positional: usize,
pub(crate) keywords: FxHashSet<String>,
pub(crate) keywords: HashSet<String>,
}
impl From<&CFormatString> for CFormatSummary {
fn from(format_string: &CFormatString) -> Self {
let mut starred = false;
let mut num_positional = 0;
let mut keywords = FxHashSet::default();
let mut keywords = HashSet::default();
for format_part in format_string.iter() {
let CFormatPart::Spec(CFormatSpec {

View File

@@ -10,8 +10,8 @@ mod tests {
use std::path::Path;
use anyhow::Result;
use foldhash::HashMap;
use regex::Regex;
use rustc_hash::FxHashMap;
use test_case::test_case;
@@ -261,7 +261,7 @@ mod tests {
vec![],
vec![],
vec![],
FxHashMap::default(),
HashMap::default(),
),
..isort::settings::Settings::default()
},

View File

@@ -1,4 +1,4 @@
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use foldhash::{HashMap, HashMapExt, HashSet, HashSetExt};
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -129,8 +129,8 @@ impl Violation for MultiValueRepeatedKeyVariable {
/// F601, F602
pub(crate) fn repeated_keys(checker: &mut Checker, dict: &ast::ExprDict) {
// Generate a map from key to (index, value).
let mut seen: FxHashMap<ComparableExpr, FxHashSet<ComparableExpr>> =
FxHashMap::with_capacity_and_hasher(dict.len(), FxBuildHasher);
let mut seen: HashMap<ComparableExpr, HashSet<ComparableExpr>> =
HashMap::with_capacity(dict.len());
// Detect duplicate keys.
for (i, ast::DictItem { key, value }) in dict.iter().enumerate() {
@@ -142,7 +142,7 @@ pub(crate) fn repeated_keys(checker: &mut Checker, dict: &ast::ExprDict) {
let comparable_value = ComparableExpr::from(value);
let Some(seen_values) = seen.get_mut(&comparable_key) else {
seen.insert(comparable_key, FxHashSet::from_iter([comparable_value]));
seen.insert(comparable_key, HashSet::from_iter([comparable_value]));
continue;
};

View File

@@ -1,6 +1,6 @@
use std::string::ToString;
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -637,7 +637,7 @@ pub(crate) fn percent_format_missing_arguments(
return; // contains **x splat
}
let mut keywords = FxHashSet::default();
let mut keywords = HashSet::default();
for key in dict.iter_keys().flatten() {
match key {
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
@@ -859,7 +859,7 @@ pub(crate) fn string_dot_format_missing_argument(
return;
}
let keywords: FxHashSet<_> = keywords
let keywords: HashSet<_> = keywords
.iter()
.filter_map(|k| {
let Keyword { arg, .. } = &k;

View File

@@ -8,8 +8,8 @@ mod tests {
use std::path::Path;
use anyhow::Result;
use foldhash::HashSet;
use regex::Regex;
use rustc_hash::FxHashSet;
use test_case::test_case;
use crate::registry::Rule;
@@ -220,7 +220,7 @@ mod tests {
Path::new("pylint").join(path).as_path(),
&LinterSettings {
pylint: pylint::settings::Settings {
allow_dunder_method_names: FxHashSet::from_iter([
allow_dunder_method_names: HashSet::from_iter([
"__special_custom_magic__".to_string()
]),
..pylint::settings::Settings::default()

View File

@@ -1,7 +1,7 @@
use std::fmt;
use foldhash::HashSet;
use ruff_python_ast::{self as ast, Expr};
use rustc_hash::FxHashSet;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -124,7 +124,7 @@ impl fmt::Display for RemovalKind {
/// escapes.
fn has_duplicates(s: &ast::StringLiteralValue) -> bool {
let mut escaped = false;
let mut seen = FxHashSet::default();
let mut seen = HashSet::default();
for ch in s.chars() {
if escaped {
escaped = false;

View File

@@ -1,9 +1,9 @@
use std::str::FromStr;
use foldhash::HashMap;
use ruff_python_ast::{self as ast, Expr, StringFlags, StringLiteral};
use ruff_python_literal::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -178,7 +178,7 @@ fn is_valid_dict(formats: &[CFormatStrOrBytes<String>], items: &[ast::DictItem])
return true;
}
let formats_hash: FxHashMap<&str, &&CFormatSpec> = formats
let formats_hash: HashMap<&str, &&CFormatSpec> = formats
.iter()
.filter_map(|format| {
format

View File

@@ -1,5 +1,5 @@
use foldhash::{HashMapExt, HashSet, HashSetExt};
use ruff_python_ast::{self as ast, Arguments, Expr};
use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -63,7 +63,7 @@ pub(crate) fn duplicate_bases(checker: &mut Checker, name: &str, arguments: Opti
};
let bases = &arguments.args;
let mut seen: FxHashSet<&str> = FxHashSet::with_capacity_and_hasher(bases.len(), FxBuildHasher);
let mut seen: HashSet<&str> = HashSet::with_capacity(bases.len());
for base in &**bases {
if let Expr::Name(ast::ExprName { id, .. }) = base {
if !seen.insert(id) {

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -102,7 +102,7 @@ impl Ranged for AttributeAssignment<'_> {
/// If the `__slots__` attribute cannot be statically determined, returns an empty vector.
fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
// First, collect all the attributes that are assigned to `__slots__`.
let mut slots = FxHashSet::default();
let mut slots = HashSet::default();
for statement in body {
match statement {
// Ex) `__slots__ = ("name",)`

View File

@@ -1,5 +1,5 @@
use foldhash::{HashMap, HashMapExt, HashSetExt};
use itertools::Itertools;
use rustc_hash::{FxBuildHasher, FxHashMap};
use ast::ExprContext;
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
@@ -71,8 +71,8 @@ impl AlwaysFixableViolation for RepeatedEqualityComparison {
/// PLR1714
pub(crate) fn repeated_equality_comparison(checker: &mut Checker, bool_op: &ast::ExprBoolOp) {
// Map from expression hash to (starting offset, number of comparisons, list
let mut value_to_comparators: FxHashMap<HashableExpr, (TextSize, Vec<&Expr>, Vec<usize>)> =
FxHashMap::with_capacity_and_hasher(bool_op.values.len() * 2, FxBuildHasher);
let mut value_to_comparators: HashMap<HashableExpr, (TextSize, Vec<&Expr>, Vec<usize>)> =
HashMap::with_capacity(bool_op.values.len() * 2);
for (i, value) in bool_op.values.iter().enumerate() {
let Some((left, right)) = to_allowed_value(bool_op.op, value, checker.semantic()) else {

View File

@@ -1,4 +1,4 @@
use rustc_hash::{FxBuildHasher, FxHashSet};
use foldhash::{HashMapExt, HashSet, HashSetExt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -38,7 +38,7 @@ impl Violation for RepeatedKeywordArgument {
pub(crate) fn repeated_keyword_argument(checker: &mut Checker, call: &ExprCall) {
let ExprCall { arguments, .. } = call;
let mut seen = FxHashSet::with_capacity_and_hasher(arguments.keywords.len(), FxBuildHasher);
let mut seen = HashSet::with_capacity(arguments.keywords.len());
for keyword in &*arguments.keywords {
if let Some(id) = &keyword.arg {

View File

@@ -1,6 +1,6 @@
//! Settings for the `pylint` plugin.
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use serde::{Deserialize, Serialize};
use std::fmt;
@@ -51,7 +51,7 @@ impl fmt::Display for ConstantType {
#[derive(Debug, Clone, CacheKey)]
pub struct Settings {
pub allow_magic_value_types: Vec<ConstantType>,
pub allow_dunder_method_names: FxHashSet<String>,
pub allow_dunder_method_names: HashSet<String>,
pub max_args: usize,
pub max_positional_args: usize,
pub max_returns: usize,
@@ -67,7 +67,7 @@ impl Default for Settings {
fn default() -> Self {
Self {
allow_magic_value_types: vec![ConstantType::Str, ConstantType::Bytes],
allow_dunder_method_names: FxHashSet::default(),
allow_dunder_method_names: HashSet::default(),
max_args: 5,
max_positional_args: 5,
max_returns: 6,

View File

@@ -1,6 +1,6 @@
use foldhash::HashMap;
use once_cell::sync::Lazy;
use ruff_python_ast::{self as ast, Expr};
use rustc_hash::FxHashMap;
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
@@ -57,8 +57,8 @@ impl AlwaysFixableViolation for DeprecatedUnittestAlias {
}
}
static DEPRECATED_ALIASES: Lazy<FxHashMap<&'static str, &'static str>> = Lazy::new(|| {
FxHashMap::from_iter([
static DEPRECATED_ALIASES: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
HashMap::from_iter([
("assertAlmostEquals", "assertAlmostEqual"),
("assertEquals", "assertEqual"),
("assertNotAlmostEquals", "assertNotAlmostEqual"),

View File

@@ -1,7 +1,7 @@
use std::borrow::Cow;
use anyhow::{Context, Result};
use rustc_hash::{FxHashMap, FxHashSet};
use foldhash::{HashMap, HashSet};
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -62,14 +62,14 @@ impl Violation for FString {
#[derive(Debug)]
struct FormatSummaryValues<'a> {
args: Vec<&'a Expr>,
kwargs: FxHashMap<&'a str, &'a Expr>,
kwargs: HashMap<&'a str, &'a Expr>,
auto_index: usize,
}
impl<'a> FormatSummaryValues<'a> {
fn try_from_call(call: &'a ast::ExprCall, locator: &'a Locator) -> Option<Self> {
let mut extracted_args: Vec<&Expr> = Vec::new();
let mut extracted_kwargs: FxHashMap<&str, &Expr> = FxHashMap::default();
let mut extracted_kwargs: HashMap<&str, &Expr> = HashMap::default();
for arg in &*call.arguments.args {
if matches!(arg, Expr::Starred(..))
@@ -281,7 +281,7 @@ impl FStringConversion {
}
let mut converted = String::with_capacity(contents.len());
let mut seen = FxHashSet::default();
let mut seen = HashSet::default();
for part in format_string.format_parts {
match part {
FormatPart::Field {

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ast::traversal;
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
@@ -214,7 +214,7 @@ fn match_consecutive_appends<'a>(
/// Group the given appends by the associated bindings.
fn group_appends(appends: Vec<Append<'_>>) -> Vec<AppendGroup<'_>> {
// We want to go over the given list of appends and group the by receivers.
let mut map: FxHashMap<BindingId, AppendGroup> = FxHashMap::default();
let mut map: HashMap<BindingId, AppendGroup> = HashMap::default();
let mut iter = appends.into_iter();
let mut last_binding = {
let first_append = iter.next().unwrap();
@@ -254,7 +254,7 @@ fn group_appends(appends: Vec<Append<'_>>) -> Vec<AppendGroup<'_>> {
#[inline]
fn get_or_add<'a, 'b>(
map: &'b mut FxHashMap<BindingId, AppendGroup<'a>>,
map: &'b mut HashMap<BindingId, AppendGroup<'a>>,
append: Append<'a>,
) -> &'b mut AppendGroup<'a> {
let group = map.entry(append.binding_id).or_insert(AppendGroup {

View File

@@ -10,7 +10,7 @@ mod tests {
use std::path::Path;
use anyhow::Result;
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use test_case::test_case;
use ruff_source_file::SourceFileBuilder;
@@ -121,7 +121,7 @@ mod tests {
let diagnostics = test_path(
Path::new("ruff/confusables.py"),
&settings::LinterSettings {
allowed_confusables: FxHashSet::from_iter(['', 'ρ', '']),
allowed_confusables: HashSet::from_iter(['', 'ρ', '']),
..settings::LinterSettings::for_rules(vec![
Rule::AmbiguousUnicodeCharacterString,
Rule::AmbiguousUnicodeCharacterDocstring,
@@ -139,7 +139,7 @@ mod tests {
Path::new("ruff/confusables.py"),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
allowed_confusables: FxHashSet::from_iter(['', 'ρ', '']),
allowed_confusables: HashSet::from_iter(['', 'ρ', '']),
..settings::LinterSettings::for_rules(vec![
Rule::AmbiguousUnicodeCharacterString,
Rule::AmbiguousUnicodeCharacterDocstring,

View File

@@ -1,3 +1,4 @@
use foldhash::HashSet;
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast as ast;
@@ -9,7 +10,6 @@ use ruff_source_file::Locator;
use ruff_text_size::{Ranged, TextRange};
use memchr::memchr2_iter;
use rustc_hash::FxHashSet;
use crate::checkers::ast::Checker;
@@ -178,7 +178,7 @@ fn should_be_fstring(
return false;
};
let mut arg_names = FxHashSet::default();
let mut arg_names = HashSet::default();
for expr in semantic
.current_expressions()
.filter_map(ast::Expr::as_call_expr)

View File

@@ -1,8 +1,8 @@
use std::fmt::{Debug, Display, Formatter};
use foldhash::HashMap;
use ruff_diagnostics::Applicability;
use ruff_macros::CacheKey;
use rustc_hash::FxHashMap;
use strum::IntoEnumIterator;
use crate::{
@@ -55,7 +55,7 @@ impl FixSafetyTable {
Unsafe,
}
let safety_override_map: FxHashMap<Rule, Override> = {
let safety_override_map: HashMap<Rule, Override> = {
Specificity::iter()
.flat_map(|spec| {
let safe_overrides = extend_safe_fixes

View File

@@ -5,10 +5,10 @@
use std::fmt::{Display, Formatter};
use std::path::{Path, PathBuf};
use foldhash::HashSet;
use once_cell::sync::Lazy;
use path_absolutize::path_dedot;
use regex::Regex;
use rustc_hash::FxHashSet;
use crate::codes::RuleCodePrefix;
use ruff_macros::CacheKey;
@@ -226,7 +226,7 @@ pub struct LinterSettings {
pub explicit_preview_rules: bool,
// Rule-specific settings
pub allowed_confusables: FxHashSet<char>,
pub allowed_confusables: HashSet<char>,
pub builtins: Vec<String>,
pub dummy_variable_rgx: Regex,
pub external: Vec<String>,
@@ -384,7 +384,7 @@ impl LinterSettings {
.iter()
.flat_map(|selector| selector.rules(&PreviewOptions::default()))
.collect(),
allowed_confusables: FxHashSet::from_iter([]),
allowed_confusables: HashSet::from_iter([]),
// Needs duplicating
builtins: vec![],

View File

@@ -8,10 +8,10 @@ use std::str::FromStr;
use std::string::ToString;
use anyhow::{bail, Result};
use foldhash::HashMap;
use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder};
use log::debug;
use pep440_rs::{Operator, Version as Pep440Version, Version, VersionSpecifier, VersionSpecifiers};
use rustc_hash::FxHashMap;
use serde::{de, Deserialize, Deserializer, Serialize};
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
@@ -480,7 +480,7 @@ impl From<ExtensionPair> for (String, Language) {
}
#[derive(Debug, Clone, Default, CacheKey)]
pub struct ExtensionMapping {
mapping: FxHashMap<String, Language>,
mapping: HashMap<String, Language>,
}
impl ExtensionMapping {
@@ -504,8 +504,8 @@ impl Display for ExtensionMapping {
}
}
impl From<FxHashMap<String, Language>> for ExtensionMapping {
fn from(value: FxHashMap<String, Language>) -> Self {
impl From<HashMap<String, Language>> for ExtensionMapping {
fn from(value: HashMap<String, Language>) -> Self {
Self { mapping: value }
}
}

View File

@@ -6,8 +6,8 @@ use std::path::Path;
#[cfg(not(fuzzing))]
use anyhow::Result;
use foldhash::HashMap;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use ruff_diagnostics::{Applicability, Diagnostic, FixAvailability};
use ruff_notebook::Notebook;
@@ -335,7 +335,7 @@ pub(crate) fn print_jupyter_messages(
.emit(
&mut output,
messages,
&EmitterContext::new(&FxHashMap::from_iter([(
&EmitterContext::new(&HashMap::from_iter([(
path.file_name().unwrap().to_string_lossy().to_string(),
notebook.index().clone(),
)])),
@@ -356,7 +356,7 @@ pub(crate) fn print_messages(messages: &[Message]) -> String {
.emit(
&mut output,
messages,
&EmitterContext::new(&FxHashMap::default()),
&EmitterContext::new(&HashMap::default()),
)
.unwrap();

View File

@@ -24,7 +24,7 @@ bitflags = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
once_cell = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
compact_str = { workspace = true }

View File

@@ -1,7 +1,7 @@
use std::borrow::Cow;
use std::path::Path;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_python_trivia::{indentation_at_offset, CommentRanges, SimpleTokenKind, SimpleTokenizer};
use ruff_source_file::Locator;
@@ -877,7 +877,7 @@ pub fn resolve_imported_module_path<'a>(
#[derive(Debug, Default)]
pub struct NameFinder<'a> {
/// A map from identifier to defining expression.
pub names: FxHashMap<&'a str, &'a ast::ExprName>,
pub names: HashMap<&'a str, &'a ast::ExprName>,
}
impl<'a> Visitor<'a> for NameFinder<'a> {
@@ -893,7 +893,7 @@ impl<'a> Visitor<'a> for NameFinder<'a> {
#[derive(Debug, Default)]
pub struct StoredNameFinder<'a> {
/// A map from identifier to defining expression.
pub names: FxHashMap<&'a str, &'a ast::ExprName>,
pub names: HashMap<&'a str, &'a ast::ExprName>,
}
impl<'a> Visitor<'a> for StoredNameFinder<'a> {

View File

@@ -30,7 +30,7 @@ itertools = { workspace = true }
memchr = { workspace = true }
once_cell = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
serde = { workspace = true, optional = true }
schemars = { workspace = true, optional = true }
smallvec = { workspace = true }

View File

@@ -1,5 +1,5 @@
use countme::Count;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator;
use std::num::NonZeroU32;
@@ -45,7 +45,7 @@ use std::ops::Range;
#[derive(Clone)]
pub(super) struct MultiMap<K, V> {
/// Lookup table to retrieve the entry for a key.
index: FxHashMap<K, Entry>,
index: HashMap<K, Entry>,
/// Flat array storing all the parts that have been inserted in order.
parts: Vec<V>,
@@ -62,7 +62,7 @@ pub(super) struct MultiMap<K, V> {
impl<K: std::hash::Hash + Eq, V> MultiMap<K, V> {
pub(super) fn new() -> Self {
Self {
index: FxHashMap::default(),
index: HashMap::default(),
parts: Vec::new(),
out_of_order_parts: Vec::new(),
}

View File

@@ -21,7 +21,7 @@ bitflags = { workspace = true }
bstr = { workspace = true }
compact_str = { workspace = true }
memchr = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
static_assertions = { workspace = true }
unicode-ident = { workspace = true }
unicode_names2 = { workspace = true }

View File

@@ -2,7 +2,7 @@ use std::cmp::Ordering;
use std::ops::Deref;
use bitflags::bitflags;
use rustc_hash::{FxBuildHasher, FxHashSet};
use foldhash::{HashMapExt, HashSet, HashSetExt};
use ruff_python_ast::name::Name;
use ruff_python_ast::{
@@ -2277,8 +2277,7 @@ impl<'src> Parser<'src> {
/// 2. If there are more than one argument (positional or keyword), all generator expressions
/// present should be parenthesized.
fn validate_arguments(&mut self, arguments: &ast::Arguments) {
let mut all_arg_names =
FxHashSet::with_capacity_and_hasher(arguments.keywords.len(), FxBuildHasher);
let mut all_arg_names = HashSet::with_capacity(arguments.keywords.len());
for (name, range) in arguments
.keywords

View File

@@ -1,7 +1,7 @@
use compact_str::CompactString;
use std::fmt::Display;
use rustc_hash::{FxBuildHasher, FxHashSet};
use foldhash::{HashMapExt, HashSet, HashSetExt};
use ruff_python_ast::name::Name;
use ruff_python_ast::{
@@ -3272,8 +3272,7 @@ impl<'src> Parser<'src> {
///
/// Report errors for all the duplicate names found.
fn validate_parameters(&mut self, parameters: &ast::Parameters) {
let mut all_arg_names =
FxHashSet::with_capacity_and_hasher(parameters.len(), FxBuildHasher);
let mut all_arg_names = HashSet::with_capacity(parameters.len());
for parameter in parameters {
let range = parameter.name().range();

View File

@@ -22,7 +22,7 @@ ruff_text_size = { workspace = true }
bitflags = { workspace = true }
is-macro = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }

View File

@@ -1,4 +1,4 @@
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use crate::{BindingId, SemanticModel};
use ruff_python_ast as ast;
@@ -29,7 +29,7 @@ pub fn any_base_class(
class_def: &ast::StmtClassDef,
semantic: &SemanticModel,
func: &dyn Fn(&Expr) -> bool,
seen: &mut FxHashSet<BindingId>,
seen: &mut HashSet<BindingId>,
) -> bool {
class_def.bases().iter().any(|expr| {
// If the base class itself matches the pattern, then this does too.
@@ -63,7 +63,7 @@ pub fn any_base_class(
return false;
}
inner(class_def, semantic, func, &mut FxHashSet::default())
inner(class_def, semantic, func, &mut HashSet::default())
}
/// Return `true` if any base class matches an [`ast::StmtClassDef`] predicate.
@@ -76,7 +76,7 @@ pub fn any_super_class(
class_def: &ast::StmtClassDef,
semantic: &SemanticModel,
func: &dyn Fn(&ast::StmtClassDef) -> bool,
seen: &mut FxHashSet<BindingId>,
seen: &mut HashSet<BindingId>,
) -> bool {
// If the function itself matches the pattern, then this does too.
if func(class_def) {
@@ -105,7 +105,7 @@ pub fn any_super_class(
})
}
inner(class_def, semantic, func, &mut FxHashSet::default())
inner(class_def, semantic, func, &mut HashSet::default())
}
/// Return `true` if `class_def` is a class that has one or more enum classes in its mro

View File

@@ -1,6 +1,6 @@
//! Analysis rules to perform basic type inference on individual expressions.
use rustc_hash::FxHashSet;
use foldhash::HashSet;
use ruff_python_ast as ast;
use ruff_python_ast::{Expr, Operator, UnaryOp};
@@ -10,7 +10,7 @@ pub enum ResolvedPythonType {
/// The expression resolved to a single known type, like `str` or `int`.
Atom(PythonType),
/// The expression resolved to a union of known types, like `str | int`.
Union(FxHashSet<PythonType>),
Union(HashSet<PythonType>),
/// The expression resolved to an unknown type, like a variable or function call.
Unknown,
/// The expression resolved to a `TypeError`, like `1 + "hello"`.
@@ -29,7 +29,7 @@ impl ResolvedPythonType {
} else if b.is_subtype_of(a) {
Self::Atom(a)
} else {
Self::Union(FxHashSet::from_iter([a, b]))
Self::Union(HashSet::from_iter([a, b]))
}
}
(Self::Atom(a), Self::Union(mut b)) => {

View File

@@ -4,9 +4,9 @@
use std::ops::Index;
use foldhash::HashMap;
use ruff_python_ast::{self as ast, Stmt};
use ruff_text_size::{Ranged, TextRange};
use rustc_hash::FxHashMap;
use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
@@ -37,7 +37,7 @@ impl<'a> Index<GlobalsId> for GlobalsArena<'a> {
/// The set of global names for a given scope, represented as a map from the name of the global to
/// the range of the declaration in the source code.
#[derive(Debug)]
pub struct Globals<'a>(FxHashMap<&'a str, TextRange>);
pub struct Globals<'a>(HashMap<&'a str, TextRange>);
impl<'a> Globals<'a> {
/// Extracts the set of global names from a given scope, or return `None` if the scope does not
@@ -59,11 +59,11 @@ impl<'a> Globals<'a> {
/// Extracts the set of global names from a given scope.
#[derive(Debug)]
struct GlobalsVisitor<'a>(FxHashMap<&'a str, TextRange>);
struct GlobalsVisitor<'a>(HashMap<&'a str, TextRange>);
impl<'a> GlobalsVisitor<'a> {
fn new() -> Self {
Self(FxHashMap::default())
Self(HashMap::default())
}
fn finish(self) -> Option<Globals<'a>> {

View File

@@ -1,7 +1,7 @@
use std::path::Path;
use bitflags::bitflags;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_python_ast::helpers::from_relative_import;
use ruff_python_ast::name::{QualifiedName, UnqualifiedName};
@@ -78,7 +78,7 @@ pub struct SemanticModel<'a> {
///
/// In this case, the binding created by `x = 1` shadows the binding created by `import x`,
/// despite the fact that they're in different scopes.
pub shadowed_bindings: FxHashMap<BindingId, BindingId>,
pub shadowed_bindings: HashMap<BindingId, BindingId>,
/// Map from binding index to indexes of bindings that annotate it (in the same scope).
///
@@ -101,7 +101,7 @@ pub struct SemanticModel<'a> {
/// In this case, we _do_ store the binding created by `x: int` directly on the scope, and not
/// as a delayed annotation. Annotations are thus treated as bindings only when they are the
/// first binding in a scope; any annotations that follow are treated as "delayed" annotations.
delayed_annotations: FxHashMap<BindingId, Vec<BindingId>>,
delayed_annotations: HashMap<BindingId, Vec<BindingId>>,
/// Map from binding ID to the IDs of all scopes in which it is declared a `global` or
/// `nonlocal`.
@@ -116,7 +116,7 @@ pub struct SemanticModel<'a> {
///
/// In this case, the binding created by `x = 1` is rebound within the scope created by `f`
/// by way of the `global x` statement.
rebinding_scopes: FxHashMap<BindingId, Vec<ScopeId>>,
rebinding_scopes: HashMap<BindingId, Vec<ScopeId>>,
/// Flags for the semantic model.
pub flags: SemanticModelFlags,
@@ -143,7 +143,7 @@ pub struct SemanticModel<'a> {
/// Map from [`ast::ExprName`] node (represented as a [`NameId`]) to the [`Binding`] to which
/// it resolved (represented as a [`BindingId`]).
resolved_names: FxHashMap<NameId, BindingId>,
resolved_names: HashMap<NameId, BindingId>,
}
impl<'a> SemanticModel<'a> {
@@ -163,13 +163,13 @@ impl<'a> SemanticModel<'a> {
resolved_references: ResolvedReferences::default(),
unresolved_references: UnresolvedReferences::default(),
globals: GlobalsArena::default(),
shadowed_bindings: FxHashMap::default(),
delayed_annotations: FxHashMap::default(),
rebinding_scopes: FxHashMap::default(),
shadowed_bindings: HashMap::default(),
delayed_annotations: HashMap::default(),
rebinding_scopes: HashMap::default(),
flags: SemanticModelFlags::new(path),
seen: Modules::empty(),
handled_exceptions: Vec::default(),
resolved_names: FxHashMap::default(),
resolved_names: HashMap::default(),
}
}

View File

@@ -1,8 +1,8 @@
use std::ops::{Deref, DerefMut};
use bitflags::bitflags;
use foldhash::HashMap;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
use ruff_index::{newtype_index, Idx, IndexSlice, IndexVec};
@@ -23,7 +23,7 @@ pub struct Scope<'a> {
star_imports: Vec<StarImport<'a>>,
/// A map from bound name to binding ID.
bindings: FxHashMap<&'a str, BindingId>,
bindings: HashMap<&'a str, BindingId>,
/// A map from binding ID to binding ID that it shadows.
///
@@ -35,7 +35,7 @@ pub struct Scope<'a> {
/// ```
///
/// In this case, the binding created by `x = 2` shadows the binding created by `x = 1`.
shadowed_bindings: FxHashMap<BindingId, BindingId>,
shadowed_bindings: HashMap<BindingId, BindingId>,
/// Index into the globals arena, if the scope contains any globally-declared symbols.
globals_id: Option<GlobalsId>,
@@ -50,8 +50,8 @@ impl<'a> Scope<'a> {
kind: ScopeKind::Module,
parent: None,
star_imports: Vec::default(),
bindings: FxHashMap::default(),
shadowed_bindings: FxHashMap::default(),
bindings: HashMap::default(),
shadowed_bindings: HashMap::default(),
globals_id: None,
flags: ScopeFlags::empty(),
}
@@ -62,8 +62,8 @@ impl<'a> Scope<'a> {
kind,
parent: Some(parent),
star_imports: Vec::default(),
bindings: FxHashMap::default(),
shadowed_bindings: FxHashMap::default(),
bindings: HashMap::default(),
shadowed_bindings: HashMap::default(),
globals_id: None,
flags: ScopeFlags::empty(),
}

View File

@@ -33,7 +33,7 @@ jod-thread = { workspace = true }
lsp-server = { workspace = true }
lsp-types = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
foldhash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }

View File

@@ -1,7 +1,7 @@
use anyhow::Ok;
use foldhash::{HashMap, HashMapExt, HashSetExt};
use lsp_types::NotebookCellKind;
use ruff_notebook::CellMetadata;
use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{PositionEncoding, TextDocument};
@@ -17,7 +17,7 @@ pub struct NotebookDocument {
metadata: ruff_notebook::RawNotebookMetadata,
version: DocumentVersion,
// Used to quickly find the index of a cell for a given URL.
cell_index: FxHashMap<lsp_types::Url, CellId>,
cell_index: HashMap<lsp_types::Url, CellId>,
}
/// A single cell within a notebook, which has text contents represented as a `TextDocument`.
@@ -35,7 +35,7 @@ impl NotebookDocument {
metadata: serde_json::Map<String, serde_json::Value>,
cell_documents: Vec<lsp_types::TextDocumentItem>,
) -> crate::Result<Self> {
let mut cell_contents: FxHashMap<_, _> = cell_documents
let mut cell_contents: HashMap<_, _> = cell_documents
.into_iter()
.map(|document| (document.uri, document.text))
.collect();
@@ -122,7 +122,7 @@ impl NotebookDocument {
// Instead, it only provides that (a) these cell URIs were removed, and (b) these
// cell URIs were added.
// https://github.com/astral-sh/ruff/issues/12573
let mut deleted_cells = FxHashMap::default();
let mut deleted_cells = HashMap::default();
// First, delete the cells and remove them from the index.
if delete > 0 {
@@ -221,8 +221,8 @@ impl NotebookDocument {
self.cells.get_mut(*self.cell_index.get(uri)?)
}
fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap<lsp_types::Url, CellId> {
let mut index = FxHashMap::with_capacity_and_hasher(cells.len(), FxBuildHasher);
fn make_cell_index(cells: &[NotebookCell]) -> HashMap<lsp_types::Url, CellId> {
let mut index = HashMap::with_capacity(cells.len());
for (i, cell) in cells.iter().enumerate() {
index.insert(cell.url.clone(), i);
}

View File

@@ -1,6 +1,6 @@
use std::borrow::Cow;
use rustc_hash::FxHashMap;
use foldhash::HashMap;
use ruff_linter::{
linter::{FixerResult, LinterResult},
@@ -19,7 +19,7 @@ use crate::{
/// A simultaneous fix made across a single text document or among an arbitrary
/// number of notebook cells.
pub(crate) type Fixes = FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>;
pub(crate) type Fixes = HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>;
pub(crate) fn fix_all(
query: &DocumentQuery,

View File

@@ -1,7 +1,7 @@
//! Access to the Ruff linting API for the LSP
use foldhash::HashMap;
use ruff_python_parser::ParseError;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use ruff_diagnostics::{Applicability, Diagnostic, DiagnosticKind, Edit, Fix};
@@ -58,7 +58,7 @@ pub(crate) struct DiagnosticFix {
}
/// A series of diagnostics across a single text document or an arbitrary number of notebook cells.
pub(crate) type DiagnosticsMap = FxHashMap<lsp_types::Url, Vec<lsp_types::Diagnostic>>;
pub(crate) type DiagnosticsMap = HashMap<lsp_types::Url, Vec<lsp_types::Diagnostic>>;
pub(crate) fn check(
query: &DocumentQuery,

View File

@@ -1,6 +1,6 @@
use foldhash::HashSet;
use lsp_server::ErrorCode;
use lsp_types::{self as types, request as req};
use rustc_hash::FxHashSet;
use types::{CodeActionKind, CodeActionOrCommand};
use crate::edit::WorkspaceEditTracker;
@@ -282,7 +282,7 @@ fn notebook_organize_imports(snapshot: &DocumentSnapshot) -> crate::Result<CodeA
/// the list is filtered.
fn supported_code_actions(
action_filter: Option<Vec<CodeActionKind>>,
) -> FxHashSet<SupportedCodeAction> {
) -> HashSet<SupportedCodeAction> {
let Some(action_filter) = action_filter else {
return SupportedCodeAction::all().collect();
};

View File

@@ -1,7 +1,7 @@
use std::any::TypeId;
use foldhash::HashMap;
use lsp_server::{Notification, RequestId};
use rustc_hash::FxHashMap;
use serde_json::Value;
use super::{schedule::Task, ClientSender};
@@ -23,7 +23,7 @@ pub(crate) struct Responder(ClientSender);
pub(crate) struct Requester<'s> {
sender: ClientSender,
next_request_id: i32,
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
response_handlers: HashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
}
impl<'s> Client<'s> {
@@ -34,7 +34,7 @@ impl<'s> Client<'s> {
requester: Requester {
sender,
next_request_id: 1,
response_handlers: FxHashMap::default(),
response_handlers: HashMap::default(),
},
}
}

Some files were not shown because too many files have changed in this diff Show More