From 02d5637dbc3942e4afecf7dc93baadb0d13d664f Mon Sep 17 00:00:00 2001
From: camchenry <1514176+camchenry@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:28:54 +0000
Subject: [PATCH] perf(ast-tools): use `FxHashMap` over `std::collections::HashMap` (#5997)

---
 Cargo.lock                                    |  5 +++
 crates/oxc_language_server/src/main.rs        |  1 +
 tasks/ast_tools/Cargo.toml                    |  1 +
 tasks/ast_tools/src/codegen.rs                |  7 +++--
 tasks/ast_tools/src/derives/mod.rs            |  5 +--
 tasks/ast_tools/src/generators/ast_builder.rs |  5 +--
 tasks/ast_tools/src/generators/visit.rs       |  7 +++--
 tasks/ast_tools/src/passes/calc_layout.rs     |  7 ++---
 tasks/coverage/Cargo.toml                     |  1 +
 tasks/coverage/src/typescript/meta.rs         |  7 +++--
 tasks/javascript_globals/Cargo.toml           |  1 +
 tasks/javascript_globals/src/main.rs          | 31 +++++++++----------
 tasks/minsize/Cargo.toml                      |  1 +
 tasks/minsize/src/lib.rs                      |  6 ++--
 tasks/rulegen/Cargo.toml                      |  1 +
 tasks/rulegen/src/main.rs                     |  6 ++--
 16 files changed, 53 insertions(+), 39 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index d55ef8772ca00..e1cff595ff252 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -976,6 +976,7 @@ dependencies = [
  "handlebars",
  "lazy_static",
  "oxc_tasks_common",
+ "rustc-hash",
  "serde",
 ]
 
@@ -1482,6 +1483,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "regex",
+ "rustc-hash",
  "serde",
  "serde_json",
  "syn",
@@ -1558,6 +1560,7 @@ dependencies = [
  "pico-args",
  "rayon",
  "regex",
+ "rustc-hash",
  "saphyr",
  "serde",
  "serde_json",
@@ -1724,6 +1727,7 @@ dependencies = [
  "oxc_parser",
  "oxc_span",
  "oxc_tasks_common",
+ "rustc-hash",
 ]
 
 [[package]]
@@ -2429,6 +2433,7 @@ dependencies = [
  "oxc_span",
  "oxc_tasks_common",
  "regex",
+ "rustc-hash",
  "serde",
  "ureq",
 ]
diff --git a/crates/oxc_language_server/src/main.rs b/crates/oxc_language_server/src/main.rs
index 28bf03829494b..86d56db2483c9 100644
--- a/crates/oxc_language_server/src/main.rs
+++ b/crates/oxc_language_server/src/main.rs
@@ -1,3 +1,4 @@
+#[allow(clippy::disallowed_types)]
 mod linter;
 
 use std::{collections::HashMap, fmt::Debug, path::PathBuf, str::FromStr};
diff --git a/tasks/ast_tools/Cargo.toml b/tasks/ast_tools/Cargo.toml
index fb30bbdcf2858..427ac0533ac7d 100644
--- a/tasks/ast_tools/Cargo.toml
+++ b/tasks/ast_tools/Cargo.toml
@@ -22,6 +22,7 @@ prettyplease = { workspace = true }
 proc-macro2 = { workspace = true }
 quote = { workspace = true }
 regex = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
 syn = { workspace = true, features = ["clone-impls", "derive", "extra-traits", "full", "parsing", "printing", "proc-macro"] }
diff --git a/tasks/ast_tools/src/codegen.rs b/tasks/ast_tools/src/codegen.rs
index 4ccbb775d8125..582c278115a4a 100644
--- a/tasks/ast_tools/src/codegen.rs
+++ b/tasks/ast_tools/src/codegen.rs
@@ -1,7 +1,8 @@
-use std::{cell::RefCell, collections::HashMap, path::PathBuf};
+use std::{cell::RefCell, path::PathBuf};
 
 use itertools::Itertools;
 use proc_macro2::TokenStream;
+use rustc_hash::{FxBuildHasher, FxHashMap};
 
 use crate::{
     derives::{Derive, DeriveOutput},
@@ -69,7 +70,7 @@ pub trait Runner {
 
 pub struct EarlyCtx {
     ty_table: Vec<AstRef>,
-    ident_table: HashMap<String, TypeId>,
+    ident_table: FxHashMap<String, TypeId>,
     mods: RefCell<Vec<Module>>,
 }
 
@@ -80,7 +81,7 @@ impl EarlyCtx {
         let adts = mods.iter().flat_map(|it| it.items.iter());
 
         let mut ty_table = Vec::with_capacity(len);
-        let mut ident_table = HashMap::with_capacity(len);
+        let mut ident_table = FxHashMap::with_capacity_and_hasher(len, FxBuildHasher);
         for adt in adts {
             if let Some(ident) = adt.borrow().ident() {
                 let ident = ident.to_string();
diff --git a/tasks/ast_tools/src/derives/mod.rs b/tasks/ast_tools/src/derives/mod.rs
index 555667a474e62..af4f330251bc4 100644
--- a/tasks/ast_tools/src/derives/mod.rs
+++ b/tasks/ast_tools/src/derives/mod.rs
@@ -77,10 +77,11 @@ macro_rules! define_derive {
             }
 
             fn run(&mut self, ctx: &$crate::codegen::LateCtx) -> $crate::Result {
-                use std::collections::{HashMap, HashSet};
+                use std::collections::{HashSet};
                 use std::vec::Vec;
 
                 use convert_case::{Case, Casing};
                 use itertools::Itertools;
+                use rustc_hash::FxHashMap;
 
                 use $crate::derives::DeriveTemplate;
@@ -90,7 +91,7 @@ macro_rules! define_derive {
                     .into_iter()
                     .filter(|def| def.generates_derive(trait_name))
                     .map(|def| (def, self.derive(def, ctx)))
-                    .fold(HashMap::<&str, (HashSet<&str>, Vec<TokenStream>)>::new(), |mut acc, (def, stream)| {
+                    .fold(FxHashMap::<&str, (HashSet<&str>, Vec<TokenStream>)>::default(), |mut acc, (def, stream)| {
                         let module_path = def.module_path();
                         let krate = module_path.split("::").next().unwrap();
                         if !acc.contains_key(krate) {
diff --git a/tasks/ast_tools/src/generators/ast_builder.rs b/tasks/ast_tools/src/generators/ast_builder.rs
index 70cfd1351ab47..d3ec9bcd8dc68 100644
--- a/tasks/ast_tools/src/generators/ast_builder.rs
+++ b/tasks/ast_tools/src/generators/ast_builder.rs
@@ -1,10 +1,11 @@
-use std::{borrow::Cow, collections::HashMap, stringify};
+use std::{borrow::Cow, stringify};
 
 use convert_case::{Case, Casing};
 use itertools::Itertools;
 use lazy_static::lazy_static;
 use proc_macro2::TokenStream;
 use quote::{format_ident, quote, ToTokens};
+use rustc_hash::FxHashMap;
 use syn::{parse_quote, Ident, Type};
 
 use super::define_generator;
@@ -226,7 +227,7 @@ fn default_init_field(field: &FieldDef) -> bool {
         };
     }
     lazy_static! {
-        static ref DEFAULT_FIELDS: HashMap<&'static str, &'static str> = HashMap::from([
+        static ref DEFAULT_FIELDS: FxHashMap<&'static str, &'static str> = FxHashMap::from_iter([
            field!(scope_id: Cell<Option<ScopeId>>),
            field!(symbol_id: Cell<Option<SymbolId>>),
            field!(reference_id: Cell<Option<ReferenceId>>),
diff --git a/tasks/ast_tools/src/generators/visit.rs b/tasks/ast_tools/src/generators/visit.rs
index 20a984d475386..4488e44aa553e 100644
--- a/tasks/ast_tools/src/generators/visit.rs
+++ b/tasks/ast_tools/src/generators/visit.rs
@@ -1,9 +1,10 @@
-use std::{borrow::Cow, collections::HashMap};
+use std::borrow::Cow;
 
 use convert_case::{Case, Casing};
 use itertools::Itertools;
 use proc_macro2::TokenStream;
 use quote::{format_ident, quote, ToTokens};
+use rustc_hash::FxHashMap;
 use syn::{parse_quote, Ident};
 
 use super::define_generator;
@@ -135,12 +136,12 @@ struct VisitBuilder<'a> {
     visits: Vec<TokenStream>,
     walks: Vec<TokenStream>,
 
-    cache: HashMap<Ident, [Option<Cow<'a, str>>; 2]>,
+    cache: FxHashMap<Ident, [Option<Cow<'a, str>>; 2]>,
 }
 
 impl<'a> VisitBuilder<'a> {
     fn new(ctx: &'a LateCtx, is_mut: bool) -> Self {
-        Self { ctx, is_mut, visits: Vec::new(), walks: Vec::new(), cache: HashMap::new() }
+        Self { ctx, is_mut, visits: Vec::new(), walks: Vec::new(), cache: FxHashMap::default() }
     }
 
     fn build(mut self) -> (/* visits */ Vec<TokenStream>, /* walks */ Vec<TokenStream>) {
diff --git a/tasks/ast_tools/src/passes/calc_layout.rs b/tasks/ast_tools/src/passes/calc_layout.rs
index e66d6fc427ad2..4b18d4442e2ba 100644
--- a/tasks/ast_tools/src/passes/calc_layout.rs
+++ b/tasks/ast_tools/src/passes/calc_layout.rs
@@ -1,8 +1,7 @@
-use std::collections::HashMap;
-
 use itertools::Itertools;
 use lazy_static::lazy_static;
 use quote::ToTokens;
+use rustc_hash::FxHashMap;
 use syn::Type;
 
 use super::{define_pass, Pass};
@@ -18,7 +17,7 @@ use crate::{
 #[cfg(not(target_pointer_width = "64"))]
 compile_error!("This module only supports 64bit architectures.");
 
-type WellKnown = HashMap<&'static str, PlatformLayout>;
+type WellKnown = FxHashMap<&'static str, PlatformLayout>;
 
 define_pass! {
     pub struct CalcLayout;
@@ -279,7 +278,7 @@ fn calc_type_layout(ty: &TypeAnalysis, ctx: &EarlyCtx) -> Result
 
 macro_rules! well_known {
     ($($typ:ty: { $($platform:tt => $layout:expr,)*},)*) => {
-        WellKnown::from([
+        FxHashMap::from_iter([
            $((
                stringify!($typ),
                well_known!(@ $( $platform => $layout,)*)
diff --git a/tasks/coverage/Cargo.toml b/tasks/coverage/Cargo.toml
index 7f80ad35d321c..32d70b79213dd 100644
--- a/tasks/coverage/Cargo.toml
+++ b/tasks/coverage/Cargo.toml
@@ -35,6 +35,7 @@ phf = { workspace = true, features = ["macros"] }
 pico-args = { workspace = true }
 rayon = { workspace = true }
 regex = { workspace = true }
+rustc-hash = { workspace = true }
 saphyr = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
diff --git a/tasks/coverage/src/typescript/meta.rs b/tasks/coverage/src/typescript/meta.rs
index 4c4c56ba89cb7..35eb9897d182b 100644
--- a/tasks/coverage/src/typescript/meta.rs
+++ b/tasks/coverage/src/typescript/meta.rs
@@ -1,6 +1,7 @@
 //!
 
-use std::{collections::HashMap, fs, path::Path, sync::Arc};
+use rustc_hash::FxHashMap;
+use std::{fs, path::Path, sync::Arc};
 
 use oxc::{
     allocator::Allocator,
@@ -35,7 +36,7 @@ pub struct CompilerSettings {
 }
 
 impl CompilerSettings {
-    pub fn new(options: &HashMap<String, String>) -> Self {
+    pub fn new(options: &FxHashMap<String, String>) -> Self {
         Self {
             modules: Self::split_value_options(options.get("module")),
             targets: Self::split_value_options(options.get("target")),
@@ -93,7 +94,7 @@ impl TestCaseContent {
     /// These files start with `// @<option-name>: <option-value>` and are followed by the file's content.
     /// This function extracts the individual files with their content and drops unsupported files.
     pub fn make_units_from_test(path: &Path, code: &str) -> Self {
-        let mut current_file_options: HashMap<String, String> = HashMap::default();
+        let mut current_file_options: FxHashMap<String, String> = FxHashMap::default();
         let mut current_file_name: Option<String> = None;
         let mut test_unit_data: Vec<TestUnitData> = vec![];
         let mut current_file_content = String::new();
diff --git a/tasks/javascript_globals/Cargo.toml b/tasks/javascript_globals/Cargo.toml
index e8ed01481c181..b7377ae7318d6 100644
--- a/tasks/javascript_globals/Cargo.toml
+++ b/tasks/javascript_globals/Cargo.toml
@@ -18,4 +18,5 @@ handlebars = { workspace = true }
 oxc_tasks_common = { workspace = true }
 
 lazy_static = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
diff --git a/tasks/javascript_globals/src/main.rs b/tasks/javascript_globals/src/main.rs
index 8531e60747023..0086d7b4f5f9a 100644
--- a/tasks/javascript_globals/src/main.rs
+++ b/tasks/javascript_globals/src/main.rs
@@ -1,8 +1,7 @@
 #![allow(clippy::print_stdout, clippy::print_stderr)]
-use std::collections::HashMap;
-
 use lazy_static::lazy_static;
 use oxc_tasks_common::agent;
+use rustc_hash::FxHashMap;
 use serde::Serialize;
 
 mod template;
@@ -30,10 +29,10 @@ impl<'a> Context<'a> {
 }
 
 fn get_diff(
-    current: &HashMap<String, bool>,
-    prev: &HashMap<String, bool>,
-) -> HashMap<String, bool> {
-    let mut retv: HashMap<String, bool> = HashMap::new();
+    current: &FxHashMap<String, bool>,
+    prev: &FxHashMap<String, bool>,
+) -> FxHashMap<String, bool> {
+    let mut retv: FxHashMap<String, bool> = FxHashMap::default();
 
     for (key, value) in current {
         if !prev.contains_key(key) {
@@ -45,22 +44,22 @@ fn get_diff(
 }
 
 lazy_static! {
-    static ref NEW_GLOBALS_2017: HashMap<String, bool> = {
-        return HashMap::from([
+    static ref NEW_GLOBALS_2017: FxHashMap<String, bool> = {
+        return FxHashMap::from_iter([
             (String::from("Atomics"), false),
             (String::from("SharedArrayBuffer"), false),
         ]);
     };
-    static ref NEW_GLOBALS_2020: HashMap<String, bool> = {
-        return HashMap::from([
+    static ref NEW_GLOBALS_2020: FxHashMap<String, bool> = {
+        return FxHashMap::from_iter([
             (String::from("BigInt"), false),
             (String::from("BigInt64Array"), false),
             (String::from("BigUint64Array"), false),
             (String::from("globalThis"), false),
         ]);
     };
-    static ref NEW_GLOBALS_2021: HashMap<String, bool> = {
-        return HashMap::from([
+    static ref NEW_GLOBALS_2021: FxHashMap<String, bool> = {
+        return FxHashMap::from_iter([
             (String::from("AggregateError"), false),
             (String::from("FinalizationRegistry"), false),
             (String::from("WeakRef"), false),
@@ -73,8 +72,8 @@ fn main() {
     // A value of true indicates that the variable may be overwritten.
     // A value of false indicates that the variable should be considered read-only.
     // open globals.json file relative to current file
-    // let globals: HashMap<String, HashMap<String, bool>>;
-    let globals: HashMap<String, HashMap<String, bool>> = match agent()
+    // let globals: FxHashMap<String, FxHashMap<String, bool>>;
+    let globals: FxHashMap<String, FxHashMap<String, bool>> = match agent()
         .get("https://raw.githubusercontent.com/sindresorhus/globals/main/globals.json")
         .call()
     {
@@ -88,7 +87,7 @@ fn main() {
     let new_globals_2015 = get_diff(&globals["es2015"], &globals["es5"]);
 
     let new_globals_2015_2017 = {
-        let mut map = HashMap::new();
+        let mut map = FxHashMap::default();
         map.extend(new_globals_2015.clone());
         map.extend(NEW_GLOBALS_2017.clone());
         map
@@ -158,7 +157,7 @@ fn main() {
     }
 }
 
-fn to_env_vars(env_var_map: &HashMap<String, bool>) -> Vec<EnvVar> {
+fn to_env_vars(env_var_map: &FxHashMap<String, bool>) -> Vec<EnvVar> {
     let mut result: Vec<EnvVar> = vec![];
     for (key, value) in env_var_map {
         result.push(EnvVar { name: key, writeable: *value });
diff --git a/tasks/minsize/Cargo.toml b/tasks/minsize/Cargo.toml
index c60cc8a433acb..bfc2bd01ed9c0 100644
--- a/tasks/minsize/Cargo.toml
+++ b/tasks/minsize/Cargo.toml
@@ -28,3 +28,4 @@ flate2 = { workspace = true }
 oxc_tasks_common = { workspace = true }
 
 humansize = { workspace = true }
+rustc-hash = { workspace = true }
diff --git a/tasks/minsize/src/lib.rs b/tasks/minsize/src/lib.rs
index b976a26fc5663..d20fba0c38dbd 100644
--- a/tasks/minsize/src/lib.rs
+++ b/tasks/minsize/src/lib.rs
@@ -1,6 +1,5 @@
 #![allow(clippy::print_stdout, clippy::print_stderr)]
 use std::{
-    collections::HashMap,
     fs::File,
     io::{self, Write},
 };
@@ -13,6 +12,7 @@ use oxc_minifier::{CompressOptions, Minifier, MinifierOptions};
 use oxc_parser::Parser;
 use oxc_span::SourceType;
 use oxc_tasks_common::{project_root, TestFile, TestFiles};
+use rustc_hash::FxHashMap;
 
 // #[test]
 // #[cfg(any(coverage, coverage_nightly))]
@@ -28,7 +28,7 @@ pub fn run() -> Result<(), io::Error> {
     let path = project_root().join("tasks/minsize/minsize.snap");
 
     // Data copied from https://github.com/privatenumber/minification-benchmarks
-    let targets = HashMap::<&str, &str>::from_iter([
+    let targets = FxHashMap::<&str, &str>::from_iter([
         ("react.development.js", "23.70 kB"),
         ("moment.js", "59.82 kB"),
         ("jquery.js", "90.07 kB"),
@@ -43,7 +43,7 @@ pub fn run() -> Result<(), io::Error> {
         ("typescript.js", "3.49 MB"),
     ]);
 
-    let gzip_targets = HashMap::<&str, &str>::from_iter([
+    let gzip_targets = FxHashMap::<&str, &str>::from_iter([
         ("react.development.js", "8.54 kB"),
         ("moment.js", "19.33 kB"),
         ("jquery.js", "31.95 kB"),
diff --git a/tasks/rulegen/Cargo.toml b/tasks/rulegen/Cargo.toml
index 177a94cfc4681..571b7c4123491 100644
--- a/tasks/rulegen/Cargo.toml
+++ b/tasks/rulegen/Cargo.toml
@@ -24,5 +24,6 @@ convert_case = { workspace = true }
 handlebars = { workspace = true }
 lazy_static = { workspace = true }
 regex = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 ureq = { workspace = true }
diff --git a/tasks/rulegen/src/main.rs b/tasks/rulegen/src/main.rs
index d1d03922e7ed1..638c335dafc13 100644
--- a/tasks/rulegen/src/main.rs
+++ b/tasks/rulegen/src/main.rs
@@ -1,7 +1,6 @@
 #![allow(clippy::print_stdout, clippy::print_stderr, clippy::disallowed_methods)]
 use std::{
     borrow::Cow,
-    collections::HashMap,
     fmt::{self, Display, Formatter},
 };
 
@@ -18,6 +17,7 @@ use oxc_ast::{
 };
 use oxc_parser::Parser;
 use oxc_span::{GetSpan, SourceType, Span};
+use rustc_hash::FxHashMap;
 use serde::Serialize;
 use ureq::Response;
 
@@ -381,7 +381,7 @@ struct State<'a> {
     source_text: &'a str,
     valid_tests: Vec<&'a Expression<'a>>,
     invalid_tests: Vec<&'a Expression<'a>>,
-    expression_to_group_comment_map: HashMap<Span, String>,
+    expression_to_group_comment_map: FxHashMap<Span, String>,
     group_comment_stack: Vec<String>,
 }
 
@@ -391,7 +391,7 @@ impl<'a> State<'a> {
             source_text,
             valid_tests: vec![],
             invalid_tests: vec![],
-            expression_to_group_comment_map: HashMap::new(),
+            expression_to_group_comment_map: FxHashMap::default(),
             group_comment_stack: vec![],
         }
     }
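
For reference, the three construction patterns this patch substitutes are sketched below as a small standalone program; it is not part of the patch itself. It assumes rustc-hash 2.x, where `FxBuildHasher` is a unit value that can be passed to `with_capacity_and_hasher` (matching the `use rustc_hash::{FxBuildHasher, FxHashMap};` import added in codegen.rs above); the map names and contents here are illustrative only.

use rustc_hash::{FxBuildHasher, FxHashMap};

fn main() {
    // `FxHashMap::new()` does not exist because the hasher is not `RandomState`,
    // so `HashMap::new()` call sites become `FxHashMap::default()`.
    let mut by_name: FxHashMap<String, bool> = FxHashMap::default();
    by_name.insert(String::from("globalThis"), false);

    // `HashMap::from([...])` has no Fx counterpart either; `from_iter` works
    // because `FxBuildHasher` implements `Default`.
    let targets = FxHashMap::<&str, &str>::from_iter([
        ("react.development.js", "23.70 kB"),
        ("moment.js", "59.82 kB"),
    ]);

    // Pre-sizing keeps the old `with_capacity` behaviour: pass the hasher explicitly.
    let mut ident_table: FxHashMap<String, usize> =
        FxHashMap::with_capacity_and_hasher(targets.len(), FxBuildHasher);
    for (i, (name, _size)) in targets.iter().enumerate() {
        ident_table.insert((*name).to_string(), i);
    }

    println!("{} {} {}", by_name.len(), targets.len(), ident_table.len());
}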