From 6f1b19fee7dc185e13fca0369d1371923e1390b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Sun, 23 Dec 2018 19:20:50 +0100 Subject: [PATCH 1/5] Switch to FxHash for hash maps and such. This isn't such a massive win as I'd have hoped, but it is consistently faster, so there's no reason not to. > ./bindgen-old tests/stylo.hpp --no-rustfmt-bindings > /dev/null 2>&1 6.17s user 0.84s system 98% cpu 7.079 total > ./target/release/bindgen tests/stylo.hpp --no-rustfmt-bindings > /dev/null 2> 5.92s user 0.87s system 98% cpu 6.866 total Which isn't _that_ much but it's quite a bit. --- Cargo.lock | 16 ++++++++++++++++ Cargo.toml | 1 + src/codegen/mod.rs | 7 ++++--- src/ir/analysis/derive_copy.rs | 5 ++--- src/ir/analysis/derive_debug.rs | 5 ++--- src/ir/analysis/derive_default.rs | 7 +++---- src/ir/analysis/derive_hash.rs | 5 ++--- .../analysis/derive_partialeq_or_partialord.rs | 5 ++--- src/ir/analysis/has_destructor.rs | 5 ++--- src/ir/analysis/has_float.rs | 5 ++--- src/ir/analysis/has_type_param_in_array.rs | 5 ++--- src/ir/analysis/has_vtable.rs | 5 ++--- src/ir/analysis/mod.rs | 12 ++++++------ src/ir/analysis/sizedness.rs | 8 +++----- src/ir/analysis/template_params.rs | 6 +++--- src/ir/comp.rs | 2 +- src/ir/context.rs | 17 ++++++++++------- src/lib.rs | 7 ++++++- 18 files changed, 69 insertions(+), 54 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 624c8b26ed..6295be5739 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -56,6 +56,7 @@ dependencies = [ "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)", "diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -71,6 +72,11 @@ name = "bitflags" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "byteorder" +version = "1.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "cc" version = "1.0.25" @@ -138,6 +144,14 @@ dependencies = [ "backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "glob" version = "0.2.11" @@ -389,6 +403,7 @@ dependencies = [ "checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a" "checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0" "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" +"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d" "checksum cc 1.0.25 
(registry+https://github.com/rust-lang/crates.io-index)" = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16" "checksum cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8fc0086be9ca82f7fc89fc873435531cb898b86e850005850de1f820e2db6e9b" "checksum cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4e7bb64a8ebb0d856483e1e682ea3422f883c5f5615a90d51a2c82fe87fdd3" @@ -397,6 +412,7 @@ dependencies = [ "checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a" "checksum env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afb070faf94c85d17d50ca44f6ad076bce18ae92f0037d350947240a36e9d42e" "checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7" +"checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e" "checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7" diff --git a/Cargo.toml b/Cargo.toml index 2a3a4fd5f9..2cb96ac1e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,6 +54,7 @@ peeking_take_while = "0.1.2" quote = { version = "0.6", default-features = false } regex = "1.0" which = "2.0" +fxhash = "0.2" # New validation in 0.3.6 breaks bindgen-integration: # https://github.com/alexcrichton/proc-macro2/commit/489c642. proc-macro2 = { version = "0.4", default-features = false } diff --git a/src/codegen/mod.rs b/src/codegen/mod.rs index 9a7bf897e1..edaf7afef1 100644 --- a/src/codegen/mod.rs +++ b/src/codegen/mod.rs @@ -43,12 +43,13 @@ use proc_macro2::{self, Ident, Span}; use std; use std::borrow::Cow; use std::cell::Cell; -use std::collections::{HashSet, VecDeque}; -use std::collections::hash_map::{Entry, HashMap}; +use std::collections::VecDeque; +use std::collections::hash_map::Entry; use std::fmt::Write; use std::iter; use std::ops; use std::str::FromStr; +use {HashMap, HashSet}; // Name of type defined in constified enum module pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type"; @@ -2638,7 +2639,7 @@ impl CodeGenerator for Enum { ); // A map where we keep a value -> variant relation. - let mut seen_values = HashMap::<_, Ident>::new(); + let mut seen_values = HashMap::<_, Ident>::default(); let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &()); let is_toplevel = item.is_toplevel(ctx); diff --git a/src/ir/analysis/derive_copy.rs b/src/ir/analysis/derive_copy.rs index 9d0bcd190f..55d30097b4 100644 --- a/src/ir/analysis/derive_copy.rs +++ b/src/ir/analysis/derive_copy.rs @@ -11,8 +11,7 @@ use ir::template::TemplateParameters; use ir::traversal::EdgeKind; use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT; use ir::ty::TypeKind; -use std::collections::HashMap; -use std::collections::HashSet; +use {HashMap, HashSet}; /// An analysis that finds for each IR item whether copy cannot be derived. 
/// @@ -103,7 +102,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveCopy<'ctx> { type Output = HashSet; fn new(ctx: &'ctx BindgenContext) -> CannotDeriveCopy<'ctx> { - let cannot_derive_copy = HashSet::new(); + let cannot_derive_copy = HashSet::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); CannotDeriveCopy { diff --git a/src/ir/analysis/derive_debug.rs b/src/ir/analysis/derive_debug.rs index 9210148a8b..6580a68b69 100644 --- a/src/ir/analysis/derive_debug.rs +++ b/src/ir/analysis/derive_debug.rs @@ -10,8 +10,7 @@ use ir::item::IsOpaque; use ir::traversal::EdgeKind; use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT; use ir::ty::TypeKind; -use std::collections::HashMap; -use std::collections::HashSet; +use {HashMap, HashSet}; /// An analysis that finds for each IR item whether debug cannot be derived. /// @@ -104,7 +103,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveDebug<'ctx> { type Output = HashSet; fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDebug<'ctx> { - let cannot_derive_debug = HashSet::new(); + let cannot_derive_debug = HashSet::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); CannotDeriveDebug { diff --git a/src/ir/analysis/derive_default.rs b/src/ir/analysis/derive_default.rs index 2ff07ce9e8..904cabaa37 100644 --- a/src/ir/analysis/derive_default.rs +++ b/src/ir/analysis/derive_default.rs @@ -12,8 +12,7 @@ use ir::traversal::EdgeKind; use ir::traversal::Trace; use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT; use ir::ty::TypeKind; -use std::collections::HashMap; -use std::collections::HashSet; +use {HashMap, HashSet}; /// An analysis that finds for each IR item whether default cannot be derived. /// @@ -99,8 +98,8 @@ impl<'ctx> MonotoneFramework for CannotDeriveDefault<'ctx> { type Output = HashSet; fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDefault<'ctx> { - let mut dependencies = HashMap::new(); - let cannot_derive_default = HashSet::new(); + let mut dependencies = HashMap::default(); + let cannot_derive_default = HashSet::default(); let whitelisted_items: HashSet<_> = ctx.whitelisted_items().iter().cloned().collect(); diff --git a/src/ir/analysis/derive_hash.rs b/src/ir/analysis/derive_hash.rs index eee6d6f072..6c8b3976dd 100644 --- a/src/ir/analysis/derive_hash.rs +++ b/src/ir/analysis/derive_hash.rs @@ -10,8 +10,7 @@ use ir::item::IsOpaque; use ir::traversal::EdgeKind; use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT; use ir::ty::TypeKind; -use std::collections::HashMap; -use std::collections::HashSet; +use {HashMap, HashSet}; /// An analysis that finds for each IR item whether hash cannot be derived. 
/// @@ -96,7 +95,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveHash<'ctx> { type Output = HashSet; fn new(ctx: &'ctx BindgenContext) -> CannotDeriveHash<'ctx> { - let cannot_derive_hash = HashSet::new(); + let cannot_derive_hash = HashSet::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); CannotDeriveHash { diff --git a/src/ir/analysis/derive_partialeq_or_partialord.rs b/src/ir/analysis/derive_partialeq_or_partialord.rs index 5a9a21c553..a64fdf3855 100644 --- a/src/ir/analysis/derive_partialeq_or_partialord.rs +++ b/src/ir/analysis/derive_partialeq_or_partialord.rs @@ -9,8 +9,7 @@ use ir::item::{Item, IsOpaque}; use ir::traversal::{EdgeKind, Trace}; use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT; use ir::ty::{TypeKind, Type}; -use std::collections::HashMap; -use std::collections::hash_map::Entry; +use {HashMap, Entry}; /// An analysis that finds for each IR item whether `PartialEq`/`PartialOrd` /// cannot be derived. @@ -326,7 +325,7 @@ impl<'ctx> MonotoneFramework for CannotDerivePartialEqOrPartialOrd<'ctx> { fn new( ctx: &'ctx BindgenContext, ) -> CannotDerivePartialEqOrPartialOrd<'ctx> { - let can_derive_partialeq_or_partialord = HashMap::new(); + let can_derive_partialeq_or_partialord = HashMap::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); CannotDerivePartialEqOrPartialOrd { diff --git a/src/ir/analysis/has_destructor.rs b/src/ir/analysis/has_destructor.rs index c87b7e2580..c79b364591 100644 --- a/src/ir/analysis/has_destructor.rs +++ b/src/ir/analysis/has_destructor.rs @@ -5,8 +5,7 @@ use ir::context::{BindgenContext, ItemId}; use ir::traversal::EdgeKind; use ir::comp::{CompKind, Field, FieldMethods}; use ir::ty::TypeKind; -use std::collections::HashMap; -use std::collections::HashSet; +use {HashMap, HashSet}; /// An analysis that finds for each IR item whether it has a destructor or not /// @@ -73,7 +72,7 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> { type Output = HashSet; fn new(ctx: &'ctx BindgenContext) -> Self { - let have_destructor = HashSet::new(); + let have_destructor = HashSet::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); HasDestructorAnalysis { diff --git a/src/ir/analysis/has_float.rs b/src/ir/analysis/has_float.rs index 92bbe36cfd..69cfcc859e 100644 --- a/src/ir/analysis/has_float.rs +++ b/src/ir/analysis/has_float.rs @@ -1,8 +1,7 @@ //! Determining which types has float. use super::{ConstrainResult, MonotoneFramework, generate_dependencies}; -use std::collections::HashSet; -use std::collections::HashMap; +use {HashSet, HashMap}; use ir::context::{BindgenContext, ItemId}; use ir::traversal::EdgeKind; use ir::ty::TypeKind; @@ -84,7 +83,7 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> { type Output = HashSet; fn new(ctx: &'ctx BindgenContext) -> HasFloat<'ctx> { - let has_float = HashSet::new(); + let has_float = HashSet::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); HasFloat { diff --git a/src/ir/analysis/has_type_param_in_array.rs b/src/ir/analysis/has_type_param_in_array.rs index 98288d3cfb..98959f0f12 100644 --- a/src/ir/analysis/has_type_param_in_array.rs +++ b/src/ir/analysis/has_type_param_in_array.rs @@ -6,8 +6,7 @@ use ir::comp::FieldMethods; use ir::context::{BindgenContext, ItemId}; use ir::traversal::EdgeKind; use ir::ty::TypeKind; -use std::collections::HashMap; -use std::collections::HashSet; +use {HashMap, HashSet}; /// An analysis that finds for each IR item whether it has array or not. 
/// @@ -92,7 +91,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> { fn new( ctx: &'ctx BindgenContext, ) -> HasTypeParameterInArray<'ctx> { - let has_type_parameter_in_array = HashSet::new(); + let has_type_parameter_in_array = HashSet::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); HasTypeParameterInArray { diff --git a/src/ir/analysis/has_vtable.rs b/src/ir/analysis/has_vtable.rs index f3f2a69534..410fca0576 100644 --- a/src/ir/analysis/has_vtable.rs +++ b/src/ir/analysis/has_vtable.rs @@ -5,9 +5,8 @@ use ir::context::{BindgenContext, ItemId}; use ir::traversal::EdgeKind; use ir::ty::TypeKind; use std::cmp; -use std::collections::HashMap; -use std::collections::hash_map::Entry; use std::ops; +use {HashMap, Entry}; /// The result of the `HasVtableAnalysis` for an individual item. #[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)] @@ -148,7 +147,7 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> { type Output = HashMap; fn new(ctx: &'ctx BindgenContext) -> HasVtableAnalysis<'ctx> { - let have_vtable = HashMap::new(); + let have_vtable = HashMap::default(); let dependencies = generate_dependencies(ctx, Self::consider_edge); HasVtableAnalysis { diff --git a/src/ir/analysis/mod.rs b/src/ir/analysis/mod.rs index 64958c07c1..7d6241bad1 100644 --- a/src/ir/analysis/mod.rs +++ b/src/ir/analysis/mod.rs @@ -64,7 +64,7 @@ pub use self::sizedness::{Sizedness, SizednessAnalysis, SizednessResult}; use ir::context::{BindgenContext, ItemId}; use ir::traversal::{EdgeKind, Trace}; -use std::collections::HashMap; +use HashMap; use std::fmt; use std::ops; @@ -190,7 +190,7 @@ pub fn generate_dependencies( where F: Fn(EdgeKind) -> bool, { - let mut dependencies = HashMap::new(); + let mut dependencies = HashMap::default(); for &item in ctx.whitelisted_items() { dependencies.entry(item).or_insert(vec![]); @@ -219,7 +219,7 @@ where #[cfg(test)] mod tests { use super::*; - use std::collections::{HashMap, HashSet}; + use {HashMap, HashSet}; // Here we find the set of nodes that are reachable from any given // node. This is a lattice mapping nodes to subsets of all nodes. Our join @@ -334,14 +334,14 @@ mod tests { // implementation. Don't copy this code outside of this test! let original_size = - self.reachable.entry(node).or_insert(HashSet::new()).len(); + self.reachable.entry(node).or_insert(HashSet::default()).len(); for sub_node in self.graph.0[&node].iter() { self.reachable.get_mut(&node).unwrap().insert(*sub_node); let sub_reachable = self.reachable .entry(*sub_node) - .or_insert(HashSet::new()) + .or_insert(HashSet::default()) .clone(); for transitive in sub_reachable { @@ -386,7 +386,7 @@ mod tests { nodes.as_ref().iter().cloned().map(Node).collect() } - let mut expected = HashMap::new(); + let mut expected = HashMap::default(); expected.insert(Node(1), nodes([3, 4, 5, 6, 7, 8])); expected.insert(Node(2), nodes([2])); expected.insert(Node(3), nodes([3, 4, 5, 6, 7, 8])); diff --git a/src/ir/analysis/sizedness.rs b/src/ir/analysis/sizedness.rs index 9c07435544..12d679bb14 100644 --- a/src/ir/analysis/sizedness.rs +++ b/src/ir/analysis/sizedness.rs @@ -5,10 +5,8 @@ use ir::context::{BindgenContext, TypeId}; use ir::item::IsOpaque; use ir::traversal::EdgeKind; use ir::ty::TypeKind; -use std::cmp; -use std::collections::HashMap; -use std::collections::hash_map::Entry; -use std::ops; +use std::{cmp, ops}; +use {HashMap, Entry}; /// The result of the `Sizedness` analysis for an individual item. 
/// @@ -194,7 +192,7 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> { }) .collect(); - let sized = HashMap::new(); + let sized = HashMap::default(); SizednessAnalysis { ctx, diff --git a/src/ir/analysis/template_params.rs b/src/ir/analysis/template_params.rs index b326e6b58b..bd1b51a2f0 100644 --- a/src/ir/analysis/template_params.rs +++ b/src/ir/analysis/template_params.rs @@ -94,7 +94,7 @@ use ir::item::{Item, ItemSet}; use ir::template::{TemplateInstantiation, TemplateParameters}; use ir::traversal::{EdgeKind, Trace}; use ir::ty::TypeKind; -use std::collections::{HashMap, HashSet}; +use {HashMap, HashSet}; /// An analysis that finds for each IR item its set of template parameters that /// it uses. @@ -373,8 +373,8 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> { fn new( ctx: &'ctx BindgenContext, ) -> UsedTemplateParameters<'ctx> { - let mut used = HashMap::new(); - let mut dependencies = HashMap::new(); + let mut used = HashMap::default(); + let mut dependencies = HashMap::default(); let whitelisted_items: HashSet<_> = ctx.whitelisted_items().iter().cloned().collect(); diff --git a/src/ir/comp.rs b/src/ir/comp.rs index 704c1776ff..fd4c827438 100644 --- a/src/ir/comp.rs +++ b/src/ir/comp.rs @@ -17,7 +17,7 @@ use peeking_take_while::PeekableExt; use std::cmp; use std::io; use std::mem; -use std::collections::HashMap; +use HashMap; /// The kind of compound type. #[derive(Debug, Copy, Clone, PartialEq)] diff --git a/src/ir/context.rs b/src/ir/context.rs index 17dd8512cb..f2399f51bc 100644 --- a/src/ir/context.rs +++ b/src/ir/context.rs @@ -27,10 +27,11 @@ use parse::ClangItemParser; use proc_macro2::{Ident, Span}; use std::borrow::Cow; use std::cell::Cell; -use std::collections::{HashMap, HashSet, hash_map}; use std::collections::btree_map::{self, BTreeMap}; use std::iter::IntoIterator; use std::mem; +use std::collections::HashMap as StdHashMap; +use {HashMap, HashSet, Entry}; /// An identifier for some kind of IR item. #[derive(Debug, Copy, Clone, Eq, PartialOrd, Ord, Hash)] @@ -348,10 +349,12 @@ pub struct BindgenContext { /// potentially break that assumption. currently_parsed_types: Vec, - /// A HashSet with all the already parsed macro names. This is done to avoid + /// A map with all the already parsed macro names. This is done to avoid /// hard errors while parsing duplicated macros, as well to allow macro /// expression parsing. - parsed_macros: HashMap, cexpr::expr::EvalResult>, + /// + /// This needs to be an std::HashMap because the cexpr API requires it. + parsed_macros: StdHashMap, cexpr::expr::EvalResult>, /// The active replacements collected from replaces="xxx" annotations. replacements: HashMap, ItemId>, @@ -1380,7 +1383,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" } else { // If you aren't recursively whitelisting, then we can't really make // any sense of template parameter usage, and you're on your own. - let mut used_params = HashMap::new(); + let mut used_params = HashMap::default(); for &id in self.whitelisted_items() { used_params.entry(id).or_insert( id.self_template_params(self).into_iter().map(|p| p.into()).collect() @@ -2079,7 +2082,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" } /// Get the currently parsed macros. 
- pub fn parsed_macros(&self) -> &HashMap, cexpr::expr::EvalResult> { + pub fn parsed_macros(&self) -> &StdHashMap, cexpr::expr::EvalResult> { debug_assert!(!self.in_codegen_phase()); &self.parsed_macros } @@ -2105,7 +2108,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" /// and implies that the original type is hidden. pub fn replace(&mut self, name: &[String], potential_ty: ItemId) { match self.replacements.entry(name.into()) { - hash_map::Entry::Vacant(entry) => { + Entry::Vacant(entry) => { debug!( "Defining replacement for {:?} as {:?}", name, @@ -2113,7 +2116,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" ); entry.insert(potential_ty); } - hash_map::Entry::Occupied(occupied) => { + Entry::Occupied(occupied) => { warn!( "Replacement for {:?} already defined as {:?}; \ ignoring duplicate replacement definition as {:?}", diff --git a/src/lib.rs b/src/lib.rs index a906d4afee..ef15d49a7c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,6 +23,7 @@ extern crate cexpr; #[allow(unused_extern_crates)] extern crate cfg_if; extern crate clang_sys; +extern crate fxhash; #[macro_use] extern crate lazy_static; extern crate peeking_take_while; @@ -88,7 +89,6 @@ use regex_set::RegexSet; pub use codegen::EnumVariation; use std::borrow::Cow; -use std::collections::HashMap; use std::fs::{File, OpenOptions}; use std::io::{self, Write}; use std::iter; @@ -96,6 +96,11 @@ use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::Arc; +// Some convenient typedefs for a fast hash map and hash set. +type HashMap = ::fxhash::FxHashMap; +type HashSet = ::fxhash::FxHashSet; +pub(crate) use ::std::collections::hash_map::Entry; + fn args_are_cpp(clang_args: &[String]) -> bool { return clang_args .windows(2) From 8a579b134dbaf369803cffbe835ba14ab2765fba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Sun, 23 Dec 2018 19:32:25 +0100 Subject: [PATCH 2/5] Switch to hashbrown. This wins between 2 and 5 milliseconds more in the test-case above, so no reason not to I guess. 
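For context, the shape of both this change and the FxHash one before it is a pair of crate-root type aliases that every module imports, so swapping the hash implementation touches a single place. A minimal single-file sketch of that pattern follows; the `count_refs` function and its names are illustrative only, not bindgen code, and the real patch keeps the aliases private rather than `pub(crate)`:

// Crate root: the whole crate goes through these aliases, so changing the
// hash implementation (fxhash in the previous patch, hashbrown here) is a
// three-line diff plus the dependency bump.
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V>;
pub(crate) type HashSet<K> = hashbrown::HashSet<K>;
pub(crate) use hashbrown::hash_map::Entry;

// Call sites use `HashMap::default()` rather than `HashMap::new()`: on std
// maps, `new()` is only defined for the default RandomState hasher, so
// `default()` is what keeps the code hasher-agnostic across both patches.
pub(crate) fn count_refs(ids: &[u32]) -> HashMap<u32, usize> {
    let mut counts = HashMap::default();
    for &id in ids {
        match counts.entry(id) {
            Entry::Occupied(mut entry) => *entry.get_mut() += 1,
            Entry::Vacant(entry) => {
                entry.insert(1);
            }
        }
    }
    counts
}
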
--- Cargo.lock | 25 ++++++++++++++++--------- Cargo.toml | 2 +- src/codegen/mod.rs | 3 +-- src/lib.rs | 8 ++++---- 4 files changed, 22 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6295be5739..ce167ca27e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -56,7 +56,7 @@ dependencies = [ "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)", "diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", - "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hashbrown 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -145,17 +145,18 @@ dependencies = [ ] [[package]] -name = "fxhash" -version = "0.2.1" +name = "glob" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] -name = "glob" -version = "0.2.11" +name = "hashbrown" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "humantime" @@ -277,6 +278,11 @@ name = "rustc-demangle" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "scopeguard" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "shlex" version = "0.1.1" @@ -412,8 +418,8 @@ dependencies = [ "checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a" "checksum env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afb070faf94c85d17d50ca44f6ad076bce18ae92f0037d350947240a36e9d42e" "checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7" -"checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" +"checksum hashbrown 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "64b7d419d0622ae02fe5da6b9a5e1964b610a65bb37923b976aeebb6dbb8f86e" "checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e" "checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7" "checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d" @@ -430,6 +436,7 @@ dependencies = [ "checksum regex 1.0.5 
(registry+https://github.com/rust-lang/crates.io-index)" = "2069749032ea3ec200ca51e4a31df41759190a88edca0d2d86ee8bedf7073341" "checksum regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "747ba3b235651f6e2f67dfa8bcdcd073ddb7c243cb21c442fc12395dfcac212d" "checksum rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "bcfe5b13211b4d78e5c2cadfebd7769197d95c639c35a50057eb4c05de811395" +"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" "checksum shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550" "checksum termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4096add70612622289f2fdcdbd5086dc81c1e2675e6ae58d6c4f62a16c6d7f2f" diff --git a/Cargo.toml b/Cargo.toml index 2cb96ac1e3..e1d9dcbacc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,7 +54,7 @@ peeking_take_while = "0.1.2" quote = { version = "0.6", default-features = false } regex = "1.0" which = "2.0" -fxhash = "0.2" +hashbrown = "0.1" # New validation in 0.3.6 breaks bindgen-integration: # https://github.com/alexcrichton/proc-macro2/commit/489c642. proc-macro2 = { version = "0.4", default-features = false } diff --git a/src/codegen/mod.rs b/src/codegen/mod.rs index edaf7afef1..9c818cc1eb 100644 --- a/src/codegen/mod.rs +++ b/src/codegen/mod.rs @@ -44,12 +44,11 @@ use std; use std::borrow::Cow; use std::cell::Cell; use std::collections::VecDeque; -use std::collections::hash_map::Entry; use std::fmt::Write; use std::iter; use std::ops; use std::str::FromStr; -use {HashMap, HashSet}; +use {HashMap, HashSet, Entry}; // Name of type defined in constified enum module pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type"; diff --git a/src/lib.rs b/src/lib.rs index ef15d49a7c..e11773efe4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,7 +23,7 @@ extern crate cexpr; #[allow(unused_extern_crates)] extern crate cfg_if; extern crate clang_sys; -extern crate fxhash; +extern crate hashbrown; #[macro_use] extern crate lazy_static; extern crate peeking_take_while; @@ -97,9 +97,9 @@ use std::process::{Command, Stdio}; use std::sync::Arc; // Some convenient typedefs for a fast hash map and hash set. -type HashMap = ::fxhash::FxHashMap; -type HashSet = ::fxhash::FxHashSet; -pub(crate) use ::std::collections::hash_map::Entry; +type HashMap = ::hashbrown::HashMap; +type HashSet = ::hashbrown::HashSet; +pub(crate) use ::hashbrown::hash_map::Entry; fn args_are_cpp(clang_args: &[String]) -> bool { return clang_args From 1cb5ef10743f5bfbabf5a6aba9a34b0e829e02e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Sun, 23 Dec 2018 22:29:59 +0100 Subject: [PATCH 3/5] Stop using a BTreeSet to store items. We use sequential id's so a Vec> does the trick. This reduces the time for: time ./target/release/bindgen tests/stylo.hpp --no-rustfmt-bindings From ~6s to less than 5s on my machine. 
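The data structure behind this change is worth spelling out: item ids are handed out sequentially, so a `Vec<Option<Item>>` indexed by the id can replace the ordered map, with `None` marking a slot that has been reserved (or temporarily loaned out) but not yet filled. A minimal self-contained sketch of the pattern, using toy `Arena`/`Id` types rather than the real `BindgenContext`/`ItemId`:

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Id(usize);

struct Arena<T> {
    items: Vec<Option<T>>,
}

impl<T> Arena<T> {
    fn new() -> Self {
        Arena { items: Vec::new() }
    }

    /// Hand out the next sequential id; its slot starts out empty.
    fn next_id(&mut self) -> Id {
        let id = Id(self.items.len());
        self.items.push(None);
        id
    }

    /// Fill a previously reserved slot, asserting it was not filled twice.
    fn insert(&mut self, id: Id, value: T) {
        let old = std::mem::replace(&mut self.items[id.0], Some(value));
        assert!(old.is_none(), "slot {:?} already had an item", id);
    }

    fn get(&self, id: Id) -> Option<&T> {
        self.items.get(id.0)?.as_ref()
    }

    /// Visit filled slots in id order; ids are sequential, so this stays as
    /// deterministic as the ordered-map iteration it replaces.
    fn iter<'a>(&'a self) -> impl Iterator<Item = (Id, &'a T)> + 'a {
        self.items
            .iter()
            .enumerate()
            .filter_map(|(index, slot)| Some((Id(index), slot.as_ref()?)))
    }
}

fn main() {
    let mut arena = Arena::new();
    let root = arena.next_id();
    arena.insert(root, "root module");
    let child = arena.next_id();
    arena.insert(child, "child item");
    assert_eq!(arena.get(child), Some(&"child item"));
    for (id, item) in arena.iter() {
        println!("{:?} -> {}", id, item);
    }
}

Since ids only ever grow, iterating the vector front to back still yields a stable, deterministic order, which is what the BTreeMap was previously being kept around for; the `take()`-then-put-back trick in the `with_loaned_item` hunk below uses the same `None` sentinel while an item is temporarily borrowed away from the context.
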
--- src/ir/context.rs | 110 ++++++++++++++++++++++------------------------ src/ir/dot.rs | 2 +- src/ir/objc.rs | 5 +-- 3 files changed, 55 insertions(+), 62 deletions(-) diff --git a/src/ir/context.rs b/src/ir/context.rs index f2399f51bc..e4e828e2bd 100644 --- a/src/ir/context.rs +++ b/src/ir/context.rs @@ -27,7 +27,6 @@ use parse::ClangItemParser; use proc_macro2::{Ident, Span}; use std::borrow::Cow; use std::cell::Cell; -use std::collections::btree_map::{self, BTreeMap}; use std::iter::IntoIterator; use std::mem; use std::collections::HashMap as StdHashMap; @@ -302,14 +301,8 @@ enum TypeKey { /// A context used during parsing and generation of structs. #[derive(Debug)] pub struct BindgenContext { - /// The map of all the items parsed so far. - /// - /// It's a BTreeMap because we want the keys to be sorted to have consistent - /// output. - items: BTreeMap, - - /// The next item id to use during this bindings regeneration. - next_item_id: ItemId, + /// The map of all the items parsed so far, keyed off ItemId. + items: Vec>, /// Clang USR to type map. This is needed to be able to associate types with /// item ids during parsing. @@ -597,12 +590,11 @@ If you encounter an error missing from this list, please file an issue or a PR!" let root_module = Self::build_root_module(ItemId(0)); let root_module_id = root_module.id().as_module_id_unchecked(); - let mut me = BindgenContext { - items: Default::default(), + BindgenContext { + items: vec![Some(root_module)], types: Default::default(), type_params: Default::default(), modules: Default::default(), - next_item_id: ItemId(1), root_module: root_module_id, current_module: root_module_id, semantic_parents: Default::default(), @@ -631,11 +623,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" have_destructor: None, has_type_param_in_array: None, has_float: None, - }; - - me.add_item(root_module, None, None); - - me + } } /// Creates a timer for the current bindgen phase. If time_phases is `true`, @@ -718,7 +706,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" self.need_bitfield_allocation.push(id); } - let old_item = self.items.insert(id, item); + let old_item = mem::replace(&mut self.items[id.0], Some(item)); assert!( old_item.is_none(), "should not have already associated an item with the given id" @@ -746,7 +734,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" debug!( "Invalid declaration {:?} found for type {:?}", declaration, - self.items.get(&id).unwrap().kind().expect_type() + self.resolve_item_fallible(id).unwrap().kind().expect_type() ); return; } @@ -775,9 +763,9 @@ If you encounter an error missing from this list, please file an issue or a PR!" /// details. fn add_item_to_module(&mut self, item: &Item) { assert!(item.id() != self.root_module); - assert!(!self.items.contains_key(&item.id())); + assert!(self.resolve_item_fallible(item.id()).is_none()); - if let Some(parent) = self.items.get_mut(&item.parent_id()) { + if let Some(ref mut parent) = self.items[item.parent_id().0] { if let Some(module) = parent.as_module_mut() { debug!( "add_item_to_module: adding {:?} as child of parent module {:?}", @@ -796,8 +784,8 @@ If you encounter an error missing from this list, please file an issue or a PR!" 
self.current_module ); - self.items - .get_mut(&self.current_module.into()) + self.items[(self.current_module.0).0] + .as_mut() .expect("Should always have an item for self.current_module") .as_module_mut() .expect("self.current_module should always be a module") @@ -825,7 +813,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" self.add_item_to_module(&item); let id = item.id(); - let old_item = self.items.insert(id, item); + let old_item = mem::replace(&mut self.items[id.0], Some(item)); assert!( old_item.is_none(), "should not have already associated an item with the given id" @@ -941,8 +929,14 @@ If you encounter an error missing from this list, please file an issue or a PR!" } /// Iterate over all items that have been defined. - pub fn items<'a>(&'a self) -> btree_map::Iter<'a, ItemId, Item> { - self.items.iter() + pub fn items(&self) -> impl Iterator { + self.items + .iter() + .enumerate() + .filter_map(|(index, item)| { + let item = item.as_ref()?; + Some((ItemId(index), item)) + }) } /// Have we collected all unresolved type references yet? @@ -957,7 +951,8 @@ If you encounter an error missing from this list, please file an issue or a PR!" debug_assert!(!self.collected_typerefs); self.collected_typerefs = true; let mut typerefs = vec![]; - for (id, ref mut item) in &mut self.items { + + for (id, item) in self.items() { let kind = item.kind(); let ty = match kind.as_type() { Some(ty) => ty, @@ -966,7 +961,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" match *ty.kind() { TypeKind::UnresolvedTypeRef(ref ty, loc, parent_id) => { - typerefs.push((*id, ty.clone(), loc, parent_id)); + typerefs.push((id, ty.clone(), loc, parent_id)); } _ => {} }; @@ -987,7 +982,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" Item::new_opaque_type(self.next_item_id(), &ty, self) }); - let item = self.items.get_mut(&id).unwrap(); + let item = self.items[id.0].as_mut().unwrap(); *item.kind_mut().as_type_mut().unwrap().kind_mut() = TypeKind::ResolvedTypeRef(resolved); resolved @@ -1018,11 +1013,11 @@ If you encounter an error missing from this list, please file an issue or a PR!" where F: (FnOnce(&BindgenContext, &mut Item) -> T) { - let mut item = self.items.remove(&id).unwrap(); + let mut item = self.items[id.0].take().unwrap(); let result = f(self, &mut item); - let existing = self.items.insert(id, item); + let existing = mem::replace(&mut self.items[id.0], Some(item)); assert!(existing.is_none()); result @@ -1051,15 +1046,13 @@ If you encounter an error missing from this list, please file an issue or a PR!" fn deanonymize_fields(&mut self) { let _t = self.timer("deanonymize_fields"); - let comp_item_ids: Vec = self.items - .iter() + let comp_item_ids: Vec = self.items() .filter_map(|(id, item)| { if item.kind().as_type()?.is_comp() { return Some(id); } None }) - .cloned() .collect(); for id in comp_item_ids { @@ -1090,7 +1083,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" let mut replacements = vec![]; - for (id, item) in self.items.iter() { + for (id, item) in self.items() { if item.annotations().use_instead_of().is_some() { continue; } @@ -1114,10 +1107,10 @@ If you encounter an error missing from this list, please file an issue or a PR!" let replacement = self.replacements.get(&path[1..]); if let Some(replacement) = replacement { - if replacement != id { + if *replacement != id { // We set this just after parsing the annotation. 
It's // very unlikely, but this can happen. - if self.items.get(replacement).is_some() { + if self.resolve_item_fallible(*replacement).is_some() { replacements.push((id.expect_type_id(self), replacement.expect_type_id(self))); } } @@ -1126,9 +1119,9 @@ If you encounter an error missing from this list, please file an issue or a PR!" for (id, replacement_id) in replacements { debug!("Replacing {:?} with {:?}", id, replacement_id); - let new_parent = { - let item = self.items.get_mut(&id.into()).unwrap(); + let item_id: ItemId = id.into(); + let item = self.items[item_id.0].as_mut().unwrap(); *item.kind_mut().as_type_mut().unwrap().kind_mut() = TypeKind::ResolvedTypeRef(replacement_id); item.parent_id() @@ -1146,8 +1139,9 @@ If you encounter an error missing from this list, please file an issue or a PR!" continue; } - self.items - .get_mut(&replacement_id.into()) + let replacement_item_id: ItemId = replacement_id.into(); + self.items[replacement_item_id.0] + .as_mut() .unwrap() .set_parent_for_replacement(new_parent); @@ -1183,16 +1177,16 @@ If you encounter an error missing from this list, please file an issue or a PR!" continue; } - self.items - .get_mut(&old_module) + self.items[old_module.0] + .as_mut() .unwrap() .as_module_mut() .unwrap() .children_mut() .remove(&replacement_id.into()); - self.items - .get_mut(&new_module) + self.items[new_module.0] + .as_mut() .unwrap() .as_module_mut() .unwrap() @@ -1260,7 +1254,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" assert!(self.in_codegen_phase()); assert!(self.current_module == self.root_module); - let roots = self.items().map(|(&id, _)| id); + let roots = self.items().map(|(id, _)| id); traversal::AssertNoDanglingItemsTraversal::new( self, roots, @@ -1276,7 +1270,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" assert!(self.in_codegen_phase()); assert!(self.current_module == self.root_module); - for (&id, _item) in self.items() { + for (id, _item) in self.items() { if id == self.root_module { continue; } @@ -1467,7 +1461,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" debug_assert!(item.kind().is_type()); self.add_item_to_module(&item); let id = item.id(); - let old_item = self.items.insert(id, item); + let old_item = mem::replace(&mut self.items[id.0], Some(item)); assert!(old_item.is_none(), "Inserted type twice?"); } @@ -1502,13 +1496,13 @@ If you encounter an error missing from this list, please file an issue or a PR!" /// /// Panics if the id resolves to an item that is not a type. pub fn safe_resolve_type(&self, type_id: TypeId) -> Option<&Type> { - self.items.get(&type_id.into()).map(|t| t.kind().expect_type()) + self.resolve_item_fallible(type_id).map(|t| t.kind().expect_type()) } /// Resolve the given `ItemId` into an `Item`, or `None` if no such item /// exists. pub fn resolve_item_fallible>(&self, id: Id) -> Option<&Item> { - self.items.get(&id.into()) + self.items.get(id.into().0)?.as_ref() } /// Resolve the given `ItemId` into an `Item`. @@ -1516,7 +1510,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" /// Panics if the given id does not resolve to any item. 
pub fn resolve_item>(&self, item_id: Id) -> &Item { let item_id = item_id.into(); - match self.items.get(&item_id) { + match self.resolve_item_fallible(item_id) { Some(item) => item, None => panic!("Not an item: {:?}", item_id), } @@ -1782,8 +1776,8 @@ If you encounter an error missing from this list, please file an issue or a PR!" sub_item ); self.add_item_to_module(&sub_item); - debug_assert!(sub_id == sub_item.id()); - self.items.insert(sub_id, sub_item); + debug_assert_eq!(sub_id, sub_item.id()); + self.items[sub_id.0] = Some(sub_item); args.push(sub_id.as_type_id_unchecked()); } } @@ -1842,8 +1836,8 @@ If you encounter an error missing from this list, please file an issue or a PR!" // Bypass all the validations in add_item explicitly. debug!("instantiate_template: inserting item: {:?}", item); self.add_item_to_module(&item); - debug_assert!(with_id == item.id()); - self.items.insert(with_id, item); + debug_assert_eq!(with_id, item.id()); + self.items[with_id.0] = Some(item); Some(with_id.as_type_id_unchecked()) } @@ -1999,8 +1993,8 @@ If you encounter an error missing from this list, please file an issue or a PR!" /// Returns the next item id to be used for an item. pub fn next_item_id(&mut self) -> ItemId { - let ret = self.next_item_id; - self.next_item_id = ItemId(self.next_item_id.0 + 1); + let ret = ItemId(self.items.len()); + self.items.push(None); ret } @@ -2349,7 +2343,7 @@ If you encounter an error missing from this list, please file an issue or a PR!" } } }) - .map(|(&id, _)| id) + .map(|(id, _)| id) .collect::>(); // The reversal preserves the expected ordering of traversal, diff --git a/src/ir/dot.rs b/src/ir/dot.rs index 48bd1d9164..6caca78153 100644 --- a/src/ir/dot.rs +++ b/src/ir/dot.rs @@ -32,7 +32,7 @@ where let mut err: Option> = None; for (id, item) in ctx.items() { - let is_whitelisted = ctx.whitelisted_items().contains(id); + let is_whitelisted = ctx.whitelisted_items().contains(&id); writeln!( &mut dot_file, diff --git a/src/ir/objc.rs b/src/ir/objc.rs index 61c2235635..c7801df101 100644 --- a/src/ir/objc.rs +++ b/src/ir/objc.rs @@ -131,10 +131,9 @@ impl ObjCInterface { if protocol.is_protocol { debug!("Checking protocol {}, ty.name {:?}", protocol.name, ty.name()); - if Some(needle.as_ref()) == ty.name() - { + if Some(needle.as_ref()) == ty.name() { debug!("Found conforming protocol {:?}", item); - interface.conforms_to.push(*id); + interface.conforms_to.push(id); break; } } From 9f7eaa988bebbb1d78ee8a6b5d58cb6d94f19a7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Sun, 23 Dec 2018 22:44:58 +0100 Subject: [PATCH 4/5] codegen: We should not iterate over codegen_items. This should allow making it a HashSet. 
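The motivation is iteration order: a hash set iterates in an unspecified order, so emitting IR by walking `codegen_items` directly would make the output nondeterministic, while a membership test is order-independent. The hunk below therefore drives the loop off `context.items()` (stable id order) and filters with `contains`. A tiny standalone sketch of the same idea, with made-up item data rather than the bindgen IR:

use std::collections::HashSet;

// Deterministic output: iterate the id-ordered item list and only test the
// (unordered) set for membership, never iterate the set itself.
fn emit_ir(all_items: &[(usize, &str)], codegen_items: &HashSet<usize>) {
    for &(id, item) in all_items {
        if codegen_items.contains(&id) {
            println!("ir: {} = {}", id, item);
        }
    }
}

fn main() {
    let all_items: [(usize, &str); 3] = [(0, "root"), (1, "Foo"), (2, "Bar")];
    let codegen: HashSet<usize> = [0, 2].iter().cloned().collect();
    emit_ir(&all_items, &codegen);
}
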
--- src/codegen/mod.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/codegen/mod.rs b/src/codegen/mod.rs index 9c818cc1eb..03e8543bba 100644 --- a/src/codegen/mod.rs +++ b/src/codegen/mod.rs @@ -3546,11 +3546,12 @@ pub(crate) fn codegen(context: BindgenContext) -> (Vec debug!("codegen: {:?}", context.options()); - let codegen_items = context.codegen_items(); if context.options().emit_ir { - for &id in codegen_items { - let item = context.resolve_item(id); - println!("ir: {:?} = {:#?}", id, item); + let codegen_items = context.codegen_items(); + for (id, item) in context.items() { + if codegen_items.contains(&id) { + println!("ir: {:?} = {:#?}", id, item); + } } } From 2c8bf0b1b90b513967fd4f49a9b80dd37fbd4b29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Mon, 24 Dec 2018 03:59:21 +0100 Subject: [PATCH 5/5] ir: Move a variable to where it's used. --- src/ir/context.rs | 49 ++++++++++++++++++++++++++--------------------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/src/ir/context.rs b/src/ir/context.rs index e4e828e2bd..f9cd53f35a 100644 --- a/src/ir/context.rs +++ b/src/ir/context.rs @@ -2315,31 +2315,36 @@ If you encounter an error missing from this list, please file an issue or a PR!" return true; } + // Unnamed top-level enums are special and we + // whitelist them via the `whitelisted_vars` filter, + // since they're effectively top-level constants, + // and there's no way for them to be referenced + // consistently. let parent = self.resolve_item(item.parent_id()); - if parent.is_module() { - let mut prefix_path = parent.path_for_whitelisting(self); - - // Unnamed top-level enums are special and we - // whitelist them via the `whitelisted_vars` filter, - // since they're effectively top-level constants, - // and there's no way for them to be referenced - // consistently. - if let TypeKind::Enum(ref enum_) = *ty.kind() { - if ty.name().is_none() && - enum_.variants().iter().any(|variant| { - prefix_path.push(variant.name().into()); - let name = prefix_path[1..].join("::"); - prefix_path.pop().unwrap(); - self.options() - .whitelisted_vars - .matches(&name) - }) { - return true; - } - } + if !parent.is_module() { + return false; } - false + + let enum_ = match *ty.kind() { + TypeKind::Enum(ref e) => e, + _ => return false, + }; + + if ty.name().is_some() { + return false; + } + + let mut prefix_path = + parent.path_for_whitelisting(self); + enum_.variants().iter().any(|variant| { + prefix_path.push(variant.name().into()); + let name = prefix_path[1..].join("::"); + prefix_path.pop().unwrap(); + self.options() + .whitelisted_vars + .matches(&name) + }) } } })