Skip to content

Commit 1104631

Browse files
authored
Merge pull request #1473 from emilio/hash
Some perf tweaks
2 parents f2ac19c + 2c8bf0b commit 1104631

20 files changed

+162
-142
lines changed

Cargo.lock

+23
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

+1
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,7 @@ peeking_take_while = "0.1.2"
5454
quote = { version = "0.6", default-features = false }
5555
regex = "1.0"
5656
which = "2.0"
57+
hashbrown = "0.1"
5758
# New validation in 0.3.6 breaks bindgen-integration:
5859
# https://github.com/alexcrichton/proc-macro2/commit/489c642.
5960
proc-macro2 = { version = "0.4", default-features = false }

src/codegen/mod.rs

+8-7
Original file line numberDiff line numberDiff line change
@@ -43,12 +43,12 @@ use proc_macro2::{self, Ident, Span};
4343
use std;
4444
use std::borrow::Cow;
4545
use std::cell::Cell;
46-
use std::collections::{HashSet, VecDeque};
47-
use std::collections::hash_map::{Entry, HashMap};
46+
use std::collections::VecDeque;
4847
use std::fmt::Write;
4948
use std::iter;
5049
use std::ops;
5150
use std::str::FromStr;
51+
use {HashMap, HashSet, Entry};
5252

5353
// Name of type defined in constified enum module
5454
pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type";
@@ -2638,7 +2638,7 @@ impl CodeGenerator for Enum {
26382638
);
26392639

26402640
// A map where we keep a value -> variant relation.
2641-
let mut seen_values = HashMap::<_, Ident>::new();
2641+
let mut seen_values = HashMap::<_, Ident>::default();
26422642
let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
26432643
let is_toplevel = item.is_toplevel(ctx);
26442644

@@ -3546,11 +3546,12 @@ pub(crate) fn codegen(context: BindgenContext) -> (Vec<proc_macro2::TokenStream>
35463546

35473547
debug!("codegen: {:?}", context.options());
35483548

3549-
let codegen_items = context.codegen_items();
35503549
if context.options().emit_ir {
3551-
for &id in codegen_items {
3552-
let item = context.resolve_item(id);
3553-
println!("ir: {:?} = {:#?}", id, item);
3550+
let codegen_items = context.codegen_items();
3551+
for (id, item) in context.items() {
3552+
if codegen_items.contains(&id) {
3553+
println!("ir: {:?} = {:#?}", id, item);
3554+
}
35543555
}
35553556
}
35563557

src/ir/analysis/derive_copy.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,7 @@ use ir::template::TemplateParameters;
1111
use ir::traversal::EdgeKind;
1212
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
1313
use ir::ty::TypeKind;
14-
use std::collections::HashMap;
15-
use std::collections::HashSet;
14+
use {HashMap, HashSet};
1615

1716
/// An analysis that finds for each IR item whether copy cannot be derived.
1817
///
@@ -103,7 +102,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveCopy<'ctx> {
103102
type Output = HashSet<ItemId>;
104103

105104
fn new(ctx: &'ctx BindgenContext) -> CannotDeriveCopy<'ctx> {
106-
let cannot_derive_copy = HashSet::new();
105+
let cannot_derive_copy = HashSet::default();
107106
let dependencies = generate_dependencies(ctx, Self::consider_edge);
108107

109108
CannotDeriveCopy {

src/ir/analysis/derive_debug.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,7 @@ use ir::item::IsOpaque;
1010
use ir::traversal::EdgeKind;
1111
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
1212
use ir::ty::TypeKind;
13-
use std::collections::HashMap;
14-
use std::collections::HashSet;
13+
use {HashMap, HashSet};
1514

1615
/// An analysis that finds for each IR item whether debug cannot be derived.
1716
///
@@ -104,7 +103,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveDebug<'ctx> {
104103
type Output = HashSet<ItemId>;
105104

106105
fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDebug<'ctx> {
107-
let cannot_derive_debug = HashSet::new();
106+
let cannot_derive_debug = HashSet::default();
108107
let dependencies = generate_dependencies(ctx, Self::consider_edge);
109108

110109
CannotDeriveDebug {

src/ir/analysis/derive_default.rs

+3-4
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,7 @@ use ir::traversal::EdgeKind;
1212
use ir::traversal::Trace;
1313
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
1414
use ir::ty::TypeKind;
15-
use std::collections::HashMap;
16-
use std::collections::HashSet;
15+
use {HashMap, HashSet};
1716

1817
/// An analysis that finds for each IR item whether default cannot be derived.
1918
///
@@ -99,8 +98,8 @@ impl<'ctx> MonotoneFramework for CannotDeriveDefault<'ctx> {
9998
type Output = HashSet<ItemId>;
10099

101100
fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDefault<'ctx> {
102-
let mut dependencies = HashMap::new();
103-
let cannot_derive_default = HashSet::new();
101+
let mut dependencies = HashMap::default();
102+
let cannot_derive_default = HashSet::default();
104103

105104
let whitelisted_items: HashSet<_> =
106105
ctx.whitelisted_items().iter().cloned().collect();

src/ir/analysis/derive_hash.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,7 @@ use ir::item::IsOpaque;
1010
use ir::traversal::EdgeKind;
1111
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
1212
use ir::ty::TypeKind;
13-
use std::collections::HashMap;
14-
use std::collections::HashSet;
13+
use {HashMap, HashSet};
1514

1615
/// An analysis that finds for each IR item whether hash cannot be derived.
1716
///
@@ -96,7 +95,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveHash<'ctx> {
9695
type Output = HashSet<ItemId>;
9796

9897
fn new(ctx: &'ctx BindgenContext) -> CannotDeriveHash<'ctx> {
99-
let cannot_derive_hash = HashSet::new();
98+
let cannot_derive_hash = HashSet::default();
10099
let dependencies = generate_dependencies(ctx, Self::consider_edge);
101100

102101
CannotDeriveHash {

src/ir/analysis/derive_partialeq_or_partialord.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,7 @@ use ir::item::{Item, IsOpaque};
99
use ir::traversal::{EdgeKind, Trace};
1010
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
1111
use ir::ty::{TypeKind, Type};
12-
use std::collections::HashMap;
13-
use std::collections::hash_map::Entry;
12+
use {HashMap, Entry};
1413

1514
/// An analysis that finds for each IR item whether `PartialEq`/`PartialOrd`
1615
/// cannot be derived.
@@ -326,7 +325,7 @@ impl<'ctx> MonotoneFramework for CannotDerivePartialEqOrPartialOrd<'ctx> {
326325
fn new(
327326
ctx: &'ctx BindgenContext,
328327
) -> CannotDerivePartialEqOrPartialOrd<'ctx> {
329-
let can_derive_partialeq_or_partialord = HashMap::new();
328+
let can_derive_partialeq_or_partialord = HashMap::default();
330329
let dependencies = generate_dependencies(ctx, Self::consider_edge);
331330

332331
CannotDerivePartialEqOrPartialOrd {

src/ir/analysis/has_destructor.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,7 @@ use ir::context::{BindgenContext, ItemId};
55
use ir::traversal::EdgeKind;
66
use ir::comp::{CompKind, Field, FieldMethods};
77
use ir::ty::TypeKind;
8-
use std::collections::HashMap;
9-
use std::collections::HashSet;
8+
use {HashMap, HashSet};
109

1110
/// An analysis that finds for each IR item whether it has a destructor or not
1211
///
@@ -73,7 +72,7 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
7372
type Output = HashSet<ItemId>;
7473

7574
fn new(ctx: &'ctx BindgenContext) -> Self {
76-
let have_destructor = HashSet::new();
75+
let have_destructor = HashSet::default();
7776
let dependencies = generate_dependencies(ctx, Self::consider_edge);
7877

7978
HasDestructorAnalysis {

src/ir/analysis/has_float.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
11
//! Determining which types have float.
22
33
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
4-
use std::collections::HashSet;
5-
use std::collections::HashMap;
4+
use {HashSet, HashMap};
65
use ir::context::{BindgenContext, ItemId};
76
use ir::traversal::EdgeKind;
87
use ir::ty::TypeKind;
@@ -84,7 +83,7 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
8483
type Output = HashSet<ItemId>;
8584

8685
fn new(ctx: &'ctx BindgenContext) -> HasFloat<'ctx> {
87-
let has_float = HashSet::new();
86+
let has_float = HashSet::default();
8887
let dependencies = generate_dependencies(ctx, Self::consider_edge);
8988

9089
HasFloat {

src/ir/analysis/has_type_param_in_array.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,7 @@ use ir::comp::FieldMethods;
66
use ir::context::{BindgenContext, ItemId};
77
use ir::traversal::EdgeKind;
88
use ir::ty::TypeKind;
9-
use std::collections::HashMap;
10-
use std::collections::HashSet;
9+
use {HashMap, HashSet};
1110

1211
/// An analysis that finds for each IR item whether it has array or not.
1312
///
@@ -92,7 +91,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
9291
fn new(
9392
ctx: &'ctx BindgenContext,
9493
) -> HasTypeParameterInArray<'ctx> {
95-
let has_type_parameter_in_array = HashSet::new();
94+
let has_type_parameter_in_array = HashSet::default();
9695
let dependencies = generate_dependencies(ctx, Self::consider_edge);
9796

9897
HasTypeParameterInArray {

src/ir/analysis/has_vtable.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,8 @@ use ir::context::{BindgenContext, ItemId};
55
use ir::traversal::EdgeKind;
66
use ir::ty::TypeKind;
77
use std::cmp;
8-
use std::collections::HashMap;
9-
use std::collections::hash_map::Entry;
108
use std::ops;
9+
use {HashMap, Entry};
1110

1211
/// The result of the `HasVtableAnalysis` for an individual item.
1312
#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)]
@@ -148,7 +147,7 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
148147
type Output = HashMap<ItemId, HasVtableResult>;
149148

150149
fn new(ctx: &'ctx BindgenContext) -> HasVtableAnalysis<'ctx> {
151-
let have_vtable = HashMap::new();
150+
let have_vtable = HashMap::default();
152151
let dependencies = generate_dependencies(ctx, Self::consider_edge);
153152

154153
HasVtableAnalysis {

src/ir/analysis/mod.rs

+6-6
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ pub use self::sizedness::{Sizedness, SizednessAnalysis, SizednessResult};
6464
use ir::context::{BindgenContext, ItemId};
6565

6666
use ir::traversal::{EdgeKind, Trace};
67-
use std::collections::HashMap;
67+
use HashMap;
6868
use std::fmt;
6969
use std::ops;
7070

@@ -190,7 +190,7 @@ pub fn generate_dependencies<F>(
190190
where
191191
F: Fn(EdgeKind) -> bool,
192192
{
193-
let mut dependencies = HashMap::new();
193+
let mut dependencies = HashMap::default();
194194

195195
for &item in ctx.whitelisted_items() {
196196
dependencies.entry(item).or_insert(vec![]);
@@ -219,7 +219,7 @@ where
219219
#[cfg(test)]
220220
mod tests {
221221
use super::*;
222-
use std::collections::{HashMap, HashSet};
222+
use {HashMap, HashSet};
223223

224224
// Here we find the set of nodes that are reachable from any given
225225
// node. This is a lattice mapping nodes to subsets of all nodes. Our join
@@ -334,14 +334,14 @@ mod tests {
334334
// implementation. Don't copy this code outside of this test!
335335

336336
let original_size =
337-
self.reachable.entry(node).or_insert(HashSet::new()).len();
337+
self.reachable.entry(node).or_insert(HashSet::default()).len();
338338

339339
for sub_node in self.graph.0[&node].iter() {
340340
self.reachable.get_mut(&node).unwrap().insert(*sub_node);
341341

342342
let sub_reachable = self.reachable
343343
.entry(*sub_node)
344-
.or_insert(HashSet::new())
344+
.or_insert(HashSet::default())
345345
.clone();
346346

347347
for transitive in sub_reachable {
@@ -386,7 +386,7 @@ mod tests {
386386
nodes.as_ref().iter().cloned().map(Node).collect()
387387
}
388388

389-
let mut expected = HashMap::new();
389+
let mut expected = HashMap::default();
390390
expected.insert(Node(1), nodes([3, 4, 5, 6, 7, 8]));
391391
expected.insert(Node(2), nodes([2]));
392392
expected.insert(Node(3), nodes([3, 4, 5, 6, 7, 8]));

src/ir/analysis/sizedness.rs

+3-5
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,8 @@ use ir::context::{BindgenContext, TypeId};
55
use ir::item::IsOpaque;
66
use ir::traversal::EdgeKind;
77
use ir::ty::TypeKind;
8-
use std::cmp;
9-
use std::collections::HashMap;
10-
use std::collections::hash_map::Entry;
11-
use std::ops;
8+
use std::{cmp, ops};
9+
use {HashMap, Entry};
1210

1311
/// The result of the `Sizedness` analysis for an individual item.
1412
///
@@ -194,7 +192,7 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
194192
})
195193
.collect();
196194

197-
let sized = HashMap::new();
195+
let sized = HashMap::default();
198196

199197
SizednessAnalysis {
200198
ctx,

src/ir/analysis/template_params.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ use ir::item::{Item, ItemSet};
9494
use ir::template::{TemplateInstantiation, TemplateParameters};
9595
use ir::traversal::{EdgeKind, Trace};
9696
use ir::ty::TypeKind;
97-
use std::collections::{HashMap, HashSet};
97+
use {HashMap, HashSet};
9898

9999
/// An analysis that finds for each IR item its set of template parameters that
100100
/// it uses.
@@ -373,8 +373,8 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
373373
fn new(
374374
ctx: &'ctx BindgenContext,
375375
) -> UsedTemplateParameters<'ctx> {
376-
let mut used = HashMap::new();
377-
let mut dependencies = HashMap::new();
376+
let mut used = HashMap::default();
377+
let mut dependencies = HashMap::default();
378378
let whitelisted_items: HashSet<_> =
379379
ctx.whitelisted_items().iter().cloned().collect();
380380

src/ir/comp.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use peeking_take_while::PeekableExt;
1717
use std::cmp;
1818
use std::io;
1919
use std::mem;
20-
use std::collections::HashMap;
20+
use HashMap;
2121

2222
/// The kind of compound type.
2323
#[derive(Debug, Copy, Clone, PartialEq)]

0 commit comments

Comments
 (0)