Skip to content

Some perf tweaks #1473

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Dec 29, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ peeking_take_while = "0.1.2"
quote = { version = "0.6", default-features = false }
regex = "1.0"
which = "2.0"
hashbrown = "0.1"
# New validation in 0.3.6 breaks bindgen-integration:
# https://github.com/alexcrichton/proc-macro2/commit/489c642.
proc-macro2 = { version = "0.4", default-features = false }
Expand Down
15 changes: 8 additions & 7 deletions src/codegen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@ use proc_macro2::{self, Ident, Span};
use std;
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::{HashSet, VecDeque};
use std::collections::hash_map::{Entry, HashMap};
use std::collections::VecDeque;
use std::fmt::Write;
use std::iter;
use std::ops;
use std::str::FromStr;
use {HashMap, HashSet, Entry};

// Name of type defined in constified enum module
pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type";
Expand Down Expand Up @@ -2638,7 +2638,7 @@ impl CodeGenerator for Enum {
);

// A map where we keep a value -> variant relation.
let mut seen_values = HashMap::<_, Ident>::new();
let mut seen_values = HashMap::<_, Ident>::default();
let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
let is_toplevel = item.is_toplevel(ctx);

Expand Down Expand Up @@ -3546,11 +3546,12 @@ pub(crate) fn codegen(context: BindgenContext) -> (Vec<proc_macro2::TokenStream>

debug!("codegen: {:?}", context.options());

let codegen_items = context.codegen_items();
if context.options().emit_ir {
for &id in codegen_items {
let item = context.resolve_item(id);
println!("ir: {:?} = {:#?}", id, item);
let codegen_items = context.codegen_items();
for (id, item) in context.items() {
if codegen_items.contains(&id) {
println!("ir: {:?} = {:#?}", id, item);
}
}
}

Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/derive_copy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@ use ir::template::TemplateParameters;
use ir::traversal::EdgeKind;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
use {HashMap, HashSet};

/// An analysis that finds for each IR item whether copy cannot be derived.
///
Expand Down Expand Up @@ -103,7 +102,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveCopy<'ctx> {
type Output = HashSet<ItemId>;

fn new(ctx: &'ctx BindgenContext) -> CannotDeriveCopy<'ctx> {
let cannot_derive_copy = HashSet::new();
let cannot_derive_copy = HashSet::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

CannotDeriveCopy {
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/derive_debug.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@ use ir::item::IsOpaque;
use ir::traversal::EdgeKind;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
use {HashMap, HashSet};

/// An analysis that finds for each IR item whether debug cannot be derived.
///
Expand Down Expand Up @@ -104,7 +103,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveDebug<'ctx> {
type Output = HashSet<ItemId>;

fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDebug<'ctx> {
let cannot_derive_debug = HashSet::new();
let cannot_derive_debug = HashSet::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

CannotDeriveDebug {
Expand Down
7 changes: 3 additions & 4 deletions src/ir/analysis/derive_default.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@ use ir::traversal::EdgeKind;
use ir::traversal::Trace;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
use {HashMap, HashSet};

/// An analysis that finds for each IR item whether default cannot be derived.
///
Expand Down Expand Up @@ -99,8 +98,8 @@ impl<'ctx> MonotoneFramework for CannotDeriveDefault<'ctx> {
type Output = HashSet<ItemId>;

fn new(ctx: &'ctx BindgenContext) -> CannotDeriveDefault<'ctx> {
let mut dependencies = HashMap::new();
let cannot_derive_default = HashSet::new();
let mut dependencies = HashMap::default();
let cannot_derive_default = HashSet::default();

let whitelisted_items: HashSet<_> =
ctx.whitelisted_items().iter().cloned().collect();
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/derive_hash.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@ use ir::item::IsOpaque;
use ir::traversal::EdgeKind;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
use {HashMap, HashSet};

/// An analysis that finds for each IR item whether hash cannot be derived.
///
Expand Down Expand Up @@ -96,7 +95,7 @@ impl<'ctx> MonotoneFramework for CannotDeriveHash<'ctx> {
type Output = HashSet<ItemId>;

fn new(ctx: &'ctx BindgenContext) -> CannotDeriveHash<'ctx> {
let cannot_derive_hash = HashSet::new();
let cannot_derive_hash = HashSet::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

CannotDeriveHash {
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/derive_partialeq_or_partialord.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,7 @@ use ir::item::{Item, IsOpaque};
use ir::traversal::{EdgeKind, Trace};
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::{TypeKind, Type};
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use {HashMap, Entry};

/// An analysis that finds for each IR item whether `PartialEq`/`PartialOrd`
/// cannot be derived.
Expand Down Expand Up @@ -326,7 +325,7 @@ impl<'ctx> MonotoneFramework for CannotDerivePartialEqOrPartialOrd<'ctx> {
fn new(
ctx: &'ctx BindgenContext,
) -> CannotDerivePartialEqOrPartialOrd<'ctx> {
let can_derive_partialeq_or_partialord = HashMap::new();
let can_derive_partialeq_or_partialord = HashMap::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

CannotDerivePartialEqOrPartialOrd {
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/has_destructor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,7 @@ use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::comp::{CompKind, Field, FieldMethods};
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
use {HashMap, HashSet};

/// An analysis that finds for each IR item whether it has a destructor or not
///
Expand Down Expand Up @@ -73,7 +72,7 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
type Output = HashSet<ItemId>;

fn new(ctx: &'ctx BindgenContext) -> Self {
let have_destructor = HashSet::new();
let have_destructor = HashSet::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

HasDestructorAnalysis {
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/has_float.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
//! Determining which types have float.

use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use std::collections::HashSet;
use std::collections::HashMap;
use {HashSet, HashMap};
use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
Expand Down Expand Up @@ -84,7 +83,7 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
type Output = HashSet<ItemId>;

fn new(ctx: &'ctx BindgenContext) -> HasFloat<'ctx> {
let has_float = HashSet::new();
let has_float = HashSet::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

HasFloat {
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/has_type_param_in_array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@ use ir::comp::FieldMethods;
use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
use {HashMap, HashSet};

/// An analysis that finds for each IR item whether it has array or not.
///
Expand Down Expand Up @@ -92,7 +91,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
fn new(
ctx: &'ctx BindgenContext,
) -> HasTypeParameterInArray<'ctx> {
let has_type_parameter_in_array = HashSet::new();
let has_type_parameter_in_array = HashSet::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

HasTypeParameterInArray {
Expand Down
5 changes: 2 additions & 3 deletions src/ir/analysis/has_vtable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,8 @@ use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use std::cmp;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::ops;
use {HashMap, Entry};

/// The result of the `HasVtableAnalysis` for an individual item.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)]
Expand Down Expand Up @@ -148,7 +147,7 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
type Output = HashMap<ItemId, HasVtableResult>;

fn new(ctx: &'ctx BindgenContext) -> HasVtableAnalysis<'ctx> {
let have_vtable = HashMap::new();
let have_vtable = HashMap::default();
let dependencies = generate_dependencies(ctx, Self::consider_edge);

HasVtableAnalysis {
Expand Down
12 changes: 6 additions & 6 deletions src/ir/analysis/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ pub use self::sizedness::{Sizedness, SizednessAnalysis, SizednessResult};
use ir::context::{BindgenContext, ItemId};

use ir::traversal::{EdgeKind, Trace};
use std::collections::HashMap;
use HashMap;
use std::fmt;
use std::ops;

Expand Down Expand Up @@ -190,7 +190,7 @@ pub fn generate_dependencies<F>(
where
F: Fn(EdgeKind) -> bool,
{
let mut dependencies = HashMap::new();
let mut dependencies = HashMap::default();

for &item in ctx.whitelisted_items() {
dependencies.entry(item).or_insert(vec![]);
Expand Down Expand Up @@ -219,7 +219,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
use std::collections::{HashMap, HashSet};
use {HashMap, HashSet};

// Here we find the set of nodes that are reachable from any given
// node. This is a lattice mapping nodes to subsets of all nodes. Our join
Expand Down Expand Up @@ -334,14 +334,14 @@ mod tests {
// implementation. Don't copy this code outside of this test!

let original_size =
self.reachable.entry(node).or_insert(HashSet::new()).len();
self.reachable.entry(node).or_insert(HashSet::default()).len();

for sub_node in self.graph.0[&node].iter() {
self.reachable.get_mut(&node).unwrap().insert(*sub_node);

let sub_reachable = self.reachable
.entry(*sub_node)
.or_insert(HashSet::new())
.or_insert(HashSet::default())
.clone();

for transitive in sub_reachable {
Expand Down Expand Up @@ -386,7 +386,7 @@ mod tests {
nodes.as_ref().iter().cloned().map(Node).collect()
}

let mut expected = HashMap::new();
let mut expected = HashMap::default();
expected.insert(Node(1), nodes([3, 4, 5, 6, 7, 8]));
expected.insert(Node(2), nodes([2]));
expected.insert(Node(3), nodes([3, 4, 5, 6, 7, 8]));
Expand Down
8 changes: 3 additions & 5 deletions src/ir/analysis/sizedness.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,8 @@ use ir::context::{BindgenContext, TypeId};
use ir::item::IsOpaque;
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use std::cmp;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::ops;
use std::{cmp, ops};
use {HashMap, Entry};

/// The result of the `Sizedness` analysis for an individual item.
///
Expand Down Expand Up @@ -194,7 +192,7 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
})
.collect();

let sized = HashMap::new();
let sized = HashMap::default();

SizednessAnalysis {
ctx,
Expand Down
6 changes: 3 additions & 3 deletions src/ir/analysis/template_params.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ use ir::item::{Item, ItemSet};
use ir::template::{TemplateInstantiation, TemplateParameters};
use ir::traversal::{EdgeKind, Trace};
use ir::ty::TypeKind;
use std::collections::{HashMap, HashSet};
use {HashMap, HashSet};

/// An analysis that finds for each IR item its set of template parameters that
/// it uses.
Expand Down Expand Up @@ -373,8 +373,8 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
fn new(
ctx: &'ctx BindgenContext,
) -> UsedTemplateParameters<'ctx> {
let mut used = HashMap::new();
let mut dependencies = HashMap::new();
let mut used = HashMap::default();
let mut dependencies = HashMap::default();
let whitelisted_items: HashSet<_> =
ctx.whitelisted_items().iter().cloned().collect();

Expand Down
2 changes: 1 addition & 1 deletion src/ir/comp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ use peeking_take_while::PeekableExt;
use std::cmp;
use std::io;
use std::mem;
use std::collections::HashMap;
use HashMap;

/// The kind of compound type.
#[derive(Debug, Copy, Clone, PartialEq)]
Expand Down
Loading