diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index b391b353632aa..523a244c8361b 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -618,7 +618,7 @@ define_dep_nodes!( <'tcx> [input] Freevars(DefId), [input] MaybeUnusedTraitImport(DefId), - [] MaybeUnusedExternCrates, + [input] MaybeUnusedExternCrates, [] StabilityIndex, [input] AllCrateNums, [] ExportedSymbols(CrateNum), diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index 97ac1b256124d..c9205f67f661f 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -327,6 +327,7 @@ impl DepGraph { } } + #[inline] pub fn fingerprint_of(&self, dep_node: &DepNode) -> Fingerprint { match self.fingerprints.borrow().get(dep_node) { Some(&fingerprint) => fingerprint, @@ -340,6 +341,11 @@ impl DepGraph { self.data.as_ref().unwrap().previous.fingerprint_of(dep_node) } + #[inline] + pub fn prev_dep_node_index_of(&self, dep_node: &DepNode) -> SerializedDepNodeIndex { + self.data.as_ref().unwrap().previous.node_to_index(dep_node) + } + /// Indicates that a previous work product exists for `v`. This is /// invoked during initial start-up based on what nodes are clean /// (and what files exist in the incr. directory). diff --git a/src/librustc/dep_graph/prev.rs b/src/librustc/dep_graph/prev.rs index 17001bbb0c38a..6c43b5c5ff197 100644 --- a/src/librustc/dep_graph/prev.rs +++ b/src/librustc/dep_graph/prev.rs @@ -44,6 +44,11 @@ impl PreviousDepGraph { self.data.nodes[dep_node_index].0 } + #[inline] + pub fn node_to_index(&self, dep_node: &DepNode) -> SerializedDepNodeIndex { + self.index[dep_node] + } + #[inline] pub fn fingerprint_of(&self, dep_node: &DepNode) -> Option { self.index diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index 428f154c1b66a..f6fcff37ca53e 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -11,8 +11,7 @@ use ty; use rustc_data_structures::indexed_vec::Idx; -use serialize::{self, Encoder, Decoder}; - +use serialize; use std::fmt; use std::u32; @@ -32,6 +31,10 @@ newtype_index!(CrateNum /// A CrateNum value that indicates that something is wrong. const INVALID_CRATE = u32::MAX - 1, + + /// A special CrateNum that we use for the tcx.rcache when decoding from + /// the incr. comp. cache. + const RESERVED_FOR_INCR_COMP_CACHE = u32::MAX - 2, }); impl CrateNum { @@ -61,17 +64,8 @@ impl fmt::Display for CrateNum { } } -impl serialize::UseSpecializedEncodable for CrateNum { - fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u32(self.0) - } -} - -impl serialize::UseSpecializedDecodable for CrateNum { - fn default_decode(d: &mut D) -> Result { - d.read_u32().map(CrateNum) - } -} +impl serialize::UseSpecializedEncodable for CrateNum {} +impl serialize::UseSpecializedDecodable for CrateNum {} /// A DefIndex is an index into the hir-map for a crate, identifying a /// particular definition. It should really be considered an interned @@ -88,6 +82,7 @@ impl serialize::UseSpecializedDecodable for CrateNum { /// don't have to care about these ranges. newtype_index!(DefIndex { + ENCODABLE = custom DEBUG_FORMAT = custom, /// The start of the "high" range of DefIndexes. 
@@ -146,6 +141,9 @@ impl DefIndex { } } +impl serialize::UseSpecializedEncodable for DefIndex {} +impl serialize::UseSpecializedDecodable for DefIndex {} + #[derive(Copy, Clone, Eq, PartialEq, Hash)] pub enum DefIndexAddressSpace { Low = 0, @@ -166,7 +164,7 @@ impl DefIndexAddressSpace { /// A DefId identifies a particular *definition*, by combining a crate /// index and a def index. -#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)] +#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, Hash, Copy)] pub struct DefId { pub krate: CrateNum, pub index: DefIndex, @@ -188,14 +186,58 @@ impl fmt::Debug for DefId { } } - impl DefId { /// Make a local `DefId` with the given index. + #[inline] pub fn local(index: DefIndex) -> DefId { DefId { krate: LOCAL_CRATE, index: index } } - pub fn is_local(&self) -> bool { + #[inline] + pub fn is_local(self) -> bool { self.krate == LOCAL_CRATE } + + #[inline] + pub fn to_local(self) -> LocalDefId { + LocalDefId::from_def_id(self) + } } + +impl serialize::UseSpecializedEncodable for DefId {} +impl serialize::UseSpecializedDecodable for DefId {} + +/// A LocalDefId is equivalent to a DefId with `krate == LOCAL_CRATE`. Since +/// we encode this information in the type, we can ensure at compile time that +/// no DefIds from upstream crates get thrown into the mix. There are quite a +/// few cases where we know that only DefIds from the local crate are expected +/// and a DefId from a different crate would signify a bug somewhere. This +/// is when LocalDefId comes in handy. +#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct LocalDefId(DefIndex); + +impl LocalDefId { + + #[inline] + pub fn from_def_id(def_id: DefId) -> LocalDefId { + assert!(def_id.is_local()); + LocalDefId(def_id.index) + } + + #[inline] + pub fn to_def_id(self) -> DefId { + DefId { + krate: LOCAL_CRATE, + index: self.0 + } + } +} + +impl fmt::Debug for LocalDefId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.to_def_id().fmt(f) + } +} + +impl serialize::UseSpecializedEncodable for LocalDefId {} +impl serialize::UseSpecializedDecodable for LocalDefId {} diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 453d30dde7595..d0a7ac392014e 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -17,7 +17,7 @@ pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData, use dep_graph::{DepGraph, DepNode, DepKind, DepNodeIndex}; -use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndexAddressSpace}; +use hir::def_id::{CRATE_DEF_INDEX, DefId, LocalDefId, DefIndexAddressSpace}; use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId, CRATE_NODE_ID}; @@ -359,6 +359,16 @@ impl<'hir> Map<'hir> { self.definitions.as_local_node_id(DefId::local(def_index)).unwrap() } + #[inline] + pub fn local_def_id_to_hir_id(&self, def_id: LocalDefId) -> HirId { + self.definitions.def_index_to_hir_id(def_id.to_def_id().index) + } + + #[inline] + pub fn local_def_id_to_node_id(&self, def_id: LocalDefId) -> NodeId { + self.definitions.as_local_node_id(def_id.to_def_id()).unwrap() + } + fn entry_count(&self) -> usize { self.map.len() } diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index c9b1d70e7b60d..3bc2736586f45 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -45,6 +45,7 @@ use ty::AdtKind; use rustc_data_structures::indexed_vec; +use serialize::{self, Encoder, Encodable, Decoder, Decodable}; use std::collections::BTreeMap; use std::fmt; @@ -85,13 +86,37 @@ 
pub mod svh; /// the local_id part of the HirId changing, which is a very useful property in /// incremental compilation where we have to persist things through changes to /// the code base. -#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, - RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] pub struct HirId { pub owner: DefIndex, pub local_id: ItemLocalId, } +impl serialize::UseSpecializedEncodable for HirId { + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { + let HirId { + owner, + local_id, + } = *self; + + owner.encode(s)?; + local_id.encode(s) + } +} + +impl serialize::UseSpecializedDecodable for HirId { + fn default_decode(d: &mut D) -> Result { + let owner = DefIndex::decode(d)?; + let local_id = ItemLocalId::decode(d)?; + + Ok(HirId { + owner, + local_id + }) + } +} + + /// An `ItemLocalId` uniquely identifies something within a given "item-like", /// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no /// guarantee that the numerical value of a given `ItemLocalId` corresponds to diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 6b78cd473be8f..11ac2bf83be3e 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -13,7 +13,7 @@ use hir; use hir::map::DefPathHash; -use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX}; +use hir::def_id::{DefId, LocalDefId, CrateNum, CRATE_DEF_INDEX}; use ich::{StableHashingContext, NodeIdHashingMode}; use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher, StableHasherResult}; @@ -38,6 +38,24 @@ impl<'gcx> ToStableHashKey> for DefId { } } +impl<'gcx> HashStable> for LocalDefId { + #[inline] + fn hash_stable(&self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher) { + hcx.def_path_hash(self.to_def_id()).hash_stable(hcx, hasher); + } +} + +impl<'gcx> ToStableHashKey> for LocalDefId { + type KeyType = DefPathHash; + + #[inline] + fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash { + hcx.def_path_hash(self.to_def_id()) + } +} + impl<'gcx> HashStable> for CrateNum { #[inline] fn hash_stable(&self, diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index 628538b41c5d8..5d7141949e389 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -24,7 +24,7 @@ use hir; use hir::def; -use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; +use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use hir::map as hir_map; use hir::map::definitions::{Definitions, DefKey, DefPathTable}; use hir::svh::Svh; @@ -180,7 +180,7 @@ impl EncodedMetadata { /// upstream crate. 
#[derive(Debug, RustcEncodable, RustcDecodable, Copy, Clone)] pub struct EncodedMetadataHash { - pub def_index: DefIndex, + pub def_index: u32, pub hash: ich::Fingerprint, } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 0383d5ca68232..9018b9fe590b2 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -20,7 +20,7 @@ use self::TrackMatchMode::*; use self::OverloadedCallType::*; use hir::def::Def; -use hir::def_id::{DefId}; +use hir::def_id::DefId; use infer::InferCtxt; use middle::mem_categorization as mc; use middle::region; @@ -915,7 +915,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { let closure_def_id = self.tcx().hir.local_def_id(closure_expr.id); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_def_id.index + closure_expr_id: closure_def_id.to_local(), }; let upvar_capture = self.mc.tables.upvar_capture(upvar_id); let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id, diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index fc10406c8cebd..6ea8595533991 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -70,7 +70,7 @@ pub use self::Note::*; use self::Aliasability::*; use middle::region; -use hir::def_id::{DefId, DefIndex}; +use hir::def_id::{DefId, LocalDefId}; use hir::map as hir_map; use infer::InferCtxt; use hir::def::{Def, CtorKind}; @@ -191,7 +191,7 @@ pub type cmt<'tcx> = Rc>; pub enum ImmutabilityBlame<'tcx> { ImmLocal(ast::NodeId), - ClosureEnv(DefIndex), + ClosureEnv(LocalDefId), LocalDeref(ast::NodeId), AdtFieldDeref(&'tcx ty::AdtDef, &'tcx ty::FieldDef) } @@ -758,11 +758,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { } }; - let closure_expr_def_index = self.tcx.hir.local_def_id(fn_node_id).index; + let closure_expr_def_id = self.tcx.hir.local_def_id(fn_node_id); let var_hir_id = self.tcx.hir.node_to_hir_id(var_id); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_expr_def_index + closure_expr_id: closure_expr_def_id.to_local(), }; let var_ty = self.node_ty(var_hir_id)?; @@ -837,7 +837,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // The environment of a closure is guaranteed to // outlive any bindings introduced in the body of the // closure itself. - scope: DefId::local(upvar_id.closure_expr_id), + scope: upvar_id.closure_expr_id.to_def_id(), bound_region: ty::BrEnv })); diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index ffb8144e07e5c..b7abcc03132cb 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -1042,6 +1042,8 @@ options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, "enable incremental compilation (experimental)"), incremental_cc: bool = (false, parse_bool, [UNTRACKED], "enable cross-crate incremental compilation (even more experimental)"), + incremental_queries: bool = (true, parse_bool, [UNTRACKED], + "enable incremental compilation support for queries (experimental)"), incremental_info: bool = (false, parse_bool, [UNTRACKED], "print high-level information about incremental reuse (or the lack thereof)"), incremental_dump_hash: bool = (false, parse_bool, [UNTRACKED], diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs index 1c793920bf2e4..fbb14f39ade34 100644 --- a/src/librustc/ty/codec.rs +++ b/src/librustc/ty/codec.rs @@ -19,7 +19,7 @@ use hir::def_id::{DefId, CrateNum}; use middle::const_val::ByteArray; use rustc_data_structures::fx::FxHashMap; -use rustc_serialize::{Decodable, Decoder, Encoder, Encodable}; +use rustc_serialize::{Decodable, Decoder, Encoder, Encodable, opaque}; use std::hash::Hash; use std::intrinsics; use ty::{self, Ty, TyCtxt}; @@ -53,6 +53,13 @@ pub trait TyEncoder: Encoder { fn position(&self) -> usize; } +impl<'buf> TyEncoder for opaque::Encoder<'buf> { + #[inline] + fn position(&self) -> usize { + self.position() + } +} + /// Encode the given value or a previously cached shorthand. pub fn encode_with_shorthand(encoder: &mut E, value: &T, @@ -113,6 +120,8 @@ pub trait TyDecoder<'a, 'tcx: 'a>: Decoder { fn peek_byte(&self) -> u8; + fn position(&self) -> usize; + fn cached_ty_for_shorthand(&mut self, shorthand: usize, or_insert_with: F) @@ -129,6 +138,7 @@ pub trait TyDecoder<'a, 'tcx: 'a>: Decoder { } } +#[inline] pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -137,12 +147,12 @@ pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result Ok(decoder.map_encoded_cnum_to_current(cnum)) } +#[inline] pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, { // Handle shorthands first, if we have an usize > 0x80. - // if self.opaque.data[self.opaque.position()] & 0x80 != 0 { if decoder.positioned_at_shorthand() { let pos = decoder.read_usize()?; assert!(pos >= SHORTHAND_OFFSET); @@ -157,6 +167,7 @@ pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> } } +#[inline] pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -180,6 +191,7 @@ pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D) }) } +#[inline] pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, D::Error> where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -189,6 +201,7 @@ pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, Ok(tcx.mk_substs((0..len).map(|_| Decodable::decode(decoder)))?) } +#[inline] pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -196,6 +209,7 @@ pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result, D Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?)) } +#[inline] pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Slice>, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -205,6 +219,7 @@ pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D) Ok(decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?) 
} +#[inline] pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::AdtDef, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -214,6 +229,7 @@ pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D) Ok(decoder.tcx().adt_def(def_id)) } +#[inline] pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Slice>, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -224,6 +240,7 @@ pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D) .mk_existential_predicates((0..len).map(|_| Decodable::decode(decoder)))?) } +#[inline] pub fn decode_byte_array<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -234,6 +251,7 @@ pub fn decode_byte_array<'a, 'tcx, D>(decoder: &mut D) }) } +#[inline] pub fn decode_const<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Const<'tcx>, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -241,3 +259,138 @@ pub fn decode_const<'a, 'tcx, D>(decoder: &mut D) { Ok(decoder.tcx().mk_const(Decodable::decode(decoder)?)) } + +#[macro_export] +macro_rules! __impl_decoder_methods { + ($($name:ident -> $ty:ty;)*) => { + $(fn $name(&mut self) -> Result<$ty, Self::Error> { + self.opaque.$name() + })* + } +} + +#[macro_export] +macro_rules! implement_ty_decoder { + ($DecoderName:ident <$($typaram:tt),*>) => { + mod __ty_decoder_impl { + use super::$DecoderName; + use $crate::ty; + use $crate::ty::codec::*; + use $crate::ty::subst::Substs; + use $crate::hir::def_id::{CrateNum}; + use $crate::middle::const_val::ByteArray; + use rustc_serialize::{Decoder, SpecializedDecoder}; + use std::borrow::Cow; + + impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> { + type Error = String; + + __impl_decoder_methods! { + read_nil -> (); + + read_u128 -> u128; + read_u64 -> u64; + read_u32 -> u32; + read_u16 -> u16; + read_u8 -> u8; + read_usize -> usize; + + read_i128 -> i128; + read_i64 -> i64; + read_i32 -> i32; + read_i16 -> i16; + read_i8 -> i8; + read_isize -> isize; + + read_bool -> bool; + read_f64 -> f64; + read_f32 -> f32; + read_char -> char; + read_str -> Cow; + } + + fn error(&mut self, err: &str) -> Self::Error { + self.opaque.error(err) + } + } + + // FIXME(#36588) These impls are horribly unsound as they allow + // the caller to pick any lifetime for 'tcx, including 'static, + // by using the unspecialized proxies to them. 
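// Illustrative sketch, not part of the patch: the back-reference ("shorthand")
// scheme that `encode_with_shorthand` and `decode_ty` above rely on. The first
// occurrence of a value is written in full and its offset remembered; later
// occurrences are written as a marker plus that offset, and the decoder either
// re-reads from the offset or serves the value from a cache (the role played by
// `cached_ty_for_shorthand`). The framing below (explicit tag bytes, string
// values, the `Writer`/`Reader` names) is invented for the example; the real
// code folds the marker into the high bit of the leading byte instead.

use std::collections::HashMap;

const TAG_INLINE: u8 = 0; // a full value follows
const TAG_BACKREF: u8 = 1; // a u32 offset of an earlier full value follows

struct Writer {
    buf: Vec<u8>,
    seen: HashMap<String, u32>, // value -> offset of its full encoding
}

impl Writer {
    fn write_str(&mut self, s: &str) {
        if let Some(&offset) = self.seen.get(s) {
            self.buf.push(TAG_BACKREF);
            self.buf.extend_from_slice(&offset.to_le_bytes());
        } else {
            self.seen.insert(s.to_string(), self.buf.len() as u32);
            self.buf.push(TAG_INLINE);
            self.buf.push(s.len() as u8);
            self.buf.extend_from_slice(s.as_bytes());
        }
    }
}

struct Reader<'a> {
    data: &'a [u8],
    pos: usize,
    cache: HashMap<usize, String>, // offset -> already-decoded value
}

impl<'a> Reader<'a> {
    fn read_str(&mut self) -> String {
        let (value, next) = self.read_at(self.pos);
        self.pos = next;
        value
    }

    // Decode the entry starting at `at`; return the value and the position
    // just past the entry.
    fn read_at(&mut self, at: usize) -> (String, usize) {
        match self.data[at] {
            TAG_INLINE => {
                let len = self.data[at + 1] as usize;
                let s = String::from_utf8(self.data[at + 2..at + 2 + len].to_vec()).unwrap();
                self.cache.insert(at, s.clone());
                (s, at + 2 + len)
            }
            TAG_BACKREF => {
                let mut bytes = [0u8; 4];
                bytes.copy_from_slice(&self.data[at + 1..at + 5]);
                let target = u32::from_le_bytes(bytes) as usize;
                let cached = self.cache.get(&target).cloned();
                let s = match cached {
                    Some(s) => s,
                    None => self.read_at(target).0,
                };
                (s, at + 5)
            }
            other => panic!("unknown tag {}", other),
        }
    }
}

fn main() {
    let mut w = Writer { buf: Vec::new(), seen: HashMap::new() };
    w.write_str("fn(&str) -> usize");
    w.write_str("fn(&str) -> usize"); // second write becomes a 5-byte back-reference
    let mut r = Reader { data: &w.buf, pos: 0, cache: HashMap::new() };
    assert_eq!(r.read_str(), "fn(&str) -> usize");
    assert_eq!(r.read_str(), "fn(&str) -> usize");
}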
+ + impl<$($typaram),*> SpecializedDecoder + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result { + decode_cnum(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_ty(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) + -> Result, Self::Error> { + decode_predicates(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx Substs<'tcx>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { + decode_substs(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_region(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::Slice>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) + -> Result<&'tcx ty::Slice>, Self::Error> { + decode_ty_slice(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::AdtDef> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { + decode_adt_def(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::Slice>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) + -> Result<&'tcx ty::Slice>, Self::Error> { + decode_existential_predicate_slice(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_byte_array(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx $crate::ty::Const<'tcx>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { + decode_const(self) + } + } + } + } +} + diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 37c4346a7dc93..9daf152dc4216 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -769,7 +769,7 @@ impl<'gcx> HashStable> for TypeckTables<'gcx> { }; let closure_def_id = DefId { krate: local_id_root.krate, - index: closure_expr_id, + index: closure_expr_id.to_def_id().index, }; (hcx.def_path_hash(var_owner_def_id), var_id.local_id, @@ -1311,9 +1311,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn serialize_query_result_cache(self, encoder: &mut E) -> Result<(), E::Error> - where E: ::rustc_serialize::Encoder + where E: ty::codec::TyEncoder { - self.on_disk_query_result_cache.serialize(encoder) + self.on_disk_query_result_cache.serialize(self.global_tcx(), self.cstore, encoder) } } diff --git a/src/librustc/ty/maps/config.rs b/src/librustc/ty/maps/config.rs index deaafd1efed45..066b80cefa4b5 100644 --- a/src/librustc/ty/maps/config.rs +++ b/src/librustc/ty/maps/config.rs @@ -8,6 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
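// Illustrative sketch, not part of the patch: the shape of the per-query
// on-disk-cache hooks that the ty/maps/config.rs changes which follow add to
// `QueryDescription` -- a default "never cached" behaviour that individual
// queries such as `typeck_tables_of` override. Everything here (`Query`,
// `FakeTcx`, the tuple key) is a stand-in invented for the example.

struct FakeTcx;
type SerializedDepNodeIndex = u32;

trait Query {
    type Key;
    type Value;

    fn describe(key: &Self::Key) -> String;

    // By default a query's result is not cached on disk ...
    fn cache_on_disk(_key: &Self::Key) -> bool {
        false
    }

    // ... and trying to load one from disk is a bug.
    fn load_from_disk(_tcx: &FakeTcx, _id: SerializedDepNodeIndex) -> Self::Value {
        panic!("load_from_disk() called for a query that is never cached")
    }
}

struct TypeckTablesOf;

impl Query for TypeckTablesOf {
    type Key = (bool /* is_local */, u64 /* item id */);
    type Value = String; // stand-in for the real tables type

    fn describe(key: &Self::Key) -> String {
        format!("type-checking item {}", key.1)
    }

    // Only results for local definitions are cached.
    fn cache_on_disk(key: &Self::Key) -> bool {
        key.0
    }

    fn load_from_disk(_tcx: &FakeTcx, id: SerializedDepNodeIndex) -> Self::Value {
        format!("tables decoded from cache entry {}", id)
    }
}

fn main() {
    let local_item = (true, 42);
    assert!(TypeckTablesOf::cache_on_disk(&local_item));
    assert_eq!(TypeckTablesOf::describe(&local_item), "type-checking item 42");
    let _tables = TypeckTablesOf::load_from_disk(&FakeTcx, 7);
}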
+use dep_graph::SerializedDepNodeIndex; use hir::def_id::{CrateNum, DefId, DefIndex}; use ty::{self, Ty, TyCtxt}; use ty::maps::queries; @@ -23,11 +24,21 @@ pub trait QueryConfig { type Value; } -pub(super) trait QueryDescription: QueryConfig { +pub(super) trait QueryDescription<'tcx>: QueryConfig { fn describe(tcx: TyCtxt, key: Self::Key) -> String; + + fn cache_on_disk(_: Self::Key) -> bool { + false + } + + fn load_from_disk<'a>(_: TyCtxt<'a, 'tcx, 'tcx>, + _: SerializedDepNodeIndex) + -> Self::Value { + bug!("QueryDescription::load_from_disk() called for unsupport query.") + } } -impl> QueryDescription for M { +impl<'tcx, M: QueryConfig> QueryDescription<'tcx> for M { default fn describe(tcx: TyCtxt, def_id: DefId) -> String { if !tcx.sess.verbose() { format!("processing `{}`", tcx.item_path_str(def_id)) @@ -38,50 +49,50 @@ impl> QueryDescription for M { } } -impl<'tcx> QueryDescription for queries::is_copy_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_copy_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` is `Copy`", env.value) } } -impl<'tcx> QueryDescription for queries::is_sized_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_sized_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` is `Sized`", env.value) } } -impl<'tcx> QueryDescription for queries::is_freeze_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_freeze_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` is freeze", env.value) } } -impl<'tcx> QueryDescription for queries::needs_drop_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::needs_drop_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` needs drop", env.value) } } -impl<'tcx> QueryDescription for queries::layout_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::layout_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing layout of `{}`", env.value) } } -impl<'tcx> QueryDescription for queries::super_predicates_of<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::super_predicates_of<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("computing the supertraits of `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::erase_regions_ty<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::erase_regions_ty<'tcx> { fn describe(_tcx: TyCtxt, ty: Ty<'tcx>) -> String { format!("erasing regions from `{:?}`", ty) } } -impl<'tcx> QueryDescription for queries::type_param_predicates<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::type_param_predicates<'tcx> { fn describe(tcx: TyCtxt, (_, def_id): (DefId, DefId)) -> String { let id = tcx.hir.as_local_node_id(def_id).unwrap(); format!("computing the bounds for type parameter `{}`", @@ -89,452 +100,468 @@ impl<'tcx> QueryDescription for queries::type_param_predicates<'tcx> { } } -impl<'tcx> QueryDescription for queries::coherent_trait<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::coherent_trait<'tcx> { fn describe(tcx: TyCtxt, (_, def_id): (CrateNum, DefId)) -> String { format!("coherence checking all impls of trait `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> { +impl<'tcx> QueryDescription<'tcx> for 
queries::crate_inherent_impls<'tcx> { fn describe(_: TyCtxt, k: CrateNum) -> String { format!("all inherent impls defined in crate `{:?}`", k) } } -impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_inherent_impls_overlap_check<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("check for overlap between inherent impls defined in this crate") } } -impl<'tcx> QueryDescription for queries::crate_variances<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_variances<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("computing the variances for items in this crate") } } -impl<'tcx> QueryDescription for queries::mir_shims<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::mir_shims<'tcx> { fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String { format!("generating MIR shim for `{}`", tcx.item_path_str(def.def_id())) } } -impl<'tcx> QueryDescription for queries::privacy_access_levels<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::privacy_access_levels<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("privacy access levels") } } -impl<'tcx> QueryDescription for queries::typeck_item_bodies<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::typeck_item_bodies<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("type-checking all item bodies") } } -impl<'tcx> QueryDescription for queries::reachable_set<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::reachable_set<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("reachability") } } -impl<'tcx> QueryDescription for queries::const_eval<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::const_eval<'tcx> { fn describe(tcx: TyCtxt, key: ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>) -> String { format!("const-evaluating `{}`", tcx.item_path_str(key.value.0)) } } -impl<'tcx> QueryDescription for queries::mir_keys<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::mir_keys<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("getting a list of all mir_keys") } } -impl<'tcx> QueryDescription for queries::symbol_name<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::symbol_name<'tcx> { fn describe(_tcx: TyCtxt, instance: ty::Instance<'tcx>) -> String { format!("computing the symbol for `{}`", instance) } } -impl<'tcx> QueryDescription for queries::describe_def<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::describe_def<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("describe_def") } } -impl<'tcx> QueryDescription for queries::def_span<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::def_span<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("def_span") } } -impl<'tcx> QueryDescription for queries::lookup_stability<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::lookup_stability<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("stability") } } -impl<'tcx> QueryDescription for queries::lookup_deprecation_entry<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::lookup_deprecation_entry<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("deprecation") } } -impl<'tcx> QueryDescription for queries::item_attrs<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::item_attrs<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("item_attrs") } } -impl<'tcx> QueryDescription for queries::is_exported_symbol<'tcx> { +impl<'tcx> QueryDescription<'tcx> for 
queries::is_exported_symbol<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("is_exported_symbol") } } -impl<'tcx> QueryDescription for queries::fn_arg_names<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::fn_arg_names<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("fn_arg_names") } } -impl<'tcx> QueryDescription for queries::impl_parent<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::impl_parent<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("impl_parent") } } -impl<'tcx> QueryDescription for queries::trait_of_item<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::trait_of_item<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("trait_of_item") } } -impl<'tcx> QueryDescription for queries::item_body_nested_bodies<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::item_body_nested_bodies<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("nested item bodies of `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::const_is_rvalue_promotable_to_static<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::const_is_rvalue_promotable_to_static<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("const checking if rvalue is promotable to static `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::rvalue_promotable_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::rvalue_promotable_map<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("checking which parts of `{}` are promotable to static", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::is_mir_available<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_mir_available<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("checking if item is mir available: `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::trans_fulfill_obligation<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::trans_fulfill_obligation<'tcx> { fn describe(tcx: TyCtxt, key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> String { format!("checking if `{}` fulfills its obligations", tcx.item_path_str(key.1.def_id())) } } -impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::trait_impls_of<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("trait impls of `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_object_safe<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("determine object safety of trait `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::is_const_fn<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_const_fn<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("checking if item is const fn: `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::dylib_dependency_formats<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::dylib_dependency_formats<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "dylib dependency formats of crate".to_string() } } -impl<'tcx> QueryDescription for queries::is_panic_runtime<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_panic_runtime<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "checking if the crate is_panic_runtime".to_string() } } -impl<'tcx> QueryDescription for 
queries::is_compiler_builtins<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_compiler_builtins<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "checking if the crate is_compiler_builtins".to_string() } } -impl<'tcx> QueryDescription for queries::has_global_allocator<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::has_global_allocator<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "checking if the crate has_global_allocator".to_string() } } -impl<'tcx> QueryDescription for queries::extern_crate<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::extern_crate<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { "getting crate's ExternCrateData".to_string() } } -impl<'tcx> QueryDescription for queries::lint_levels<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::lint_levels<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("computing the lint levels for items in this crate") } } -impl<'tcx> QueryDescription for queries::specializes<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::specializes<'tcx> { fn describe(_tcx: TyCtxt, _: (DefId, DefId)) -> String { format!("computing whether impls specialize one another") } } -impl<'tcx> QueryDescription for queries::in_scope_traits_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::in_scope_traits_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("traits in scope at a block") } } -impl<'tcx> QueryDescription for queries::is_no_builtins<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_no_builtins<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("test whether a crate has #![no_builtins]") } } -impl<'tcx> QueryDescription for queries::panic_strategy<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::panic_strategy<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("query a crate's configured panic strategy") } } -impl<'tcx> QueryDescription for queries::is_profiler_runtime<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_profiler_runtime<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("query a crate is #![profiler_runtime]") } } -impl<'tcx> QueryDescription for queries::is_sanitizer_runtime<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_sanitizer_runtime<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("query a crate is #![sanitizer_runtime]") } } -impl<'tcx> QueryDescription for queries::exported_symbol_ids<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::exported_symbol_ids<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the exported symbols of a crate") } } -impl<'tcx> QueryDescription for queries::native_libraries<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::native_libraries<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the native libraries of a linked crate") } } -impl<'tcx> QueryDescription for queries::plugin_registrar_fn<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::plugin_registrar_fn<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the plugin registrar for a crate") } } -impl<'tcx> QueryDescription for queries::derive_registrar_fn<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::derive_registrar_fn<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the derive registrar for a crate") } } -impl<'tcx> QueryDescription for queries::crate_disambiguator<'tcx> { +impl<'tcx> 
QueryDescription<'tcx> for queries::crate_disambiguator<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the disambiguator a crate") } } -impl<'tcx> QueryDescription for queries::crate_hash<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_hash<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the hash a crate") } } -impl<'tcx> QueryDescription for queries::original_crate_name<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::original_crate_name<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the original name a crate") } } -impl<'tcx> QueryDescription for queries::implementations_of_trait<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::implementations_of_trait<'tcx> { fn describe(_tcx: TyCtxt, _: (CrateNum, DefId)) -> String { format!("looking up implementations of a trait in a crate") } } -impl<'tcx> QueryDescription for queries::all_trait_implementations<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::all_trait_implementations<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up all (?) trait implementations") } } -impl<'tcx> QueryDescription for queries::link_args<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::link_args<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up link arguments for a crate") } } -impl<'tcx> QueryDescription for queries::named_region_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::named_region_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("looking up a named region") } } -impl<'tcx> QueryDescription for queries::is_late_bound_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_late_bound_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("testing if a region is late boudn") } } -impl<'tcx> QueryDescription for queries::object_lifetime_defaults_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::object_lifetime_defaults_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("looking up lifetime defaults for a region") } } -impl<'tcx> QueryDescription for queries::dep_kind<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::dep_kind<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("fetching what a dependency looks like") } } -impl<'tcx> QueryDescription for queries::crate_name<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_name<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("fetching what a crate is named") } } -impl<'tcx> QueryDescription for queries::get_lang_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::get_lang_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the lang items map") } } -impl<'tcx> QueryDescription for queries::defined_lang_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::defined_lang_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the lang items defined in a crate") } } -impl<'tcx> QueryDescription for queries::missing_lang_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::missing_lang_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the missing lang items in a crate") } } -impl<'tcx> QueryDescription for queries::visible_parent_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::visible_parent_map<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { 
format!("calculating the visible parent map") } } -impl<'tcx> QueryDescription for queries::missing_extern_crate_item<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::missing_extern_crate_item<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("seeing if we're missing an `extern crate` item for this crate") } } -impl<'tcx> QueryDescription for queries::used_crate_source<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::used_crate_source<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking at the source for a crate") } } -impl<'tcx> QueryDescription for queries::postorder_cnums<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::postorder_cnums<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("generating a postorder list of CrateNums") } } -impl<'tcx> QueryDescription for queries::maybe_unused_extern_crates<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::maybe_unused_extern_crates<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up all possibly unused extern crates") } } -impl<'tcx> QueryDescription for queries::stability_index<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::stability_index<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the stability index for the local crate") } } -impl<'tcx> QueryDescription for queries::all_crate_nums<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::all_crate_nums<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("fetching all foreign CrateNum instances") } } -impl<'tcx> QueryDescription for queries::exported_symbols<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::exported_symbols<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("exported_symbols") } } -impl<'tcx> QueryDescription for queries::collect_and_partition_translation_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::collect_and_partition_translation_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("collect_and_partition_translation_items") } } -impl<'tcx> QueryDescription for queries::codegen_unit<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::codegen_unit<'tcx> { fn describe(_tcx: TyCtxt, _: InternedString) -> String { format!("codegen_unit") } } -impl<'tcx> QueryDescription for queries::compile_codegen_unit<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::compile_codegen_unit<'tcx> { fn describe(_tcx: TyCtxt, _: InternedString) -> String { format!("compile_codegen_unit") } } -impl<'tcx> QueryDescription for queries::output_filenames<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::output_filenames<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("output_filenames") } } -impl<'tcx> QueryDescription for queries::has_clone_closures<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::has_clone_closures<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("seeing if the crate has enabled `Clone` closures") } } -impl<'tcx> QueryDescription for queries::vtable_methods<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::vtable_methods<'tcx> { fn describe(tcx: TyCtxt, key: ty::PolyTraitRef<'tcx> ) -> String { format!("finding all methods for trait {}", tcx.item_path_str(key.def_id())) } } -impl<'tcx> QueryDescription for queries::has_copy_closures<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::has_copy_closures<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("seeing if the crate 
has enabled `Copy` closures") } } -impl<'tcx> QueryDescription for queries::fully_normalize_monormophic_ty<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::fully_normalize_monormophic_ty<'tcx> { fn describe(_tcx: TyCtxt, _: Ty) -> String { format!("normalizing types") } } + +impl<'tcx> QueryDescription<'tcx> for queries::typeck_tables_of<'tcx> { + #[inline] + fn cache_on_disk(def_id: Self::Key) -> bool { + def_id.is_local() + } + + fn load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex) + -> Self::Value { + let typeck_tables: ty::TypeckTables<'tcx> = tcx.on_disk_query_result_cache + .load_query_result(tcx, id); + tcx.alloc_tables(typeck_tables) + } +} + diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 24ce8fb299598..53ca9b3851d5e 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -9,24 +9,42 @@ // except according to those terms. use dep_graph::{DepNodeIndex, SerializedDepNodeIndex}; -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; use errors::Diagnostic; +use hir; +use hir::def_id::{CrateNum, DefIndex, DefId, LocalDefId, + RESERVED_FOR_INCR_COMP_CACHE, LOCAL_CRATE}; +use hir::map::definitions::DefPathHash; +use middle::cstore::CrateStore; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, - SpecializedDecoder}; -use session::Session; -use std::borrow::Cow; + SpecializedDecoder, SpecializedEncoder, + UseSpecializedDecodable, UseSpecializedEncodable}; +use session::{CrateDisambiguator, Session}; use std::cell::RefCell; use std::collections::BTreeMap; use std::mem; +use syntax::ast::NodeId; use syntax::codemap::{CodeMap, StableFilemapId}; use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; +use ty; +use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; +use ty::context::TyCtxt; + +// Some magic values used for verifying that encoding and decoding. These are +// basically random numbers. +const PREV_DIAGNOSTICS_TAG: u64 = 0x1234_5678_A1A1_A1A1; +const QUERY_RESULT_INDEX_TAG: u64 = 0x1234_5678_C3C3_C3C3; /// `OnDiskCache` provides an interface to incr. comp. data cached from the /// previous compilation session. This data will eventually include the results /// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and /// any diagnostics that have been emitted during a query. pub struct OnDiskCache<'sess> { + + // The complete cache data in serialized form. + serialized_data: Vec, + // The diagnostics emitted during the previous compilation session. prev_diagnostics: FxHashMap>, @@ -34,68 +52,120 @@ pub struct OnDiskCache<'sess> { // compilation session. current_diagnostics: RefCell>>, - // This will eventually be needed for creating Decoders that can rebase - // spans. - _prev_filemap_starts: BTreeMap, + prev_cnums: Vec<(u32, String, CrateDisambiguator)>, + cnum_map: RefCell>>>, + + prev_filemap_starts: BTreeMap, codemap: &'sess CodeMap, + + // A map from dep-node to the position of the cached query result in + // `serialized_data`. + query_result_index: FxHashMap, } // This type is used only for (de-)serialization. #[derive(RustcEncodable, RustcDecodable)] struct Header { prev_filemap_starts: BTreeMap, + prev_cnums: Vec<(u32, String, CrateDisambiguator)>, } -// This type is used only for (de-)serialization. 
-#[derive(RustcEncodable, RustcDecodable)] -struct Body { - diagnostics: Vec<(SerializedDepNodeIndex, Vec)>, -} +type EncodedPrevDiagnostics = Vec<(SerializedDepNodeIndex, Vec)>; +type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, usize)>; impl<'sess> OnDiskCache<'sess> { /// Create a new OnDiskCache instance from the serialized data in `data`. - /// Note that the current implementation (which only deals with diagnostics - /// so far) will eagerly deserialize the complete cache. Once we are - /// dealing with larger amounts of data (i.e. cached query results), - /// deserialization will need to happen lazily. - pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> { + pub fn new(sess: &'sess Session, data: Vec, start_pos: usize) -> OnDiskCache<'sess> { debug_assert!(sess.opts.incremental.is_some()); - let mut decoder = opaque::Decoder::new(&data[..], 0); - let header = Header::decode(&mut decoder).unwrap(); + // Decode the header + let (header, post_header_pos) = { + let mut decoder = opaque::Decoder::new(&data[..], start_pos); + let header = Header::decode(&mut decoder) + .expect("Error while trying to decode incr. comp. cache header."); + (header, decoder.position()) + }; - let prev_diagnostics: FxHashMap<_, _> = { + let (prev_diagnostics, query_result_index) = { let mut decoder = CacheDecoder { - opaque: decoder, + tcx: None, + opaque: opaque::Decoder::new(&data[..], post_header_pos), codemap: sess.codemap(), prev_filemap_starts: &header.prev_filemap_starts, + cnum_map: &IndexVec::new(), + }; + + // Decode Diagnostics + let prev_diagnostics: FxHashMap<_, _> = { + let diagnostics: EncodedPrevDiagnostics = + decode_tagged(&mut decoder, PREV_DIAGNOSTICS_TAG) + .expect("Error while trying to decode previous session \ + diagnostics from incr. comp. 
cache."); + diagnostics.into_iter().collect() + }; + + // Decode the *position* of the query result index + let query_result_index_pos = { + let pos_pos = data.len() - IntEncodedWithFixedSize::ENCODED_SIZE; + decoder.with_position(pos_pos, |decoder| { + IntEncodedWithFixedSize::decode(decoder) + }).expect("Error while trying to decode query result index position.") + .0 as usize }; - let body = Body::decode(&mut decoder).unwrap(); - body.diagnostics.into_iter().collect() + + // Decode the query result index itself + let query_result_index: EncodedQueryResultIndex = + decoder.with_position(query_result_index_pos, |decoder| { + decode_tagged(decoder, QUERY_RESULT_INDEX_TAG) + }).expect("Error while trying to decode query result index."); + + (prev_diagnostics, query_result_index) }; OnDiskCache { + serialized_data: data, prev_diagnostics, - _prev_filemap_starts: header.prev_filemap_starts, + prev_filemap_starts: header.prev_filemap_starts, + prev_cnums: header.prev_cnums, + cnum_map: RefCell::new(None), codemap: sess.codemap(), current_diagnostics: RefCell::new(FxHashMap()), + query_result_index: query_result_index.into_iter().collect(), } } pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> { OnDiskCache { + serialized_data: Vec::new(), prev_diagnostics: FxHashMap(), - _prev_filemap_starts: BTreeMap::new(), + prev_filemap_starts: BTreeMap::new(), + prev_cnums: vec![], + cnum_map: RefCell::new(None), codemap, current_diagnostics: RefCell::new(FxHashMap()), + query_result_index: FxHashMap(), } } pub fn serialize<'a, 'tcx, E>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + cstore: &CrateStore, encoder: &mut E) -> Result<(), E::Error> - where E: Encoder - { + where E: ty_codec::TyEncoder + { + // Serializing the DepGraph should not modify it: + let _in_ignore = tcx.dep_graph.in_ignore(); + + let mut encoder = CacheEncoder { + tcx, + encoder, + type_shorthands: FxHashMap(), + predicate_shorthands: FxHashMap(), + }; + + + // Encode the file header let prev_filemap_starts: BTreeMap<_, _> = self .codemap .files() @@ -103,18 +173,61 @@ impl<'sess> OnDiskCache<'sess> { .map(|fm| (fm.start_pos, StableFilemapId::new(fm))) .collect(); - Header { prev_filemap_starts }.encode(encoder)?; + let sorted_cnums = sorted_cnums_including_local_crate(cstore); + + let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| { + let crate_name = tcx.original_crate_name(cnum).as_str().to_string(); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + (cnum.as_u32(), crate_name, crate_disambiguator) + }).collect(); + + Header { + prev_filemap_starts, + prev_cnums, + }.encode(&mut encoder)?; + - let diagnostics: Vec<(SerializedDepNodeIndex, Vec)> = + // Encode Diagnostics + let diagnostics: EncodedPrevDiagnostics = self.current_diagnostics .borrow() .iter() .map(|(k, v)| (SerializedDepNodeIndex::new(k.index()), v.clone())) .collect(); - Body { diagnostics }.encode(encoder)?; + encoder.encode_tagged(PREV_DIAGNOSTICS_TAG, &diagnostics)?; - Ok(()) + + // Encode query results + let mut query_result_index = EncodedQueryResultIndex::new(); + + { + use ty::maps::queries::*; + let enc = &mut encoder; + let qri = &mut query_result_index; + + // Encode TypeckTables + encode_query_results::(tcx, enc, qri)?; + } + + // Encode query result index + let query_result_index_pos = encoder.position() as u64; + encoder.encode_tagged(QUERY_RESULT_INDEX_TAG, &query_result_index)?; + + // Encode the position of the query result index as the last 8 bytes of + // file so we know where to look for it. 
+ IntEncodedWithFixedSize(query_result_index_pos).encode(&mut encoder)?; + + return Ok(()); + + fn sorted_cnums_including_local_crate(cstore: &CrateStore) -> Vec { + let mut cnums = vec![LOCAL_CRATE]; + cnums.extend_from_slice(&cstore.crates_untracked()[..]); + cnums.sort_unstable(); + // Just to be sure... + cnums.dedup(); + cnums + } } /// Load a diagnostic emitted during the previous compilation session. @@ -135,6 +248,37 @@ impl<'sess> OnDiskCache<'sess> { debug_assert!(prev.is_none()); } + pub fn load_query_result<'a, 'tcx, T>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + dep_node_index: SerializedDepNodeIndex) + -> T + where T: Decodable + { + let pos = self.query_result_index[&dep_node_index]; + + let mut cnum_map = self.cnum_map.borrow_mut(); + if cnum_map.is_none() { + *cnum_map = Some(Self::compute_cnum_map(tcx, &self.prev_cnums[..])); + } + + let mut decoder = CacheDecoder { + tcx: Some(tcx), + opaque: opaque::Decoder::new(&self.serialized_data[..], pos), + codemap: self.codemap, + prev_filemap_starts: &self.prev_filemap_starts, + cnum_map: cnum_map.as_ref().unwrap(), + }; + + match decode_tagged(&mut decoder, dep_node_index) { + Ok(value) => { + value + } + Err(e) => { + bug!("Could not decode cached query result: {}", e) + } + } + } + /// Store a diagnostic emitted during computation of an anonymous query. /// Since many anonymous queries can share the same `DepNode`, we aggregate /// them -- as opposed to regular queries where we assume that there is a @@ -150,18 +294,57 @@ impl<'sess> OnDiskCache<'sess> { x.extend(diagnostics.into_iter()); } + + // This function builds mapping from previous-session-CrateNum to + // current-session-CrateNum. There might be CrateNums from the previous + // Session that don't occur in the current one. For these, the mapping + // maps to None. + fn compute_cnum_map(tcx: TyCtxt, + prev_cnums: &[(u32, String, CrateDisambiguator)]) + -> IndexVec> + { + let _in_ignore = tcx.dep_graph.in_ignore(); + + let current_cnums = tcx.all_crate_nums(LOCAL_CRATE).iter().map(|&cnum| { + let crate_name = tcx.original_crate_name(cnum) + .as_str() + .to_string(); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + ((crate_name, crate_disambiguator), cnum) + }).collect::>(); + + let map_size = prev_cnums.iter() + .map(|&(cnum, ..)| cnum) + .max() + .unwrap_or(0) + 1; + let mut map = IndexVec::new(); + map.resize(map_size as usize, None); + + for &(prev_cnum, ref crate_name, crate_disambiguator) in prev_cnums { + let key = (crate_name.clone(), crate_disambiguator); + map[CrateNum::from_u32(prev_cnum)] = current_cnums.get(&key).cloned(); + } + + map[LOCAL_CRATE] = Some(LOCAL_CRATE); + map + } } + +//- DECODING ------------------------------------------------------------------- + /// A decoder that can read the incr. comp. cache. It is similar to the one /// we use for crate metadata decoding in that it can rebase spans and /// eventually will also handle things that contain `Ty` instances. -struct CacheDecoder<'a> { - opaque: opaque::Decoder<'a>, - codemap: &'a CodeMap, - prev_filemap_starts: &'a BTreeMap, +struct CacheDecoder<'a, 'tcx: 'a, 'x> { + tcx: Option>, + opaque: opaque::Decoder<'x>, + codemap: &'x CodeMap, + prev_filemap_starts: &'x BTreeMap, + cnum_map: &'x IndexVec>, } -impl<'a> CacheDecoder<'a> { +impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { fn find_filemap_prev_bytepos(&self, prev_bytepos: BytePos) -> Option<(BytePos, StableFilemapId)> { @@ -173,47 +356,91 @@ impl<'a> CacheDecoder<'a> { } } -macro_rules! 
decoder_methods { - ($($name:ident -> $ty:ty;)*) => { - $(fn $name(&mut self) -> Result<$ty, Self::Error> { - self.opaque.$name() - })* - } +// Decode something that was encoded with encode_tagged() and verify that the +// tag matches and the correct amount of bytes was read. +fn decode_tagged<'a, 'tcx, D, T, V>(decoder: &mut D, + expected_tag: T) + -> Result + where T: Decodable + Eq + ::std::fmt::Debug, + V: Decodable, + D: Decoder + ty_codec::TyDecoder<'a, 'tcx>, + 'tcx: 'a, +{ + let start_pos = decoder.position(); + + let actual_tag = T::decode(decoder)?; + assert_eq!(actual_tag, expected_tag); + let value = V::decode(decoder)?; + let end_pos = decoder.position(); + + let expected_len: u64 = Decodable::decode(decoder)?; + assert_eq!((end_pos - start_pos) as u64, expected_len); + + Ok(value) } -impl<'sess> Decoder for CacheDecoder<'sess> { - type Error = String; - decoder_methods! { - read_nil -> (); +impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, 'x> { + + #[inline] + fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { + self.tcx.expect("missing TyCtxt in CacheDecoder") + } + + #[inline] + fn position(&self) -> usize { + self.opaque.position() + } + + #[inline] + fn peek_byte(&self) -> u8 { + self.opaque.data[self.opaque.position()] + } + + fn cached_ty_for_shorthand(&mut self, + shorthand: usize, + or_insert_with: F) + -> Result, Self::Error> + where F: FnOnce(&mut Self) -> Result, Self::Error> + { + let tcx = self.tcx(); + + let cache_key = ty::CReaderCacheKey { + cnum: RESERVED_FOR_INCR_COMP_CACHE, + pos: shorthand, + }; + + if let Some(&ty) = tcx.rcache.borrow().get(&cache_key) { + return Ok(ty); + } - read_u128 -> u128; - read_u64 -> u64; - read_u32 -> u32; - read_u16 -> u16; - read_u8 -> u8; - read_usize -> usize; + let ty = or_insert_with(self)?; + tcx.rcache.borrow_mut().insert(cache_key, ty); + Ok(ty) + } - read_i128 -> i128; - read_i64 -> i64; - read_i32 -> i32; - read_i16 -> i16; - read_i8 -> i8; - read_isize -> isize; + fn with_position(&mut self, pos: usize, f: F) -> R + where F: FnOnce(&mut Self) -> R + { + debug_assert!(pos < self.opaque.data.len()); - read_bool -> bool; - read_f64 -> f64; - read_f32 -> f32; - read_char -> char; - read_str -> Cow; + let new_opaque = opaque::Decoder::new(self.opaque.data, pos); + let old_opaque = mem::replace(&mut self.opaque, new_opaque); + let r = f(self); + self.opaque = old_opaque; + r } - fn error(&mut self, err: &str) -> Self::Error { - self.opaque.error(err) + fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum { + self.cnum_map[cnum].unwrap_or_else(|| { + bug!("Could not find new CrateNum for {:?}", cnum) + }) } } -impl<'a> SpecializedDecoder for CacheDecoder<'a> { +implement_ty_decoder!( CacheDecoder<'a, 'tcx, 'x> ); + +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { let lo = BytePos::decode(self)?; let hi = BytePos::decode(self)?; @@ -229,3 +456,307 @@ impl<'a> SpecializedDecoder for CacheDecoder<'a> { Ok(DUMMY_SP) } } + +// This impl makes sure that we get a runtime error when we try decode a +// DefIndex that is not contained in a DefId. Such a case would be problematic +// because we would not know how to transform the DefIndex to the current +// context. 
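// Illustrative sketch, not part of the patch: the CrateNum remapping performed
// by `compute_cnum_map` and `map_encoded_cnum_to_current` above. Crate numbers
// are assigned during crate loading and can differ between sessions, so the
// stable key is the (crate name, disambiguator) pair. The types and sample
// crate names below are invented for the example.

use std::collections::HashMap;

type CrateNum = u32;

#[derive(PartialEq, Eq, Hash)]
struct StableCrateKey {
    name: String,
    disambiguator: u64,
}

// Build prev-session CrateNum -> current-session CrateNum, with None for
// crates that are no longer part of the crate graph.
fn compute_cnum_map(
    prev: &[(CrateNum, StableCrateKey)],
    current: &[(CrateNum, StableCrateKey)],
) -> Vec<Option<CrateNum>> {
    let current_by_key: HashMap<&StableCrateKey, CrateNum> =
        current.iter().map(|(cnum, key)| (key, *cnum)).collect();

    let max_prev = prev.iter().map(|&(cnum, _)| cnum).max().unwrap_or(0);
    let mut map = vec![None; (max_prev + 1) as usize];
    for (prev_cnum, key) in prev {
        map[*prev_cnum as usize] = current_by_key.get(key).copied();
    }
    map
}

fn main() {
    let key = |name: &str, d: u64| StableCrateKey { name: name.to_string(), disambiguator: d };
    // In the previous session `serde` was crate 2; this session it is crate 3,
    // and `old_dep` has been dropped from the dependency graph entirely.
    let prev = vec![(1, key("std", 7)), (2, key("serde", 9)), (3, key("old_dep", 4))];
    let current = vec![(1, key("std", 7)), (2, key("new_dep", 5)), (3, key("serde", 9))];

    let map = compute_cnum_map(&prev, &current);
    assert_eq!(map[2], Some(3)); // serde: 2 -> 3
    assert_eq!(map[3], None);    // old_dep no longer present
}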
+impl<'a, 'tcx, 'x> SpecializedDecoder<DefIndex> for CacheDecoder<'a, 'tcx, 'x> {
+    fn specialized_decode(&mut self) -> Result<DefIndex, Self::Error> {
+        bug!("Trying to decode DefIndex outside the context of a DefId")
+    }
+}
+
+// Both the CrateNum and the DefIndex of a DefId can change in between two
+// compilation sessions. We use the DefPathHash, which is stable across
+// sessions, to map the old DefId to the new one.
+impl<'a, 'tcx, 'x> SpecializedDecoder<DefId> for CacheDecoder<'a, 'tcx, 'x> {
+    fn specialized_decode(&mut self) -> Result<DefId, Self::Error> {
+        // Load the DefPathHash which is what we encoded the DefId as.
+        let def_path_hash = DefPathHash::decode(self)?;
+
+        // Using the DefPathHash, we can look up the new DefId.
+        Ok(self.tcx().def_path_hash_to_def_id.as_ref().unwrap()[&def_path_hash])
+    }
+}
+
+impl<'a, 'tcx, 'x> SpecializedDecoder<LocalDefId> for CacheDecoder<'a, 'tcx, 'x> {
+    fn specialized_decode(&mut self) -> Result<LocalDefId, Self::Error> {
+        Ok(LocalDefId::from_def_id(DefId::decode(self)?))
+    }
+}
+
+impl<'a, 'tcx, 'x> SpecializedDecoder<hir::HirId> for CacheDecoder<'a, 'tcx, 'x> {
+    fn specialized_decode(&mut self) -> Result<hir::HirId, Self::Error> {
+        // Load the DefPathHash which is what we encoded the DefIndex as.
+        let def_path_hash = DefPathHash::decode(self)?;
+
+        // Use the DefPathHash to map to the current DefId.
+        let def_id = self.tcx()
+                         .def_path_hash_to_def_id
+                         .as_ref()
+                         .unwrap()[&def_path_hash];
+
+        debug_assert!(def_id.is_local());
+
+        // The ItemLocalId needs no remapping.
+        let local_id = hir::ItemLocalId::decode(self)?;
+
+        // Reconstruct the HirId and look up the corresponding NodeId in the
+        // context of the current session.
+        Ok(hir::HirId {
+            owner: def_id.index,
+            local_id
+        })
+    }
+}
+
+// NodeIds are not stable across compilation sessions, so we store them in their
+// HirId representation. This allows us to map them to the current NodeId.
+impl<'a, 'tcx, 'x> SpecializedDecoder<NodeId> for CacheDecoder<'a, 'tcx, 'x> {
+    fn specialized_decode(&mut self) -> Result<NodeId, Self::Error> {
+        let hir_id = hir::HirId::decode(self)?;
+        Ok(self.tcx().hir.hir_to_node_id(hir_id))
+    }
+}
+
+//- ENCODING -------------------------------------------------------------------
+
+struct CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder,
+          'tcx: 'a,
+{
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    encoder: &'enc mut E,
+    type_shorthands: FxHashMap<ty::Ty<'tcx>, usize>,
+    predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
+}
+
+impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    /// Encode something with additional information that allows us to do some
+    /// sanity checks when decoding the data again. This method will first
+    /// encode the specified tag, then the given value, then the number of
+    /// bytes taken up by tag and value. On decoding, we can then verify that
+    /// we get the expected tag and read the expected number of bytes.
+    fn encode_tagged<T: Encodable, V: Encodable>(&mut self,
+                                                 tag: T,
+                                                 value: &V)
+                                                 -> Result<(), E::Error>
+    {
+        use ty::codec::TyEncoder;
+        let start_pos = self.position();
+
+        tag.encode(self)?;
+        value.encode(self)?;
+
+        let end_pos = self.position();
+        ((end_pos - start_pos) as u64).encode(self)
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn position(&self) -> usize {
+        self.encoder.position()
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<CrateNum> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
+        self.emit_u32(cnum.as_u32())
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<ty::Ty<'tcx>> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self, ty: &ty::Ty<'tcx>) -> Result<(), Self::Error> {
+        ty_codec::encode_with_shorthand(self, ty,
+                                        |encoder| &mut encoder.type_shorthands)
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<ty::GenericPredicates<'tcx>>
+    for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self,
+                          predicates: &ty::GenericPredicates<'tcx>)
+                          -> Result<(), Self::Error> {
+        ty_codec::encode_predicates(self, predicates,
+                                    |encoder| &mut encoder.predicate_shorthands)
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<hir::HirId> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> {
+        let hir::HirId {
+            owner,
+            local_id,
+        } = *id;
+
+        let def_path_hash = self.tcx.hir.definitions().def_path_hash(owner);
+
+        def_path_hash.encode(self)?;
+        local_id.encode(self)
+    }
+}
+
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<DefId> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> {
+        let def_path_hash = self.tcx.def_path_hash(*id);
+        def_path_hash.encode(self)
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<LocalDefId> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> {
+        id.to_def_id().encode(self)
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<DefIndex> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    fn specialized_encode(&mut self, _: &DefIndex) -> Result<(), Self::Error> {
+        bug!("Encoding DefIndex without context.")
+    }
+}
+
+// NodeIds are not stable across compilation sessions, so we store them in their
+// HirId representation. This allows us to map them to the current NodeId.
+impl<'enc, 'a, 'tcx, E> SpecializedEncoder<NodeId> for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    #[inline]
+    fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> {
+        let hir_id = self.tcx.hir.node_to_hir_id(*node_id);
+        hir_id.encode(self)
+    }
+}
+
+macro_rules! encoder_methods {
+    ($($name:ident($ty:ty);)*) => {
+        $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> {
+            self.encoder.$name(value)
+        })*
+    }
+}
+
+impl<'enc, 'a, 'tcx, E> Encoder for CacheEncoder<'enc, 'a, 'tcx, E>
+    where E: 'enc + ty_codec::TyEncoder
+{
+    type Error = E::Error;
+
+    fn emit_nil(&mut self) -> Result<(), Self::Error> {
+        Ok(())
+    }
+
+    encoder_methods!
{ + emit_usize(usize); + emit_u128(u128); + emit_u64(u64); + emit_u32(u32); + emit_u16(u16); + emit_u8(u8); + + emit_isize(isize); + emit_i128(i128); + emit_i64(i64); + emit_i32(i32); + emit_i16(i16); + emit_i8(i8); + + emit_bool(bool); + emit_f64(f64); + emit_f32(f32); + emit_char(char); + emit_str(&str); + } +} + +// An integer that will always encode to 8 bytes. +struct IntEncodedWithFixedSize(u64); + +impl IntEncodedWithFixedSize { + pub const ENCODED_SIZE: usize = 8; +} + +impl UseSpecializedEncodable for IntEncodedWithFixedSize {} +impl UseSpecializedDecodable for IntEncodedWithFixedSize {} + +impl<'enc, 'a, 'tcx, E> SpecializedEncoder +for CacheEncoder<'enc, 'a, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> { + let start_pos = self.position(); + for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE { + ((x.0 >> i * 8) as u8).encode(self)?; + } + let end_pos = self.position(); + assert_eq!((end_pos - start_pos), IntEncodedWithFixedSize::ENCODED_SIZE); + Ok(()) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder +for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + let mut value: u64 = 0; + let start_pos = self.position(); + + for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE { + let byte: u8 = Decodable::decode(self)?; + value |= (byte as u64) << (i * 8); + } + + let end_pos = self.position(); + assert_eq!((end_pos - start_pos), IntEncodedWithFixedSize::ENCODED_SIZE); + + Ok(IntEncodedWithFixedSize(value)) + } +} + +fn encode_query_results<'enc, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + encoder: &mut CacheEncoder<'enc, 'a, 'tcx, E>, + query_result_index: &mut EncodedQueryResultIndex) + -> Result<(), E::Error> + where Q: super::plumbing::GetCacheInternal<'tcx>, + E: 'enc + TyEncoder, + Q::Value: Encodable, +{ + for (key, entry) in Q::get_cache_internal(tcx).map.iter() { + if Q::cache_on_disk(key.clone()) { + let dep_node = SerializedDepNodeIndex::new(entry.index.index()); + + // Record position of the cache entry + query_result_index.push((dep_node, encoder.position())); + + // Encode the type check tables with the SerializedDepNodeIndex + // as tag. + encoder.encode_tagged(dep_node, &entry.value)?; + } + } + + Ok(()) +} diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index f5e1f384d60ea..1ca8fc6eb480f 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -20,13 +20,13 @@ use ty::maps::config::QueryDescription; use ty::item_path; use rustc_data_structures::fx::{FxHashMap}; -use std::cell::RefMut; +use std::cell::{Ref, RefMut}; use std::marker::PhantomData; use std::mem; use syntax_pos::Span; -pub(super) struct QueryMap { - phantom: PhantomData, +pub(super) struct QueryMap<'tcx, D: QueryDescription<'tcx>> { + phantom: PhantomData<(D, &'tcx ())>, pub(super) map: FxHashMap>, } @@ -46,8 +46,8 @@ impl QueryValue { } } -impl QueryMap { - pub(super) fn new() -> QueryMap { +impl<'tcx, M: QueryDescription<'tcx>> QueryMap<'tcx, M> { + pub(super) fn new() -> QueryMap<'tcx, M> { QueryMap { phantom: PhantomData, map: FxHashMap(), @@ -55,6 +55,11 @@ impl QueryMap { } } +pub(super) trait GetCacheInternal<'tcx>: QueryDescription<'tcx> + Sized { + fn get_cache_internal<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Ref<'a, QueryMap<'tcx, Self>>; +} + pub(super) struct CycleError<'a, 'tcx: 'a> { span: Span, cycle: RefMut<'a, [(Span, Query<'tcx>)]>, @@ -242,6 +247,13 @@ macro_rules! 
define_maps { type Value = $V; } + impl<$tcx> GetCacheInternal<$tcx> for queries::$name<$tcx> { + fn get_cache_internal<'a>(tcx: TyCtxt<'a, $tcx, $tcx>) + -> ::std::cell::Ref<'a, QueryMap<$tcx, Self>> { + tcx.maps.$name.borrow() + } + } + impl<'a, $tcx, 'lcx> queries::$name<$tcx> { #[allow(unused)] @@ -379,18 +391,26 @@ macro_rules! define_maps { { debug_assert!(tcx.dep_graph.is_green(dep_node_index)); - // We don't do any caching yet, so recompute. - // The diagnostics for this query have already been promoted to - // the current session during try_mark_green(), so we can ignore - // them here. - let (result, _) = tcx.cycle_check(span, Query::$name(key), || { - tcx.sess.diagnostic().track_diagnostics(|| { - // The dep-graph for this computation is already in place - tcx.dep_graph.with_ignore(|| { - Self::compute_result(tcx, key) + let result = if tcx.sess.opts.debugging_opts.incremental_queries && + Self::cache_on_disk(key) { + let prev_dep_node_index = + tcx.dep_graph.prev_dep_node_index_of(dep_node); + Self::load_from_disk(tcx.global_tcx(), prev_dep_node_index) + } else { + let (result, _ ) = tcx.cycle_check(span, Query::$name(key), || { + // The diagnostics for this query have already been + // promoted to the current session during + // try_mark_green(), so we can ignore them here. + tcx.sess.diagnostic().track_diagnostics(|| { + // The dep-graph for this computation is already in + // place + tcx.dep_graph.with_ignore(|| { + Self::compute_result(tcx, key) + }) }) - }) - })?; + })?; + result + }; // If -Zincremental-verify-ich is specified, re-hash results from // the cache and make sure that they have the expected fingerprint. @@ -547,7 +567,7 @@ macro_rules! define_map_struct { pub struct Maps<$tcx> { providers: IndexVec>, query_stack: RefCell)>>, - $($(#[$attr])* $name: RefCell>>,)* + $($(#[$attr])* $name: RefCell>>,)* } }; } diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 0deababd21829..abf2a1b0c00a4 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -17,7 +17,7 @@ pub use self::fold::TypeFoldable; use hir::{map as hir_map, FreevarMap, TraitMap}; use hir::def::{Def, CtorKind, ExportMap}; -use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; +use hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use hir::map::DefPathData; use ich::StableHashingContext; use middle::const_val::ConstVal; @@ -89,6 +89,7 @@ pub use self::maps::queries; pub mod adjustment; pub mod binding; pub mod cast; +#[macro_use] pub mod codec; pub mod error; mod erase_regions; @@ -573,7 +574,7 @@ impl Slice { #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UpvarId { pub var_id: hir::HirId, - pub closure_expr_id: DefIndex, + pub closure_expr_id: LocalDefId, } #[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy)] diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 6be07878487b9..7b09e45fe96e3 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -29,7 +29,7 @@ use rustc::middle::dataflow::BitwiseOperator; use rustc::middle::dataflow::DataFlowOperator; use rustc::middle::dataflow::KillFrom; use rustc::middle::borrowck::BorrowCheckResult; -use rustc::hir::def_id::{DefId, DefIndex}; +use rustc::hir::def_id::{DefId, LocalDefId}; use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; @@ 
-376,9 +376,9 @@ pub enum LoanPathElem<'tcx> { LpInterior(Option, InteriorKind), } -fn closure_to_block(closure_id: DefIndex, +fn closure_to_block(closure_id: LocalDefId, tcx: TyCtxt) -> ast::NodeId { - let closure_id = tcx.hir.def_index_to_node_id(closure_id); + let closure_id = tcx.hir.local_def_id_to_node_id(closure_id); match tcx.hir.get(closure_id) { hir_map::NodeExpr(expr) => match expr.node { hir::ExprClosure(.., body_id, _, _) => { @@ -1101,7 +1101,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } else { "consider changing this closure to take self by mutable reference" }; - let node_id = self.tcx.hir.def_index_to_node_id(id); + let node_id = self.tcx.hir.local_def_id_to_node_id(id); let help_span = self.tcx.hir.span(node_id); self.cannot_act_on_capture_in_sharable_fn(span, prefix, @@ -1297,7 +1297,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { }; if kind == ty::ClosureKind::Fn { let closure_node_id = - self.tcx.hir.def_index_to_node_id(upvar_id.closure_expr_id); + self.tcx.hir.local_def_id_to_node_id(upvar_id.closure_expr_id); db.span_help(self.tcx.hir.span(closure_node_id), "consider changing this closure to take \ self by mutable reference"); diff --git a/src/librustc_incremental/persist/data.rs b/src/librustc_incremental/persist/data.rs index fc417851b8897..08f9dba2ba162 100644 --- a/src/librustc_incremental/persist/data.rs +++ b/src/librustc_incremental/persist/data.rs @@ -11,7 +11,6 @@ //! The data that we will serialize and deserialize. use rustc::dep_graph::{WorkProduct, WorkProductId}; -use rustc::hir::def_id::DefIndex; use rustc::hir::map::DefPathHash; use rustc::middle::cstore::EncodedMetadataHash; use rustc_data_structures::fx::FxHashMap; @@ -58,5 +57,5 @@ pub struct SerializedMetadataHashes { /// is only populated if -Z query-dep-graph is specified. It will be /// empty otherwise. Importing crates are perfectly happy with just having /// the DefIndex. - pub index_map: FxHashMap + pub index_map: FxHashMap } diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs index 7d1400b6b95a5..7d27b842a68a7 100644 --- a/src/librustc_incremental/persist/file_format.rs +++ b/src/librustc_incremental/persist/file_format.rs @@ -53,19 +53,25 @@ pub fn write_file_header(stream: &mut W) -> io::Result<()> { /// Reads the contents of a file with a file header as defined in this module. /// -/// - Returns `Ok(Some(data))` if the file existed and was generated by a +/// - Returns `Ok(Some(data, pos))` if the file existed and was generated by a /// compatible compiler version. `data` is the entire contents of the file -/// *after* the header. +/// and `pos` points to the first byte after the header. /// - Returns `Ok(None)` if the file did not exist or was generated by an /// incompatible version of the compiler. /// - Returns `Err(..)` if some kind of IO error occurred while reading the /// file. 
-pub fn read_file(sess: &Session, path: &Path) -> io::Result>> { +pub fn read_file(sess: &Session, path: &Path) -> io::Result, usize)>> { if !path.exists() { return Ok(None); } let mut file = File::open(path)?; + let file_size = file.metadata()?.len() as usize; + + let mut data = Vec::with_capacity(file_size); + file.read_to_end(&mut data)?; + + let mut file = io::Cursor::new(data); // Check FILE_MAGIC { @@ -107,10 +113,8 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result>> { } } - let mut data = vec![]; - file.read_to_end(&mut data)?; - - Ok(Some(data)) + let post_header_start_pos = file.position() as usize; + Ok(Some((file.into_inner(), post_header_start_pos))) } fn report_format_mismatch(sess: &Session, file: &Path, message: &str) { diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 158e9f2677a72..e4bc6b7339efc 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -42,9 +42,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { } let work_products_path = work_products_path(tcx.sess); - if let Some(work_products_data) = load_data(tcx.sess, &work_products_path) { + if let Some((work_products_data, start_pos)) = load_data(tcx.sess, &work_products_path) { // Decode the list of work_products - let mut work_product_decoder = Decoder::new(&work_products_data[..], 0); + let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos); let work_products: Vec = RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| { let msg = format!("Error decoding `work-products` from incremental \ @@ -77,9 +77,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { } } -fn load_data(sess: &Session, path: &Path) -> Option> { +fn load_data(sess: &Session, path: &Path) -> Option<(Vec, usize)> { match file_format::read_file(sess, path) { - Ok(Some(data)) => return Some(data), + Ok(Some(data_and_pos)) => return Some(data_and_pos), Ok(None) => { // The file either didn't exist or was produced by an incompatible // compiler version. Neither is an error. 
@@ -126,8 +126,8 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap { debug!("load_prev_metadata_hashes() - File: {}", file_path.display()); - let data = match file_format::read_file(tcx.sess, &file_path) { - Ok(Some(data)) => data, + let (data, start_pos) = match file_format::read_file(tcx.sess, &file_path) { + Ok(Some(data_and_pos)) => data_and_pos, Ok(None) => { debug!("load_prev_metadata_hashes() - File produced by incompatible \ compiler version: {}", file_path.display()); @@ -141,7 +141,7 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap { }; debug!("load_prev_metadata_hashes() - Decoding hashes"); - let mut decoder = Decoder::new(&data, 0); + let mut decoder = Decoder::new(&data, start_pos); let _ = Svh::decode(&mut decoder).unwrap(); let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap(); @@ -171,8 +171,8 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph { return empty } - if let Some(bytes) = load_data(sess, &dep_graph_path(sess)) { - let mut decoder = Decoder::new(&bytes, 0); + if let Some((bytes, start_pos)) = load_data(sess, &dep_graph_path(sess)) { + let mut decoder = Decoder::new(&bytes, start_pos); let prev_commandline_args_hash = u64::decode(&mut decoder) .expect("Error reading commandline arg hash from cached dep-graph"); @@ -184,6 +184,10 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph { // We can't reuse the cache, purge it. debug!("load_dep_graph_new: differing commandline arg hashes"); + delete_all_session_dir_contents(sess) + .expect("Failed to delete invalidated incr. comp. session \ + directory contents."); + // No need to do any further work return empty } @@ -198,12 +202,13 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph { } pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess> { - if sess.opts.incremental.is_none() { + if sess.opts.incremental.is_none() || + !sess.opts.debugging_opts.incremental_queries { return OnDiskCache::new_empty(sess.codemap()); } - if let Some(bytes) = load_data(sess, &query_cache_path(sess)) { - OnDiskCache::new(sess, &bytes[..]) + if let Some((bytes, start_pos)) = load_data(sess, &query_cache_path(sess)) { + OnDiskCache::new(sess, bytes, start_pos) } else { OnDiskCache::new_empty(sess.codemap()) } diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index 711550c27d16f..b6dabf99be7d7 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -9,7 +9,7 @@ // except according to those terms. 
use rustc::dep_graph::{DepGraph, DepKind}; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, DefIndex}; use rustc::hir::svh::Svh; use rustc::ich::Fingerprint; use rustc::middle::cstore::EncodedMetadataHashes; @@ -69,11 +69,13 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, |e| encode_query_cache(tcx, e)); }); - time(sess.time_passes(), "persist dep-graph", || { - save_in(sess, - dep_graph_path(sess), - |e| encode_dep_graph(tcx, e)); - }); + if tcx.sess.opts.debugging_opts.incremental_queries { + time(sess.time_passes(), "persist dep-graph", || { + save_in(sess, + dep_graph_path(sess), + |e| encode_dep_graph(tcx, e)); + }); + } dirty_clean::check_dirty_clean_annotations(tcx); dirty_clean::check_dirty_clean_metadata(tcx, @@ -268,11 +270,11 @@ fn encode_metadata_hashes(tcx: TyCtxt, if tcx.sess.opts.debugging_opts.query_dep_graph { for serialized_hash in &serialized_hashes.entry_hashes { - let def_id = DefId::local(serialized_hash.def_index); + let def_id = DefId::local(DefIndex::from_u32(serialized_hash.def_index)); // Store entry in the index_map let def_path_hash = tcx.def_path_hash(def_id); - serialized_hashes.index_map.insert(def_id.index, def_path_hash); + serialized_hashes.index_map.insert(def_id.index.as_u32(), def_path_hash); // Record hash in current_metadata_hashes current_metadata_hashes.insert(def_id, serialized_hash.hash); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index e63037f4da1ef..0dd1b9e500c08 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -15,8 +15,6 @@ use schema::*; use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash}; use rustc::hir; - -use rustc::middle::const_val::ByteArray; use rustc::middle::cstore::{LinkagePreference, ExternConstBody, ExternBodyNestedBodies}; use rustc::hir::def::{self, Def, CtorKind}; @@ -25,19 +23,15 @@ use rustc::ich::Fingerprint; use rustc::middle::lang_items; use rustc::session::Session; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::codec::{self as ty_codec, TyDecoder}; -use rustc::ty::subst::Substs; +use rustc::ty::codec::TyDecoder; use rustc::util::nodemap::DefIdSet; - use rustc::mir::Mir; -use std::borrow::Cow; use std::cell::Ref; use std::collections::BTreeMap; use std::io; use std::mem; use std::rc::Rc; -use std::str; use std::u32; use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; @@ -174,57 +168,23 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { } } -macro_rules! decoder_methods { - ($($name:ident -> $ty:ty;)*) => { - $(fn $name(&mut self) -> Result<$ty, Self::Error> { - self.opaque.$name() - })* - } -} - -impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> { - type Error = as Decoder>::Error; - - decoder_methods! 
{ - read_nil -> (); - - read_u128 -> u128; - read_u64 -> u64; - read_u32 -> u32; - read_u16 -> u16; - read_u8 -> u8; - read_usize -> usize; - - read_i128 -> i128; - read_i64 -> i64; - read_i32 -> i32; - read_i16 -> i16; - read_i8 -> i8; - read_isize -> isize; - - read_bool -> bool; - read_f64 -> f64; - read_f32 -> f32; - read_char -> char; - read_str -> Cow; - } - - fn error(&mut self, err: &str) -> Self::Error { - self.opaque.error(err) - } -} - - impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> { + #[inline] fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx.expect("missing TyCtxt in DecodeContext") } + #[inline] fn peek_byte(&self) -> u8 { self.opaque.data[self.opaque.position()] } + #[inline] + fn position(&self) -> usize { + self.opaque.position() + } + fn cached_ty_for_shorthand(&mut self, shorthand: usize, or_insert_with: F) @@ -286,14 +246,24 @@ impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { } } -impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result { - let cnum = CrateNum::from_u32(u32::decode(self)?); - if cnum == LOCAL_CRATE { - Ok(self.cdata().cnum) - } else { - Ok(self.cdata().cnum_map.borrow()[cnum]) - } + +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + #[inline] + fn specialized_decode(&mut self) -> Result { + let krate = CrateNum::decode(self)?; + let index = DefIndex::decode(self)?; + + Ok(DefId { + krate, + index, + }) + } +} + +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + #[inline] + fn specialized_decode(&mut self) -> Result { + Ok(DefIndex::from_u32(self.read_u32()?)) } } @@ -357,65 +327,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { } } -// FIXME(#36588) These impls are horribly unsound as they allow -// the caller to pick any lifetime for 'tcx, including 'static, -// by using the unspecialized proxies to them. 
- -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_ty(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_predicates(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx Substs<'tcx>> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { - ty_codec::decode_substs(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_region(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice>> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { - ty_codec::decode_ty_slice(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::AdtDef> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { - ty_codec::decode_adt_def(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice>> - for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) - -> Result<&'tcx ty::Slice>, Self::Error> { - ty_codec::decode_existential_predicate_slice(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_byte_array(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Const<'tcx>> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { - ty_codec::decode_const(self) - } -} +implement_ty_decoder!( DecodeContext<'a, 'tcx> ); impl<'a, 'tcx> MetadataBlob { pub fn is_compatible(&self) -> bool { diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index d5eee14bf506b..19d0de7334650 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -116,6 +116,33 @@ impl<'a, 'tcx, T> SpecializedEncoder> for EncodeContext<'a, 'tcx> { } } +impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { + #[inline] + fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> { + self.emit_u32(cnum.as_u32()) + } +} + +impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { + #[inline] + fn specialized_encode(&mut self, def_id: &DefId) -> Result<(), Self::Error> { + let DefId { + krate, + index, + } = *def_id; + + krate.encode(self)?; + index.encode(self) + } +} + +impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { + #[inline] + fn specialized_encode(&mut self, def_index: &DefIndex) -> Result<(), Self::Error> { + self.emit_u32(def_index.as_u32()) + } +} + impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> { ty_codec::encode_with_shorthand(self, ty, |ecx| &mut ecx.type_shorthands) @@ -213,7 +240,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { if let Some(fingerprint) = fingerprint { this.metadata_hashes.hashes.push(EncodedMetadataHash { - def_index, + def_index: def_index.as_u32(), hash: fingerprint, }) } @@ -395,7 +422,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let total_bytes = self.position(); self.metadata_hashes.hashes.push(EncodedMetadataHash { - def_index: global_metadata_def_index(GlobalMetaDataKind::Krate), + def_index: global_metadata_def_index(GlobalMetaDataKind::Krate).as_u32(), hash: 
Fingerprint::from_smaller_hash(link_meta.crate_hash.as_u64()) }); diff --git a/src/librustc_metadata/index_builder.rs b/src/librustc_metadata/index_builder.rs index 1d2b6cc33d46a..46706bba96d6d 100644 --- a/src/librustc_metadata/index_builder.rs +++ b/src/librustc_metadata/index_builder.rs @@ -136,7 +136,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { let (fingerprint, ecx) = entry_builder.finish(); if let Some(hash) = fingerprint { ecx.metadata_hashes.hashes.push(EncodedMetadataHash { - def_index: id.index, + def_index: id.index.as_u32(), hash, }); } diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 2073d49530061..7d624b5c9cf5b 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -13,7 +13,7 @@ use build; use hair::cx::Cx; use hair::LintLevel; use rustc::hir; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, LocalDefId}; use rustc::middle::region; use rustc::mir::*; use rustc::mir::transform::MirSource; @@ -414,10 +414,10 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, freevars.iter().map(|fv| { let var_id = fv.var_id(); let var_hir_id = tcx.hir.node_to_hir_id(var_id); - let closure_expr_id = tcx.hir.local_def_id(fn_id).index; + let closure_expr_id = tcx.hir.local_def_id(fn_id); let capture = hir.tables().upvar_capture(ty::UpvarId { var_id: var_hir_id, - closure_expr_id, + closure_expr_id: LocalDefId::from_def_id(closure_expr_id), }); let by_ref = match capture { ty::UpvarCapture::ByValue => false, diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index f5a53e2aa8eed..798928e7ae7a5 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -20,6 +20,7 @@ use rustc::ty::{self, AdtKind, VariantDef, Ty}; use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow}; use rustc::ty::cast::CastKind as TyCastKind; use rustc::hir; +use rustc::hir::def_id::LocalDefId; impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { type Output = Expr<'tcx>; @@ -783,7 +784,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // point we need an implicit deref let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_def_id.index, + closure_expr_id: LocalDefId::from_def_id(closure_def_id), }; match cx.tables().upvar_capture(upvar_id) { ty::UpvarCapture::ByValue => field_kind, @@ -897,7 +898,7 @@ fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, let var_hir_id = cx.tcx.hir.node_to_hir_id(freevar.var_id()); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: cx.tcx.hir.local_def_id(closure_expr.id).index, + closure_expr_id: cx.tcx.hir.local_def_id(closure_expr.id).to_local(), }; let upvar_capture = cx.tables().upvar_capture(upvar_id); let temp_lifetime = cx.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id); diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index d179b390a2918..07ed0f5b89003 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -50,7 +50,7 @@ use rustc::infer::UpvarRegion; use syntax::ast; use syntax_pos::Span; use rustc::hir; -use rustc::hir::def_id::DefIndex; +use rustc::hir::def_id::LocalDefId; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; use rustc::util::nodemap::FxHashMap; @@ -128,7 +128,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { for freevar in freevars { let upvar_id = ty::UpvarId { var_id: self.tcx.hir.node_to_hir_id(freevar.var_id()), - closure_expr_id: closure_def_id.index, + 
closure_expr_id: LocalDefId::from_def_id(closure_def_id), }; debug!("seed upvar_id {:?}", upvar_id); @@ -167,7 +167,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Write the adjusted values back into the main tables. if infer_kind { if let Some(kind) = delegate.adjust_closure_kinds - .remove(&closure_def_id.index) { + .remove(&closure_def_id.to_local()) { self.tables .borrow_mut() .closure_kinds_mut() @@ -231,7 +231,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // This may change if abstract return types of some sort are // implemented. let tcx = self.tcx; - let closure_def_index = tcx.hir.local_def_id(closure_id).index; + let closure_def_index = tcx.hir.local_def_id(closure_id); tcx.with_freevars(closure_id, |freevars| { freevars.iter().map(|freevar| { @@ -240,7 +240,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let freevar_ty = self.node_ty(var_hir_id); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_def_index, + closure_expr_id: LocalDefId::from_def_id(closure_def_index), }; let capture = self.tables.borrow().upvar_capture(upvar_id); @@ -263,7 +263,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { struct InferBorrowKind<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, - adjust_closure_kinds: FxHashMap)>, + adjust_closure_kinds: FxHashMap)>, adjust_upvar_captures: ty::UpvarCaptureMap<'tcx>, } @@ -481,7 +481,7 @@ impl<'a, 'gcx, 'tcx> InferBorrowKind<'a, 'gcx, 'tcx> { } fn adjust_closure_kind(&mut self, - closure_id: DefIndex, + closure_id: LocalDefId, new_kind: ty::ClosureKind, upvar_span: Span, var_name: ast::Name) { @@ -490,7 +490,7 @@ impl<'a, 'gcx, 'tcx> InferBorrowKind<'a, 'gcx, 'tcx> { let closure_kind = self.adjust_closure_kinds.get(&closure_id).cloned() .or_else(|| { - let closure_id = self.fcx.tcx.hir.def_index_to_hir_id(closure_id); + let closure_id = self.fcx.tcx.hir.local_def_id_to_hir_id(closure_id); self.fcx.tables.borrow().closure_kinds().get(closure_id).cloned() }); diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index 80ca0afe72b50..8d94039c594f8 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -1387,6 +1387,7 @@ actual:\n\ if let Some(ref incremental_dir) = self.props.incremental_dir { rustc.args(&["-Z", &format!("incremental={}", incremental_dir.display())]); rustc.args(&["-Z", "incremental-verify-ich"]); + rustc.args(&["-Z", "incremental-queries"]); } match self.config.mode { @@ -2614,4 +2615,4 @@ fn read2_abbreviated(mut child: Child) -> io::Result { stdout: stdout.into_bytes(), stderr: stderr.into_bytes(), }) -} \ No newline at end of file +}
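
Editor's note (not part of the patch): the cache entries written by encode_tagged() and checked by decode_tagged() follow a simple framing scheme: the tag, then the value, then the number of bytes taken up by tag and value. The following is a minimal standalone Rust sketch of that idea under simplifying assumptions; it uses plain little-endian byte buffers and fixed-width u64 tags and values instead of rustc's opaque encoder, so the byte layout is illustrative only.

use std::convert::TryInto;

// Frame an entry as: tag, value, then the number of bytes taken up by tag + value.
fn encode_tagged(buf: &mut Vec<u8>, tag: u64, value: u64) {
    let start_pos = buf.len();
    buf.extend_from_slice(&tag.to_le_bytes());
    buf.extend_from_slice(&value.to_le_bytes());
    let len = (buf.len() - start_pos) as u64;
    buf.extend_from_slice(&len.to_le_bytes());
}

// Read an entry back, verifying both the tag and the recorded length.
fn decode_tagged(buf: &[u8], pos: usize, expected_tag: u64) -> u64 {
    let read_u64 = |at: usize| u64::from_le_bytes(buf[at..at + 8].try_into().unwrap());
    let start_pos = pos;
    assert_eq!(read_u64(pos), expected_tag);                      // tag must match the key
    let value = read_u64(pos + 8);                                // the cached payload
    let end_pos = pos + 16;
    assert_eq!((end_pos - start_pos) as u64, read_u64(end_pos));  // length check
    value
}

fn main() {
    let mut buf = Vec::new();
    encode_tagged(&mut buf, 42, 0xdead_beef);
    assert_eq!(decode_tagged(&buf, 0, 42), 0xdead_beef);
}

In the patch itself the tag is the SerializedDepNodeIndex of the query result and the value is the query's cached output; the index at the end of the file (written with IntEncodedWithFixedSize) records where each tagged entry starts, so load_query_result can seek straight to it.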