Commit a5137af

ICH: Hash MacroDefs in a mostly stable way.
Parent: 003b169

2 files changed: +143 / -3 lines

src/librustc_incremental/calculate_svh/mod.rs

Lines changed: 6 additions & 1 deletion
@@ -46,6 +46,7 @@ use self::caching_codemap_view::CachingCodemapView;
 use self::hasher::IchHasher;
 use ich::Fingerprint;
 
+
 mod def_path_hash;
 mod svh_visitor;
 mod caching_codemap_view;
@@ -113,8 +114,12 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
         visitor.calculate_def_id(DefId::local(CRATE_DEF_INDEX),
                                  |v| visit::walk_crate(v, krate));
-        // FIXME(#37713) if foreign items were item likes, could use ItemLikeVisitor
         krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
+
+        for macro_def in krate.exported_macros.iter() {
+            visitor.calculate_node_id(macro_def.id,
+                                      |v| v.visit_macro_def(macro_def));
+        }
     });
 
     tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
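
The hunk above extends compute_incremental_hashes_map so that, after the crate's items are hashed, every exported macro also gets its own fingerprint, computed by visiting the macro definition. Below is a minimal, self-contained sketch of that one-fingerprint-per-node pattern; the types and the calculate_node_id helper here are simplified stand-ins for illustration, not the compiler's actual API.

use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

type NodeId = u32;
type Fingerprint = u64;

struct HashingVisitor {
    hashes: HashMap<NodeId, Fingerprint>,
}

impl HashingVisitor {
    // Roughly what `calculate_node_id` does above: run the supplied walk
    // against a fresh hasher and store the finished hash under the node's id.
    fn calculate_node_id<F: FnOnce(&mut DefaultHasher)>(&mut self, id: NodeId, walk: F) {
        let mut hasher = DefaultHasher::new();
        walk(&mut hasher);
        self.hashes.insert(id, hasher.finish());
    }
}

fn main() {
    let mut visitor = HashingVisitor { hashes: HashMap::new() };
    // Pretend node 42 is an exported macro; hash its (stand-in) body tokens.
    visitor.calculate_node_id(42, |h| {
        for tok in &["macro_rules", "!", "double"] {
            tok.hash(h);
        }
    });
    println!("{:?}", visitor.hashes);
}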

src/librustc_incremental/calculate_svh/svh_visitor.rs

Lines changed: 137 additions & 2 deletions
@@ -24,6 +24,7 @@ use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
 use syntax::parse::token;
 use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
+use syntax::tokenstream;
 use rustc::hir;
 use rustc::hir::*;
 use rustc::hir::def::{Def, PathResolution};
@@ -769,9 +770,10 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
+        for tt in &macro_def.body {
+            self.hash_token_tree(tt);
+        }
         visit::walk_macro_def(self, macro_def)
-        // FIXME(mw): We should hash the body of the macro too but we don't
-        // have a stable way of doing so yet.
     }
 }
 
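The hunk above replaces the old FIXME with actual hashing of the macro body: every token tree in macro_def.body now feeds into the fingerprint, so changing a macro's expansion changes its hash. A hypothetical example of the kind of edit this now detects (the macros below are made up for illustration):

// Only the expansion differs between these two exported macros; it is exactly
// this difference in body tokens that hash_token_tree now folds into the
// incremental-compilation hash.
#[macro_export]
macro_rules! double_v1 {
    ($x:expr) => { $x * 2 };
}

#[macro_export]
macro_rules! double_v2 {
    ($x:expr) => { $x + $x };
}

fn main() {
    assert_eq!(double_v1!(3), 6);
    assert_eq!(double_v2!(3), 6);
}
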
@@ -941,4 +943,137 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             self.overflow_checks_enabled = true;
         }
     }
+
+    fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
+        self.hash_discriminant(tt);
+        match *tt {
+            tokenstream::TokenTree::Token(span, ref token) => {
+                hash_span!(self, span);
+                self.hash_token(token);
+            }
+            tokenstream::TokenTree::Delimited(span, ref delimited) => {
+                hash_span!(self, span);
+                let tokenstream::Delimited {
+                    ref delim,
+                    open_span,
+                    ref tts,
+                    close_span,
+                } = **delimited;
+
+                delim.hash(self.st);
+                hash_span!(self, open_span);
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                hash_span!(self, close_span);
+            }
+            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
+                hash_span!(self, span);
+                let tokenstream::SequenceRepetition {
+                    ref tts,
+                    ref separator,
+                    op,
+                    num_captures,
+                } = **sequence_repetition;
+
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                self.hash_discriminant(separator);
+                if let Some(ref separator) = *separator {
+                    self.hash_token(separator);
+                }
+                op.hash(self.st);
+                num_captures.hash(self.st);
+            }
+        }
+    }
+
+    fn hash_token(&mut self, token: &token::Token) {
+        self.hash_discriminant(token);
+        match *token {
+            token::Token::Eq |
+            token::Token::Lt |
+            token::Token::Le |
+            token::Token::EqEq |
+            token::Token::Ne |
+            token::Token::Ge |
+            token::Token::Gt |
+            token::Token::AndAnd |
+            token::Token::OrOr |
+            token::Token::Not |
+            token::Token::Tilde |
+            token::Token::At |
+            token::Token::Dot |
+            token::Token::DotDot |
+            token::Token::DotDotDot |
+            token::Token::Comma |
+            token::Token::Semi |
+            token::Token::Colon |
+            token::Token::ModSep |
+            token::Token::RArrow |
+            token::Token::LArrow |
+            token::Token::FatArrow |
+            token::Token::Pound |
+            token::Token::Dollar |
+            token::Token::Question |
+            token::Token::Underscore |
+            token::Token::Whitespace |
+            token::Token::Comment |
+            token::Token::Eof => {}
+
+            token::Token::BinOp(bin_op_token) |
+            token::Token::BinOpEq(bin_op_token) => bin_op_token.hash(self.st),
+
+            token::Token::OpenDelim(delim_token) |
+            token::Token::CloseDelim(delim_token) => delim_token.hash(self.st),
+
+            token::Token::Literal(ref lit, ref opt_name) => {
+                self.hash_discriminant(lit);
+                match *lit {
+                    token::Lit::Byte(val) |
+                    token::Lit::Char(val) |
+                    token::Lit::Integer(val) |
+                    token::Lit::Float(val) |
+                    token::Lit::Str_(val) |
+                    token::Lit::ByteStr(val) => val.as_str().hash(self.st),
+                    token::Lit::StrRaw(val, n) |
+                    token::Lit::ByteStrRaw(val, n) => {
+                        val.as_str().hash(self.st);
+                        n.hash(self.st);
+                    }
+                };
+                opt_name.map(ast::Name::as_str).hash(self.st);
+            }
+
+            token::Token::Ident(ident) |
+            token::Token::Lifetime(ident) |
+            token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
+            token::Token::MatchNt(ident1, ident2) => {
+                ident1.name.as_str().hash(self.st);
+                ident2.name.as_str().hash(self.st);
+            }
+
+            token::Token::Interpolated(ref non_terminal) => {
+                // FIXME(mw): This could be implemented properly. It's just a
+                //            lot of work, since we would need to hash the AST
+                //            in a stable way, in addition to the HIR.
+                //            Since this is hardly used anywhere, just emit a
+                //            warning for now.
+                if self.tcx.sess.opts.debugging_opts.incremental.is_some() {
+                    let msg = format!("Quasi-quoting might make incremental \
+                                       compilation very inefficient: {:?}",
+                                      non_terminal);
+                    self.tcx.sess.warn(&msg[..]);
+                }
+
+                non_terminal.hash(self.st);
+            }
+
+            token::Token::DocComment(val) |
+            token::Token::Shebang(val) => val.as_str().hash(self.st),
+        }
+    }
 }
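
Both new helpers follow the same two-step pattern: first hash which enum variant is at hand via hash_discriminant (a method defined elsewhere in this file, not shown in this diff), then hash that variant's payload by content, for example interned names via as_str() rather than by symbol index. A rough standalone sketch of that pattern, assuming a discriminant helper built on std::mem::discriminant (the compiler's real helper may be implemented differently):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem::discriminant;

// Simplified stand-in for the compiler's token type.
enum Token {
    Comma,
    Ident(String),
}

// Hash only *which* variant `value` is; the caller hashes the payload
// afterwards, mirroring how hash_token_tree/hash_token above first hash the
// discriminant and then match on the variant.
fn hash_discriminant<T, H: Hasher>(value: &T, hasher: &mut H) {
    discriminant(value).hash(hasher);
}

fn fingerprint(tokens: &[Token]) -> u64 {
    let mut h = DefaultHasher::new();
    for tok in tokens {
        hash_discriminant(tok, &mut h);
        match *tok {
            Token::Comma => {}
            // Hash identifiers by their text, so the result does not depend
            // on interning order or memory addresses.
            Token::Ident(ref name) => name.hash(&mut h),
        }
    }
    h.finish()
}

fn main() {
    let a = fingerprint(&[Token::Ident("foo".to_string()), Token::Comma]);
    let b = fingerprint(&[Token::Ident("bar".to_string()), Token::Comma]);
    assert_ne!(a, b);
    println!("{:x} {:x}", a, b);
}

Hashing names and literal contents by their text keeps the fingerprint independent of interning order; the Interpolated (quasi-quoting) case above is the part that is not yet handled in a stable way and only triggers a warning, which is presumably why the commit message says "mostly" stable.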
