@@ -24,6 +24,7 @@ use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
 use syntax::parse::token;
 use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
+use syntax::tokenstream;
 use rustc::hir;
 use rustc::hir::*;
 use rustc::hir::def::{Def, PathResolution};
@@ -769,9 +770,10 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
+        for tt in &macro_def.body {
+            self.hash_token_tree(tt);
+        }
         visit::walk_macro_def(self, macro_def)
-        // FIXME(mw): We should hash the body of the macro too but we don't
-        //            have a stable way of doing so yet.
     }
 }

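The helpers called above, `hash_token_tree` and `hash_token` (added in the next hunk), follow a common pattern for hashing enum-shaped data: hash the variant's discriminant first, then its fields, recursing into nested trees. A minimal, self-contained sketch of that pattern, using a hypothetical `TokenNode` enum and `hash_node` function rather than the compiler's actual types:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem;

// Hypothetical stand-in for a token-tree-like structure.
enum TokenNode {
    Token(String),
    Delimited(char, Vec<TokenNode>),
}

// Hash the variant's discriminant first so that, e.g., an empty
// `Delimited` and a `Token` never hash identically, then hash the
// fields, recursing into children.
fn hash_node<H: Hasher>(node: &TokenNode, st: &mut H) {
    mem::discriminant(node).hash(st);
    match *node {
        TokenNode::Token(ref s) => s.hash(st),
        TokenNode::Delimited(delim, ref children) => {
            delim.hash(st);
            children.len().hash(st);
            for child in children {
                hash_node(child, st);
            }
        }
    }
}

fn main() {
    let tree = TokenNode::Delimited('{', vec![TokenNode::Token("x".to_owned())]);
    let mut hasher = DefaultHasher::new();
    hash_node(&tree, &mut hasher);
    println!("fingerprint: {:x}", hasher.finish());
}
```

Hashing the child count before the children, as the diff does with `tts.len().hash(self.st)`, keeps the boundary between sibling subtrees unambiguous in the hash stream.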
@@ -941,4 +943,137 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             self.overflow_checks_enabled = true;
         }
     }
+
+    fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
+        self.hash_discriminant(tt);
+        match *tt {
+            tokenstream::TokenTree::Token(span, ref token) => {
+                hash_span!(self, span);
+                self.hash_token(token);
+            }
+            tokenstream::TokenTree::Delimited(span, ref delimited) => {
+                hash_span!(self, span);
+                let tokenstream::Delimited {
+                    ref delim,
+                    open_span,
+                    ref tts,
+                    close_span,
+                } = **delimited;
+
+                delim.hash(self.st);
+                hash_span!(self, open_span);
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                hash_span!(self, close_span);
+            }
+            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
+                hash_span!(self, span);
+                let tokenstream::SequenceRepetition {
+                    ref tts,
+                    ref separator,
+                    op,
+                    num_captures,
+                } = **sequence_repetition;
+
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                self.hash_discriminant(separator);
+                if let Some(ref separator) = *separator {
+                    self.hash_token(separator);
+                }
+                op.hash(self.st);
+                num_captures.hash(self.st);
+            }
+        }
+    }
+
+    fn hash_token(&mut self, token: &token::Token) {
+        self.hash_discriminant(token);
+        match *token {
+            token::Token::Eq |
+            token::Token::Lt |
+            token::Token::Le |
+            token::Token::EqEq |
+            token::Token::Ne |
+            token::Token::Ge |
+            token::Token::Gt |
+            token::Token::AndAnd |
+            token::Token::OrOr |
+            token::Token::Not |
+            token::Token::Tilde |
+            token::Token::At |
+            token::Token::Dot |
+            token::Token::DotDot |
+            token::Token::DotDotDot |
+            token::Token::Comma |
+            token::Token::Semi |
+            token::Token::Colon |
+            token::Token::ModSep |
+            token::Token::RArrow |
+            token::Token::LArrow |
+            token::Token::FatArrow |
+            token::Token::Pound |
+            token::Token::Dollar |
+            token::Token::Question |
+            token::Token::Underscore |
+            token::Token::Whitespace |
+            token::Token::Comment |
+            token::Token::Eof => {}
+
+            token::Token::BinOp(bin_op_token) |
+            token::Token::BinOpEq(bin_op_token) => bin_op_token.hash(self.st),
+
+            token::Token::OpenDelim(delim_token) |
+            token::Token::CloseDelim(delim_token) => delim_token.hash(self.st),
+
+            token::Token::Literal(ref lit, ref opt_name) => {
+                self.hash_discriminant(lit);
+                match *lit {
+                    token::Lit::Byte(val) |
+                    token::Lit::Char(val) |
+                    token::Lit::Integer(val) |
+                    token::Lit::Float(val) |
+                    token::Lit::Str_(val) |
+                    token::Lit::ByteStr(val) => val.as_str().hash(self.st),
+                    token::Lit::StrRaw(val, n) |
+                    token::Lit::ByteStrRaw(val, n) => {
+                        val.as_str().hash(self.st);
+                        n.hash(self.st);
+                    }
+                };
+                opt_name.map(ast::Name::as_str).hash(self.st);
+            }
+
+            token::Token::Ident(ident) |
+            token::Token::Lifetime(ident) |
+            token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
+            token::Token::MatchNt(ident1, ident2) => {
+                ident1.name.as_str().hash(self.st);
+                ident2.name.as_str().hash(self.st);
+            }
+
+            token::Token::Interpolated(ref non_terminal) => {
+                // FIXME(mw): This could be implemented properly. It's just a
+                //            lot of work, since we would need to hash the AST
+                //            in a stable way, in addition to the HIR.
+                //            Since this is hardly used anywhere, just emit a
+                //            warning for now.
+                if self.tcx.sess.opts.debugging_opts.incremental.is_some() {
+                    let msg = format!("Quasi-quoting might make incremental \
+                                       compilation very inefficient: {:?}",
+                                      non_terminal);
+                    self.tcx.sess.warn(&msg[..]);
+                }
+
+                non_terminal.hash(self.st);
+            }
+
+            token::Token::DocComment(val) |
+            token::Token::Shebang(val) => val.as_str().hash(self.st),
+        }
+    }
 }
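One detail worth noting in `hash_token`: identifiers, literals, doc comments, and lifetimes are hashed through their string contents (`as_str()`) rather than their interned symbol values. Interner indices depend on the order in which strings happen to be interned during a particular compilation session, so hashing them directly would make the hash unstable across runs. A small illustrative sketch of that distinction, with a hypothetical `Interner` standing in for the compiler's string interner:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for a string interner: strings get dense indices
// in insertion order, so the same string can receive different indices in
// different compilation sessions.
struct Interner {
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> usize {
        if let Some(idx) = self.strings.iter().position(|x| x == s) {
            return idx;
        }
        self.strings.push(s.to_owned());
        self.strings.len() - 1
    }

    fn as_str(&self, idx: usize) -> &str {
        &self.strings[idx]
    }
}

fn fingerprint<T: Hash + ?Sized>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut session_a = Interner { strings: Vec::new() };
    let mut session_b = Interner { strings: Vec::new() };

    // Session A happens to intern "bar" first; session B does not.
    session_a.intern("bar");
    let a = session_a.intern("foo"); // index 1
    let b = session_b.intern("foo"); // index 0

    // Hashing the raw indices generally differs across sessions...
    println!("index hashes: {:x} vs {:x}", fingerprint(&a), fingerprint(&b));

    // ...while hashing the string contents is stable.
    assert_eq!(fingerprint(session_a.as_str(a)),
               fingerprint(session_b.as_str(b)));
}
```

The same reasoning applies to the `opt_name.map(ast::Name::as_str)` call in the diff above.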