Commit 49d3fd3

Auto merge of #37787 - michaelwoerister:macro-def-ich, r=nikomatsakis
ICH: Handle MacroDef HIR instances.

As of recently, `hir::MacroDef` instances are exported in crate metadata, which means we also store their ICH when doing incremental compilation. Even though exported macro definitions should not (yet) interact with incremental compilation, the ICH is also used for the general-purpose crate hash, where macros should be included. This PR implements ICH computation for `MacroDef`.

In theory, the ICH of these MacroDefs is less stable than that of other HIR items, since I opted to just call the compiler-generated `Hash::hash()` for `Token::Interpolated` variants. `Token::Interpolated` contains AST data structures and it would have been a lot of effort to expand ICH computation to the AST too. Since quasi-quoting is rarely used *and* it would only make a difference if incremental compilation were extended to macros, the simpler implementation seemed like a good idea.

This fixes the problem reported in #37756. The test still fails because of broken codegen-unit support, though.

r? @nikomatsakis
2 parents ac635aa + c722a1e commit 49d3fd3
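The change hashes a macro definition's token trees structurally: for each enum-like node it hashes the variant discriminant first, then the variant's fields, and it length-prefixes every nested token list before hashing its elements. Below is a minimal, self-contained sketch of that pattern using toy types and the standard `DefaultHasher`; the names `Tree` and `hash_tree` are illustrative only, and the real code in `svh_visitor.rs` feeds an `IchHasher` and also hashes spans.

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::mem;

    // Toy stand-in for a token tree: either a leaf token or a delimited group.
    enum Tree {
        Leaf(String),
        Group(Vec<Tree>),
    }

    // Hash the discriminant first so different variants cannot collide,
    // then the variant's fields; length-prefix child lists so that, e.g.,
    // two sibling leaves and one group containing two leaves produce
    // different input streams for the hasher.
    fn hash_tree<H: Hasher>(tree: &Tree, state: &mut H) {
        mem::discriminant(tree).hash(state);
        match *tree {
            Tree::Leaf(ref text) => text.hash(state),
            Tree::Group(ref children) => {
                children.len().hash(state);
                for child in children {
                    hash_tree(child, state);
                }
            }
        }
    }

    fn main() {
        let tree = Tree::Group(vec![Tree::Leaf("a".into()), Tree::Leaf("b".into())]);
        let mut hasher = DefaultHasher::new();
        hash_tree(&tree, &mut hasher);
        println!("fingerprint: {:x}", hasher.finish());
    }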

3 files changed: +174 -9 lines
src/librustc_incremental/calculate_svh/mod.rs (+12 -2)

@@ -46,6 +46,7 @@ use self::caching_codemap_view::CachingCodemapView;
 use self::hasher::IchHasher;
 use ich::Fingerprint;
 
+
 mod def_path_hash;
 mod svh_visitor;
 mod caching_codemap_view;
@@ -88,7 +89,12 @@ impl<'a> ::std::ops::Index<&'a DepNode<DefId>> for IncrementalHashesMap {
     type Output = Fingerprint;
 
     fn index(&self, index: &'a DepNode<DefId>) -> &Fingerprint {
-        &self.hashes[index]
+        match self.hashes.get(index) {
+            Some(fingerprint) => fingerprint,
+            None => {
+                bug!("Could not find ICH for {:?}", index);
+            }
+        }
     }
 }
 
@@ -108,8 +114,12 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
         visitor.calculate_def_id(DefId::local(CRATE_DEF_INDEX),
                                  |v| visit::walk_crate(v, krate));
-        // FIXME(#37713) if foreign items were item likes, could use ItemLikeVisitor
         krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
+
+        for macro_def in krate.exported_macros.iter() {
+            visitor.calculate_node_id(macro_def.id,
+                                      |v| v.visit_macro_def(macro_def));
+        }
     });
 
     tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);

src/librustc_incremental/calculate_svh/svh_visitor.rs (+139 -7)

@@ -8,11 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// FIXME (#14132): Even this SVH computation still has implementation
-// artifacts: namely, the order of item declaration will affect the
-// hash computation, but for many kinds of items the order of
-// declaration should be irrelevant to the ABI.
-
 use self::SawExprComponent::*;
 use self::SawAbiComponent::*;
 use self::SawItemComponent::*;
@@ -24,6 +19,7 @@ use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
 use syntax::parse::token;
 use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
+use syntax::tokenstream;
 use rustc::hir;
 use rustc::hir::*;
 use rustc::hir::def::{Def, PathResolution};
@@ -769,9 +765,10 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
+        for tt in &macro_def.body {
+            self.hash_token_tree(tt);
+        }
         visit::walk_macro_def(self, macro_def)
-        // FIXME(mw): We should hash the body of the macro too but we don't
-        // have a stable way of doing so yet.
     }
 }
 
@@ -941,4 +938,139 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             self.overflow_checks_enabled = true;
         }
     }
+
+    fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
+        self.hash_discriminant(tt);
+        match *tt {
+            tokenstream::TokenTree::Token(span, ref token) => {
+                hash_span!(self, span);
+                self.hash_token(token, span);
+            }
+            tokenstream::TokenTree::Delimited(span, ref delimited) => {
+                hash_span!(self, span);
+                let tokenstream::Delimited {
+                    ref delim,
+                    open_span,
+                    ref tts,
+                    close_span,
+                } = **delimited;
+
+                delim.hash(self.st);
+                hash_span!(self, open_span);
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                hash_span!(self, close_span);
+            }
+            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
+                hash_span!(self, span);
+                let tokenstream::SequenceRepetition {
+                    ref tts,
+                    ref separator,
+                    op,
+                    num_captures,
+                } = **sequence_repetition;
+
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                self.hash_discriminant(separator);
+                if let Some(ref separator) = *separator {
+                    self.hash_token(separator, span);
+                }
+                op.hash(self.st);
+                num_captures.hash(self.st);
+            }
+        }
+    }
+
+    fn hash_token(&mut self,
+                  token: &token::Token,
+                  error_reporting_span: Span) {
+        self.hash_discriminant(token);
+        match *token {
+            token::Token::Eq |
+            token::Token::Lt |
+            token::Token::Le |
+            token::Token::EqEq |
+            token::Token::Ne |
+            token::Token::Ge |
+            token::Token::Gt |
+            token::Token::AndAnd |
+            token::Token::OrOr |
+            token::Token::Not |
+            token::Token::Tilde |
+            token::Token::At |
+            token::Token::Dot |
+            token::Token::DotDot |
+            token::Token::DotDotDot |
+            token::Token::Comma |
+            token::Token::Semi |
+            token::Token::Colon |
+            token::Token::ModSep |
+            token::Token::RArrow |
+            token::Token::LArrow |
+            token::Token::FatArrow |
+            token::Token::Pound |
+            token::Token::Dollar |
+            token::Token::Question |
+            token::Token::Underscore |
+            token::Token::Whitespace |
+            token::Token::Comment |
+            token::Token::Eof => {}
+
+            token::Token::BinOp(bin_op_token) |
+            token::Token::BinOpEq(bin_op_token) => bin_op_token.hash(self.st),
+
+            token::Token::OpenDelim(delim_token) |
+            token::Token::CloseDelim(delim_token) => delim_token.hash(self.st),
+
+            token::Token::Literal(ref lit, ref opt_name) => {
+                self.hash_discriminant(lit);
+                match *lit {
+                    token::Lit::Byte(val) |
+                    token::Lit::Char(val) |
+                    token::Lit::Integer(val) |
+                    token::Lit::Float(val) |
+                    token::Lit::Str_(val) |
+                    token::Lit::ByteStr(val) => val.as_str().hash(self.st),
+                    token::Lit::StrRaw(val, n) |
+                    token::Lit::ByteStrRaw(val, n) => {
+                        val.as_str().hash(self.st);
+                        n.hash(self.st);
+                    }
+                };
+                opt_name.map(ast::Name::as_str).hash(self.st);
+            }
+
+            token::Token::Ident(ident) |
+            token::Token::Lifetime(ident) |
+            token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
+            token::Token::MatchNt(ident1, ident2) => {
+                ident1.name.as_str().hash(self.st);
+                ident2.name.as_str().hash(self.st);
+            }
+
+            token::Token::Interpolated(ref non_terminal) => {
+                // FIXME(mw): This could be implemented properly. It's just a
+                //            lot of work, since we would need to hash the AST
+                //            in a stable way, in addition to the HIR.
+                //            Since this is hardly used anywhere, just emit a
+                //            warning for now.
+                if self.tcx.sess.opts.debugging_opts.incremental.is_some() {
+                    let msg = format!("Quasi-quoting might make incremental \
                                       compilation very inefficient: {:?}",
+                                      non_terminal);
+                    self.tcx.sess.span_warn(error_reporting_span, &msg[..]);
+                }
+
+                non_terminal.hash(self.st);
+            }
+
+            token::Token::DocComment(val) |
+            token::Token::Shebang(val) => val.as_str().hash(self.st),
+        }
+    }
 }

src/test/compile-fail/incr_comp_with_macro_export.rs (+23 -0)

@@ -0,0 +1,23 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Zincremental=tmp/cfail-tests/incr_comp_with_macro_export
+// must-compile-successfully
+
+
+// This test case makes sure that we can compile with incremental compilation
+// enabled when there are macros exported from this crate. (See #37756)
+
+#![crate_type="rlib"]
+
+#[macro_export]
+macro_rules! some_macro {
+    ($e:expr) => ($e + 1)
+}
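
Exported macros end up in crate metadata precisely so that downstream crates can use them, which is why they now need an ICH entry at all. A hypothetical downstream crate consuming the test crate above might look like this (the extern crate name is assumed from the test file name and is not part of the commit):

    // Hypothetical consumer of the rlib defined in the test above.
    #[macro_use]
    extern crate incr_comp_with_macro_export;

    fn main() {
        // some_macro!(e) expands to (e + 1), so this prints 42.
        println!("{}", some_macro!(41));
    }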
