Skip to content

Commit

Permalink
incr.comp.: Compute hashes of all query results.
Browse files Browse the repository at this point in the history
  • Loading branch information
michaelwoerister committed Sep 18, 2017
1 parent 3cf28f3 commit e6c9a53
Show file tree
Hide file tree
Showing 17 changed files with 828 additions and 84 deletions.
82 changes: 75 additions & 7 deletions src/librustc/ich/hcx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@ use hir::def_id::DefId;
use hir::map::DefPathHash;
use ich::{self, CachingCodemapView};
use session::config::DebugInfoLevel::NoDebugInfo;
use ty::TyCtxt;
use util::nodemap::{NodeMap, ItemLocalMap};
use ty::{self, TyCtxt, fast_reject};
use util::nodemap::{NodeMap, NodeSet, ItemLocalMap};

use std::cmp::Ord;
use std::hash as std_hash;
use std::collections::{HashMap, HashSet, BTreeMap};

Expand Down Expand Up @@ -47,6 +48,7 @@ pub struct StableHashingContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Controls how `ast::NodeId`s contribute to a stable hash. Raw NodeIds are
// not stable across compilation sessions, so they must either be skipped or
// be mapped to something session-independent (such as a `DefPath` hash).
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum NodeIdHashingMode {
// Contribute nothing to the hash; the NodeId is ignored unconditionally.
Ignore,
// Ignore the NodeId itself, but verify that ignoring it loses nothing:
// if the id has a corresponding entry in the `trait_map`, that entry
// still needs to be hashed (see the `HashStable` impl for `ast::NodeId`).
CheckedIgnore,
// Hash the stable `DefPath` hash associated with this NodeId.
HashDefPath,
// Hash the traits in scope for this NodeId -- used where a path's
// meaning depends on the trait environment.
HashTraitsInScope,
}
Expand Down Expand Up @@ -148,7 +150,7 @@ impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
self.overflow_checks_enabled = true;
}
let prev_hash_node_ids = self.node_id_hashing_mode;
self.node_id_hashing_mode = NodeIdHashingMode::Ignore;
self.node_id_hashing_mode = NodeIdHashingMode::CheckedIgnore;

f(self);

Expand Down Expand Up @@ -202,6 +204,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::N
let hir_id = hcx.tcx.hir.node_to_hir_id(*self);
match hcx.node_id_hashing_mode {
NodeIdHashingMode::Ignore => {
// Don't do anything.
}
NodeIdHashingMode::CheckedIgnore => {
// Most NodeIds in the HIR can be ignored, but if there is a
// corresponding entry in the `trait_map` we need to hash that.
// Make sure we don't ignore too much by checking that there is
Expand Down Expand Up @@ -321,7 +326,7 @@ pub fn hash_stable_hashmap<'a, 'gcx, 'tcx, K, V, R, SK, F, W>(
let mut keys: Vec<_> = map.keys()
.map(|k| (extract_stable_key(hcx, k), k))
.collect();
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
keys.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
keys.len().hash_stable(hcx, hasher);
for (stable_key, key) in keys {
stable_key.hash_stable(hcx, hasher);
Expand Down Expand Up @@ -354,8 +359,25 @@ pub fn hash_stable_nodemap<'a, 'tcx, 'gcx, V, W>(
where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
W: StableHasherResult,
{
hash_stable_hashmap(hcx, hasher, map, |hcx, node_id| {
hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
let definitions = hcx.tcx.hir.definitions();
hash_stable_hashmap(hcx, hasher, map, |_, node_id| {
let hir_id = definitions.node_to_hir_id(*node_id);
let owner_def_path_hash = definitions.def_path_hash(hir_id.owner);
(owner_def_path_hash, hir_id.local_id)
});
}

/// Stable-hashes a `NodeSet` by projecting every `NodeId` onto a pair of
/// (owner `DefPathHash`, item-local id), both of which are stable across
/// compilation sessions; the `hash_stable_hashset` helper takes care of
/// ordering the elements deterministically.
pub fn hash_stable_nodeset<'a, 'tcx, 'gcx, W>(
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
hasher: &mut StableHasher<W>,
map: &NodeSet)
where W: StableHasherResult,
{
// Grab the definitions table once, up front, so the closure below does
// not need to go through `hcx` again.
let defs = hcx.tcx.hir.definitions();
hash_stable_hashset(hcx, hasher, map, |_, id| {
let hir_id = defs.node_to_hir_id(*id);
(defs.def_path_hash(hir_id.owner), hir_id.local_id)
});
}

Expand Down Expand Up @@ -386,10 +408,56 @@ pub fn hash_stable_btreemap<'a, 'tcx, 'gcx, K, V, SK, F, W>(
let mut keys: Vec<_> = map.keys()
.map(|k| (extract_stable_key(hcx, k), k))
.collect();
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
keys.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
keys.len().hash_stable(hcx, hasher);
for (stable_key, key) in keys {
stable_key.hash_stable(hcx, hasher);
map[key].hash_stable(hcx, hasher);
}
}

/// Stable-hashes a collection of trait impls: first the blanket impls, then
/// the per-self-type ("non-blanket") impls. Every `DefId` is converted to
/// its `DefPathHash` and the resulting lists are sorted, so the hash does
/// not depend on the iteration order of the underlying containers.
pub fn hash_stable_trait_impls<'a, 'tcx, 'gcx, W, R>(
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
hasher: &mut StableHasher<W>,
blanket_impls: &Vec<DefId>,
non_blanket_impls: &HashMap<fast_reject::SimplifiedType, Vec<DefId>, R>)
where W: StableHasherResult,
R: std_hash::BuildHasher,
{
// Blanket impls: hash them as a sorted sequence of DefPathHashes.
{
let mut hashes: AccumulateVec<[_; 8]> = blanket_impls
.iter()
.map(|&def_id| hcx.def_path_hash(def_id))
.collect();
hashes.sort_unstable();
hashes.hash_stable(hcx, hasher);
}

// Non-blanket impls: establish a stable order over the simplified
// self-types, then hash each key followed by its (sorted) impl list.
{
let tcx = hcx.tcx();
let mut entries: AccumulateVec<[_; 8]> = non_blanket_impls
.keys()
.map(|key| (key, key.map_def(|d| tcx.def_path_hash(d))))
.collect();
// Sort by the stable (DefPathHash-based) form of the key.
entries.sort_unstable_by(|a, b| a.1.cmp(&b.1));
entries.len().hash_stable(hcx, hasher);
for (key, stable_key) in entries {
stable_key.hash_stable(hcx, hasher);
let mut impl_hashes: AccumulateVec<[_; 8]> = non_blanket_impls[key]
.iter()
.map(|&impl_id| hcx.def_path_hash(impl_id))
.collect();
impl_hashes.sort_unstable();
impl_hashes.hash_stable(hcx, hasher);
}
}
}

7 changes: 7 additions & 0 deletions src/librustc/ich/impls_cstore.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,3 +38,10 @@ impl_stable_hash_for!(enum middle::cstore::LinkagePreference {
RequireDynamic,
RequireStatic
});

// Derive a `HashStable` impl for `ExternCrate` covering all of its fields.
// NOTE(review): the macro presumably hashes the fields in the order listed
// here, so this order is hash-significant -- confirm before reordering.
impl_stable_hash_for!(struct middle::cstore::ExternCrate {
def_id,
span,
direct,
path_len
});
54 changes: 40 additions & 14 deletions src/librustc/ich/impls_hir.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,12 @@

use hir;
use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
use ich::{StableHashingContext, NodeIdHashingMode};
use std::mem;

use syntax::ast;

use ich::{self, StableHashingContext, NodeIdHashingMode};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
StableHasherResult};
use std::mem;
use syntax::ast;
use util::nodemap::DefIdSet;

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for DefId {
#[inline]
Expand All @@ -30,6 +29,16 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for DefId
}
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for DefIdSet
{
    /// `DefId`s are not stable across sessions, so hash the set by mapping
    /// each element to its stable `DefPathHash`; the helper orders the
    /// elements deterministically before feeding them to the hasher.
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
                                          hasher: &mut StableHasher<W>) {
        ich::hash_stable_hashset(hcx, hasher, self, |ctx, id| ctx.def_path_hash(*id));
    }
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::HirId {
#[inline]
Expand Down Expand Up @@ -235,7 +244,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::T
hir::TyTypeof(..) |
hir::TyErr |
hir::TyInfer => {
NodeIdHashingMode::Ignore
NodeIdHashingMode::CheckedIgnore
}
hir::TyPath(..) => {
NodeIdHashingMode::HashTraitsInScope
Expand Down Expand Up @@ -403,7 +412,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::P
hir::PatKind::Lit(..) |
hir::PatKind::Range(..) |
hir::PatKind::Slice(..) => {
NodeIdHashingMode::Ignore
NodeIdHashingMode::CheckedIgnore
}
hir::PatKind::Path(..) |
hir::PatKind::Struct(..) |
Expand Down Expand Up @@ -574,21 +583,21 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::E
hir::ExprRepeat(..) |
hir::ExprTup(..) => {
// For these we only hash the span when debuginfo is on.
(false, NodeIdHashingMode::Ignore)
(false, NodeIdHashingMode::CheckedIgnore)
}
// For the following, spans might be significant because of
// panic messages indicating the source location.
hir::ExprBinary(op, ..) => {
(hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::Ignore)
(hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::CheckedIgnore)
}
hir::ExprUnary(op, _) => {
(hcx.unop_can_panic_at_runtime(op), NodeIdHashingMode::Ignore)
(hcx.unop_can_panic_at_runtime(op), NodeIdHashingMode::CheckedIgnore)
}
hir::ExprAssignOp(op, ..) => {
(hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::Ignore)
(hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::CheckedIgnore)
}
hir::ExprIndex(..) => {
(true, NodeIdHashingMode::Ignore)
(true, NodeIdHashingMode::CheckedIgnore)
}
// For these we don't care about the span, but want to hash the
// trait in scope
Expand Down Expand Up @@ -899,7 +908,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::I
hir::ItemStatic(..) |
hir::ItemConst(..) |
hir::ItemFn(..) => {
(NodeIdHashingMode::Ignore, hcx.hash_spans())
(NodeIdHashingMode::CheckedIgnore, hcx.hash_spans())
}
hir::ItemUse(..) => {
(NodeIdHashingMode::HashTraitsInScope, false)
Expand All @@ -916,7 +925,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::I
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
(NodeIdHashingMode::Ignore, false)
(NodeIdHashingMode::CheckedIgnore, false)
}
};

Expand Down Expand Up @@ -1160,3 +1169,20 @@ for ::middle::lang_items::LangItem {
::std::hash::Hash::hash(self, hasher);
}
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
for hir::TraitCandidate {
    /// Hash in `HashDefPath` mode so that any NodeId reached here
    /// (NOTE(review): `import_id` looks like one -- confirm) is folded in
    /// via its stable `DefPath` rather than its raw, session-local value.
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
                                          hasher: &mut StableHasher<W>) {
        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
            // Hash the fields directly, in declaration order; keep this
            // list in sync if `TraitCandidate` ever grows more fields.
            self.def_id.hash_stable(hcx, hasher);
            self.import_id.hash_stable(hcx, hasher);
        });
    }
}
Loading

0 comments on commit e6c9a53

Please sign in to comment.