//===--- Passes/CallGraph.cpp ---------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//===----------------------------------------------------------------------===//

#include "CallGraph.h"

#define DEBUG_TYPE "callgraph"

#if defined(__x86_64__) && !defined(_MSC_VER)
#  if (!defined USE_SSECRC)
#    define USE_SSECRC
#  endif
#else
#  undef USE_SSECRC
#endif

namespace {

inline size_t hash_int64_fallback(int64_t key) {
  // "64 bit Mix Functions", from Thomas Wang's "Integer Hash Function."
  // http://www.concentric.net/~ttwang/tech/inthash.htm
  key = (~key) + (key << 21);  // key = (key << 21) - key - 1;
  key = key ^ ((unsigned long long)key >> 24);
  key = (key + (key << 3)) + (key << 8);  // key * 265
  key = key ^ ((unsigned long long)key >> 14);
  key = (key + (key << 2)) + (key << 4);  // key * 21
  key = key ^ ((unsigned long long)key >> 28);
  return static_cast<size_t>(static_cast<uint32_t>(key));
}

inline size_t hash_int64(int64_t k) {
#if defined(USE_SSECRC) && defined(__SSE4_2__)
  size_t h = 0;
  __asm("crc32q %1, %0\n" : "+r"(h) : "rm"(k));
  return h;
#else
  return hash_int64_fallback(k);
#endif
}

inline size_t hash_int64_pair(int64_t k1, int64_t k2) {
#if defined(USE_SSECRC) && defined(__SSE4_2__)
  // crc32 is commutative, so we need to perturb k1 so that (k1, k2) hashes
  // differently from (k2, k1).
  k1 += k1;
  __asm("crc32q %1, %0\n" : "+r"(k1) : "rm"(k2));
  return k1;
#else
  return (hash_int64(k1) << 1) ^ hash_int64(k2);
#endif
}

} // anonymous namespace

namespace llvm {
namespace bolt {

int64_t CallGraph::Arc::Hash::operator()(const Arc &Arc) const {
#ifdef USE_STD_HASH
  std::hash<int64_t> Hasher;
  return hashCombine(Hasher(Arc.src()), Arc.dst());
#else
  return hash_int64_pair(int64_t(Arc.src()), int64_t(Arc.dst()));
#endif
}

CallGraph::NodeId CallGraph::addNode(uint32_t Size, uint64_t Samples) {
  auto Id = Nodes.size();
  Nodes.emplace_back(Size, Samples);
  return Id;
}

// Add W to the weight of the arc (Src, Dst), creating the arc and the
// corresponding adjacency entries the first time it is seen.
const CallGraph::Arc &CallGraph::incArcWeight(NodeId Src, NodeId Dst, double W,
                                              double Offset) {
  auto Res = Arcs.emplace(Src, Dst, W);
  if (!Res.second) {
    // The arc already exists; just accumulate the weight.
    Res.first->Weight += W;
    return *Res.first;
  }
  Res.first->AvgCallOffset += Offset;
  Nodes[Src].Succs.push_back(Dst);
  Nodes[Dst].Preds.push_back(Src);
  return *Res.first;
}

void CallGraph::normalizeArcWeights(bool UseEdgeCounts) {
  // Normalize arc weights.
  if (!UseEdgeCounts) {
    for (NodeId FuncId = 0; FuncId < numNodes(); ++FuncId) {
      auto &Func = getNode(FuncId);
      for (auto Caller : Func.predecessors()) {
        auto Arc = findArc(Caller, FuncId);
        Arc->NormalizedWeight = Arc->weight() / Func.samples();
        Arc->AvgCallOffset /= Arc->weight();
        assert(Arc->AvgCallOffset < size(Caller));
      }
    }
  } else {
    for (NodeId FuncId = 0; FuncId < numNodes(); ++FuncId) {
      auto &Func = getNode(FuncId);
      for (auto Caller : Func.predecessors()) {
        auto Arc = findArc(Caller, FuncId);
        Arc->NormalizedWeight = Arc->weight() / Func.samples();
      }
    }
  }
}

} // namespace bolt
} // namespace llvm