// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/heap_profiler_allocation_context.h"

#include <algorithm>
#include <cstring>

#include "base/hash.h"
#include "base/macros.h"

namespace base {
namespace trace_event {

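// Stack frames are ordered and compared by their raw |value| pointer only;
// operator< allows StackFrame to be used as a key in ordered containers.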
bool operator<(const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value < rhs.value;
}

bool operator==(const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value == rhs.value;
}

bool operator!=(const StackFrame& lhs, const StackFrame& rhs) {
  return !(lhs == rhs);
}

Backtrace::Backtrace() : frame_count(0) {}

bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
  if (lhs.frame_count != rhs.frame_count)
    return false;
  return std::equal(lhs.frames, lhs.frames + lhs.frame_count, rhs.frames);
}

bool operator!=(const Backtrace& lhs, const Backtrace& rhs) {
  return !(lhs == rhs);
}

AllocationContext::AllocationContext() : type_name(nullptr) {}

AllocationContext::AllocationContext(const Backtrace& backtrace,
                                     const char* type_name)
    : backtrace(backtrace), type_name(type_name) {}

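// |type_name| is compared by pointer, not by string contents. This is only
// meaningful when equal type names share the same underlying string (e.g.
// string literals), which is how the heap profiler is expected to supply them.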
bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) {
  return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name);
}

bool operator!=(const AllocationContext& lhs, const AllocationContext& rhs) {
  return !(lhs == rhs);
}

}  // namespace trace_event
}  // namespace base

namespace BASE_HASH_NAMESPACE {
using base::trace_event::AllocationContext;
using base::trace_event::Backtrace;
using base::trace_event::StackFrame;

size_t hash<StackFrame>::operator()(const StackFrame& frame) const {
  return hash<const void*>()(frame.value);
}

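// Hashes only the frames that are in use: their raw |value| pointers are
// copied into a contiguous buffer and the buffer's bytes are hashed.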
size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const {
  const void* values[Backtrace::kMaxFrameCount];
  for (size_t i = 0; i != backtrace.frame_count; ++i) {
    values[i] = backtrace.frames[i].value;
  }
  return base::SuperFastHash(
      reinterpret_cast<const char*>(values),
      static_cast<int>(backtrace.frame_count * sizeof(*values)));
}

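// Combines the backtrace hash with a hash of the |type_name| pointer. As in
// operator== above, the type name is hashed by address, not by its contents.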
size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const {
  size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace);

  // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits,
  // because the magic number is a prime very close to 2^32 / golden ratio, but
  // will still redistribute keys bijectively on 64-bit architectures because
  // the magic number is coprime to 2^64.
  size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761;

  // Multiply one side to break the commutativity of +. Multiplication with a
  // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so
  // randomness is preserved.
  return (backtrace_hash * 3) + type_hash;
}
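
// Usage sketch: with the specializations above, AllocationContext can be used
// directly as the key of a hash container. The container and the names below
// (|bytes_by_context|, |context|, |size|) are illustrative only, not part of
// the profiler:
//
//   std::unordered_map<AllocationContext, size_t,
//                      BASE_HASH_NAMESPACE::hash<AllocationContext>>
//       bytes_by_context;
//   bytes_by_context[context] += size;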

}  // namespace BASE_HASH_NAMESPACE