Remove obsolete aggregating and non-working producers heap profilers.

2000 LOC are gone!

R=sgjesse@chromium.org
BUG=1481
Review URL: http://codereview.chromium.org/7247018

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8406 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

Commit f4bf8f8fee, parent 94e5f2f35a.
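After this change the only embedder-facing heap-profiling path left is the full snapshot. The snippet below is an illustrative sketch only, not part of the commit: it assumes the V8 API of this era (v8.h / v8-profiler.h, an initialized VM with an entered context), takes a kFull snapshot, and walks the root's children.

    #include <cstdio>
    #include "v8.h"
    #include "v8-profiler.h"

    // Sketch: take a full heap snapshot and print each top-level node.
    void DumpRootChildren() {
      v8::HandleScope scope;
      const v8::HeapSnapshot* snapshot =
          v8::HeapProfiler::TakeSnapshot(v8::String::New("full"),
                                         v8::HeapSnapshot::kFull);
      const v8::HeapGraphNode* root = snapshot->GetRoot();
      for (int i = 0; i < root->GetChildrenCount(); ++i) {
        const v8::HeapGraphNode* child = root->GetChild(i)->GetToNode();
        // GetId() stays stable across snapshots; GetSelfSize() is the node's own size.
        std::printf("node %llu: self size %d\n",
                    static_cast<unsigned long long>(child->GetId()),
                    child->GetSelfSize());
      }
    }

HeapGraphNode::GetInstancesCount() and HeapSnapshot::kAggregated, used by the deleted test further down, are exactly the pieces this commit removes.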
@@ -269,17 +269,10 @@ class V8EXPORT HeapGraphNode {
/**
* Returns node id. For the same heap object, the id remains the same
* across all snapshots. Not applicable to aggregated heap snapshots
* as they only contain aggregated instances.
* across all snapshots.
*/
uint64_t GetId() const;

/**
* Returns the number of instances. Only applicable to aggregated
* heap snapshots.
*/
int GetInstancesCount() const;

/** Returns node's own size, in bytes. */
int GetSelfSize() const;

@@ -323,9 +316,7 @@ class V8EXPORT HeapGraphNode {
class V8EXPORT HeapSnapshot {
public:
enum Type {
kFull = 0, // Heap snapshot with all instances and references.
kAggregated = 1 // Snapshot doesn't contain individual heap entries,
// instead they are grouped by constructor name.
kFull = 0 // Heap snapshot with all instances and references.
};
enum SerializationFormat {
kJSON = 0 // See format description near 'Serialize' method.
include/v8.h

@@ -2560,17 +2560,12 @@ typedef void (*GCCallback)();
/**
* Profiler modules.
*
* In V8, profiler consists of several modules: CPU profiler, and different
* kinds of heap profiling. Each can be turned on / off independently.
* When PROFILER_MODULE_HEAP_SNAPSHOT flag is passed to ResumeProfilerEx,
* modules are enabled only temporarily for making a snapshot of the heap.
* In V8, profiler consists of several modules. Each can be turned on / off
* independently.
*/
enum ProfilerModules {
PROFILER_MODULE_NONE = 0,
PROFILER_MODULE_CPU = 1,
PROFILER_MODULE_HEAP_STATS = 1 << 1,
PROFILER_MODULE_JS_CONSTRUCTORS = 1 << 2,
PROFILER_MODULE_HEAP_SNAPSHOT = 1 << 16
PROFILER_MODULE_CPU = 1
};
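After this hunk the only module flag left besides PROFILER_MODULE_NONE is the CPU profiler. A hedged usage sketch follows: V8::ResumeProfilerEx appears in the api.cc hunk below, while the matching PauseProfilerEx call and the tag value are assumptions made here purely for illustration (tags are only used to pair open-tag/close-tag log events).

    #include "v8.h"

    // Sketch only: drive the remaining flag-controlled module around a workload.
    void ProfileWorkload() {
      const int kTag = 1;  // Arbitrary pairing tag (assumption, not from the diff).
      v8::V8::ResumeProfilerEx(v8::PROFILER_MODULE_CPU, kTag);
      // ... run the JavaScript workload to be profiled ...
      v8::V8::PauseProfilerEx(v8::PROFILER_MODULE_CPU, kTag);
    }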
src/api.cc

@@ -4855,22 +4855,7 @@ bool V8::IsProfilerPaused() {
void V8::ResumeProfilerEx(int flags, int tag) {
#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
if (flags & PROFILER_MODULE_HEAP_SNAPSHOT) {
// Snapshot mode: resume modules, perform GC, then pause only
// those modules which haven't been started prior to making a
// snapshot.

// Make a GC prior to taking a snapshot.
isolate->heap()->CollectAllGarbage(false);
// Reset snapshot flag and CPU module flags.
flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
const int current_flags = isolate->logger()->GetActiveProfilerModules();
isolate->logger()->ResumeProfiler(flags, tag);
isolate->heap()->CollectAllGarbage(false);
isolate->logger()->PauseProfiler(~current_flags & flags, tag);
} else {
isolate->logger()->ResumeProfiler(flags, tag);
}
isolate->logger()->ResumeProfiler(flags, tag);
#endif
}

@@ -5720,7 +5705,6 @@ uint64_t HeapGraphNode::GetId() const {
#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
ASSERT(ToInternal(this)->snapshot()->type() != i::HeapSnapshot::kAggregated);
return ToInternal(this)->id();
#else
return 0;
@@ -5728,18 +5712,6 @@ uint64_t HeapGraphNode::GetId() const {
}


int HeapGraphNode::GetInstancesCount() const {
#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetInstancesCount");
ASSERT(ToInternal(this)->snapshot()->type() == i::HeapSnapshot::kAggregated);
return static_cast<int>(ToInternal(this)->id());
#else
return 0;
#endif
}


int HeapGraphNode::GetSelfSize() const {
#ifdef ENABLE_LOGGING_AND_PROFILING
i::Isolate* isolate = i::Isolate::Current();

@@ -5987,9 +5959,6 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
case HeapSnapshot::kFull:
internal_type = i::HeapSnapshot::kFull;
break;
case HeapSnapshot::kAggregated:
internal_type = i::HeapSnapshot::kAggregated;
break;
default:
UNREACHABLE();
}
@@ -474,7 +474,6 @@ DEFINE_bool(log_handles, false, "Log global handle events.")
DEFINE_bool(log_snapshot_positions, false,
"log positions of (de)serialized objects in the snapshot.")
DEFINE_bool(log_suspect, false, "Log suspect operations.")
DEFINE_bool(log_producers, false, "Log stack traces of JS objects allocations.")
DEFINE_bool(prof, false,
"Log statistical profiling information (implies --log-code).")
DEFINE_bool(prof_auto, true,
File diff suppressed because it is too large.
@@ -28,9 +28,7 @@
#ifndef V8_HEAP_PROFILER_H_
#define V8_HEAP_PROFILER_H_

#include "allocation.h"
#include "isolate.h"
#include "zone-inl.h"

namespace v8 {
namespace internal {

@@ -81,10 +79,6 @@ class HeapProfiler {
return snapshots_->is_tracking_objects();
}

// Obsolete interface.
// Write a single heap sample to the log file.
static void WriteSample();

private:
HeapProfiler();
~HeapProfiler();

@@ -103,295 +97,6 @@ class HeapProfiler {
#endif // ENABLE_LOGGING_AND_PROFILING
};


#ifdef ENABLE_LOGGING_AND_PROFILING

// JSObjectsCluster describes a group of JS objects that are
// considered equivalent in terms of a particular profile.
class JSObjectsCluster BASE_EMBEDDED {
public:
// These special cases are used in retainer profile.
enum SpecialCase {
ROOTS = 1,
GLOBAL_PROPERTY = 2,
CODE = 3,
SELF = 100 // This case is used in ClustersCoarser only.
};

JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
explicit JSObjectsCluster(String* constructor)
: constructor_(constructor), instance_(NULL) {}
explicit JSObjectsCluster(SpecialCase special)
: constructor_(FromSpecialCase(special)), instance_(NULL) {}
JSObjectsCluster(String* constructor, Object* instance)
: constructor_(constructor), instance_(instance) {}

static int CompareConstructors(const JSObjectsCluster& a,
const JSObjectsCluster& b) {
// Strings are unique, so it is sufficient to compare their pointers.
return a.constructor_ == b.constructor_ ? 0
: (a.constructor_ < b.constructor_ ? -1 : 1);
}
static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
// Strings are unique, so it is sufficient to compare their pointers.
const int cons_cmp = CompareConstructors(a, b);
return cons_cmp == 0 ?
(a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
: cons_cmp;
}
static int Compare(const JSObjectsCluster* a, const JSObjectsCluster* b) {
return Compare(*a, *b);
}

bool is_null() const { return constructor_ == NULL; }
bool can_be_coarsed() const { return instance_ != NULL; }
String* constructor() const { return constructor_; }
Object* instance() const { return instance_; }

const char* GetSpecialCaseName() const;
void Print(StringStream* accumulator) const;
// Allows null clusters to be printed.
void DebugPrint(StringStream* accumulator) const;

private:
static String* FromSpecialCase(SpecialCase special) {
// We use symbols that are illegal JS identifiers to identify special cases.
// Their actual value is irrelevant for us.
switch (special) {
case ROOTS: return HEAP->result_symbol();
case GLOBAL_PROPERTY: return HEAP->catch_var_symbol();
case CODE: return HEAP->code_symbol();
case SELF: return HEAP->this_symbol();
default:
UNREACHABLE();
return NULL;
}
}

String* constructor_;
Object* instance_;
};


struct JSObjectsClusterTreeConfig {
typedef JSObjectsCluster Key;
typedef NumberAndSizeInfo Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;


// ConstructorHeapProfile is responsible for gathering and logging
// "constructor profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class ConstructorHeapProfile BASE_EMBEDDED {
public:
ConstructorHeapProfile();
virtual ~ConstructorHeapProfile() {}
void CollectStats(HeapObject* obj);
void PrintStats();

template<class Callback>
void ForEach(Callback* callback) { js_objects_info_tree_.ForEach(callback); }
// Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
virtual void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);

private:
ZoneScope zscope_;
JSObjectsClusterTree js_objects_info_tree_;
};


// JSObjectsRetainerTree is used to represent retainer graphs using
// adjacency list form:
//
// Cluster -> (Cluster -> NumberAndSizeInfo)
//
// Subordinate splay trees are stored by pointer. They are zone-allocated,
// so it isn't needed to manage their lifetime.
//
struct JSObjectsRetainerTreeConfig {
typedef JSObjectsCluster Key;
typedef JSObjectsClusterTree* Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;


class ClustersCoarser BASE_EMBEDDED {
public:
ClustersCoarser();

// Processes a given retainer graph.
void Process(JSObjectsRetainerTree* tree);

// Returns an equivalent cluster (can be the cluster itself).
// If the given cluster doesn't have an equivalent, returns null cluster.
JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
// Returns whether a cluster can be substitued with an equivalent and thus,
// skipped in some cases.
bool HasAnEquivalent(const JSObjectsCluster& cluster);

// Used by JSObjectsRetainerTree::ForEach.
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);

private:
// Stores a list of back references for a cluster.
struct ClusterBackRefs {
explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
ClusterBackRefs(const ClusterBackRefs& src);
ClusterBackRefs& operator=(const ClusterBackRefs& src);

static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
void SortRefs() { refs.Sort(JSObjectsCluster::Compare); }
static void SortRefsIterator(ClusterBackRefs* ref) { ref->SortRefs(); }

JSObjectsCluster cluster;
ZoneList<JSObjectsCluster> refs;
};
typedef ZoneList<ClusterBackRefs> SimilarityList;

// A tree for storing a list of equivalents for a cluster.
struct ClusterEqualityConfig {
typedef JSObjectsCluster Key;
typedef JSObjectsCluster Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;

static int ClusterBackRefsCmp(const ClusterBackRefs* a,
const ClusterBackRefs* b) {
return ClusterBackRefs::Compare(*a, *b);
}
int DoProcess(JSObjectsRetainerTree* tree);
int FillEqualityTree();

static const int kInitialBackrefsListCapacity = 2;
static const int kInitialSimilarityListCapacity = 2000;
// Number of passes for finding equivalents. Limits the length of paths
// that can be considered equivalent.
static const int kMaxPassesCount = 10;

ZoneScope zscope_;
SimilarityList sim_list_;
EqualityTree eq_tree_;
ClusterBackRefs* current_pair_;
JSObjectsRetainerTree* current_set_;
const JSObjectsCluster* self_;
};


// RetainerHeapProfile is responsible for gathering and logging
// "retainer profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class RetainerTreeAggregator;

class RetainerHeapProfile BASE_EMBEDDED {
public:
class Printer {
public:
virtual ~Printer() {}
virtual void PrintRetainers(const JSObjectsCluster& cluster,
const StringStream& retainers) = 0;
};

RetainerHeapProfile();
~RetainerHeapProfile();

RetainerTreeAggregator* aggregator() { return aggregator_; }
ClustersCoarser* coarser() { return &coarser_; }
JSObjectsRetainerTree* retainers_tree() { return &retainers_tree_; }

void CollectStats(HeapObject* obj);
void CoarseAndAggregate();
void PrintStats();
void DebugPrintStats(Printer* printer);
void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);

private:
ZoneScope zscope_;
JSObjectsRetainerTree retainers_tree_;
ClustersCoarser coarser_;
RetainerTreeAggregator* aggregator_;
};


class AggregatedHeapSnapshot {
public:
AggregatedHeapSnapshot();
~AggregatedHeapSnapshot();

HistogramInfo* info() { return info_; }
ConstructorHeapProfile* js_cons_profile() { return &js_cons_profile_; }
RetainerHeapProfile* js_retainer_profile() { return &js_retainer_profile_; }

private:
HistogramInfo* info_;
ConstructorHeapProfile js_cons_profile_;
RetainerHeapProfile js_retainer_profile_;
};


class HeapEntriesMap;
class HeapEntriesAllocator;

class AggregatedHeapSnapshotGenerator {
public:
explicit AggregatedHeapSnapshotGenerator(AggregatedHeapSnapshot* snapshot);
void GenerateSnapshot();
void FillHeapSnapshot(HeapSnapshot* snapshot);

static const int kAllStringsType = LAST_TYPE + 1;

private:
void CalculateStringsStats();
void CollectStats(HeapObject* obj);
template<class Iterator>
void IterateRetainers(
HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map);

AggregatedHeapSnapshot* agg_snapshot_;
};


class ProducerHeapProfile {
public:
void Setup();
void RecordJSObjectAllocation(Object* obj) {
if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
}

private:
ProducerHeapProfile() : can_log_(false) { }

void DoRecordJSObjectAllocation(Object* obj);
Isolate* isolate_;
bool can_log_;

friend class Isolate;

DISALLOW_COPY_AND_ASSIGN(ProducerHeapProfile);
};

#endif // ENABLE_LOGGING_AND_PROFILING

} } // namespace v8::internal

#endif // V8_HEAP_PROFILER_H_
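The deleted JSObjectsRetainerTree comment above describes the data shape the aggregating profiler maintained: Cluster -> (Cluster -> NumberAndSizeInfo). As a rough stand-in sketch (plain std::map instead of the zone-allocated splay trees, string keys instead of symbol-based clusters, and assuming the outer key is the retained cluster while the inner keys are its retainers), the structure amounts to:

    #include <map>
    #include <string>

    struct NumberAndSize {
      int number;  // How many objects fall into this bucket.
      int bytes;   // Their combined size.
    };

    typedef std::string Cluster;  // Stand-in: a constructor name.
    typedef std::map<Cluster, std::map<Cluster, NumberAndSize> > RetainerTree;

    // Record one "retainer references retained" edge in the adjacency list.
    void StoreReference(RetainerTree* tree, const Cluster& retained,
                        const Cluster& retainer, int size_in_bytes) {
      NumberAndSize& info = (*tree)[retained][retainer];
      info.number += 1;
      info.bytes += size_in_bytes;
    }

This is only meant to make the removed classes easier to read; it is not V8 code and makes no claim about the exact direction of the edges in the original implementation.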
src/heap.cc

@@ -523,11 +523,6 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
GarbageCollectionEpilogue();
}


#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log_gc) HeapProfiler::WriteSample();
#endif

return next_gc_likely_to_collect_more;
}

@@ -2984,9 +2979,6 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
HeapObject::cast(result)->set_map(map);
#ifdef ENABLE_LOGGING_AND_PROFILING
isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
#endif
return result;
}

@@ -3435,9 +3427,6 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
}
// Return the new clone.
#ifdef ENABLE_LOGGING_AND_PROFILING
isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
#endif
return clone;
}

@@ -5122,11 +5111,6 @@ bool Heap::Setup(bool create_heap_objects) {
LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
LOG(isolate_, IntPtrTEvent("heap-available", Available()));

#ifdef ENABLE_LOGGING_AND_PROFILING
// This should be called only after initial objects have been created.
isolate_->producer_heap_profile()->Setup();
#endif

return true;
}
@@ -1445,10 +1445,6 @@ Isolate::Isolate()
debugger_ = NULL;
#endif

#ifdef ENABLE_LOGGING_AND_PROFILING
producer_heap_profile_ = NULL;
#endif

handle_scope_data_.Initialize();

#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
@@ -1537,11 +1533,6 @@ void Isolate::SetIsolateThreadLocals(Isolate* isolate,
Isolate::~Isolate() {
TRACE_ISOLATE(destructor);

#ifdef ENABLE_LOGGING_AND_PROFILING
delete producer_heap_profile_;
producer_heap_profile_ = NULL;
#endif

delete unicode_cache_;
unicode_cache_ = NULL;

@@ -1657,11 +1648,6 @@ bool Isolate::PreInit() {
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;

#ifdef ENABLE_LOGGING_AND_PROFILING
producer_heap_profile_ = new ProducerHeapProfile();
producer_heap_profile_->isolate_ = this;
#endif

state_ = PREINITIALIZED;
return true;
}
@@ -69,7 +69,6 @@ class InlineRuntimeFunctionsTable;
class NoAllocationStringAllocator;
class PcToCodeCache;
class PreallocatedMemoryThread;
class ProducerHeapProfile;
class RegExpStack;
class SaveContext;
class UnicodeCache;
@@ -907,12 +906,6 @@ class Isolate {

inline bool DebuggerHasBreakPoints();

#ifdef ENABLE_LOGGING_AND_PROFILING
ProducerHeapProfile* producer_heap_profile() {
return producer_heap_profile_;
}
#endif

#ifdef DEBUG
HistogramInfo* heap_histograms() { return heap_histograms_; }

@@ -1172,10 +1165,6 @@ class Isolate {
Debug* debug_;
#endif

#ifdef ENABLE_LOGGING_AND_PROFILING
ProducerHeapProfile* producer_heap_profile_;
#endif

#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
type name##_;
ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
src/log.cc

@@ -521,7 +521,6 @@ Logger::Logger()
log_events_(NULL),
logging_nesting_(0),
cpu_profiler_nesting_(0),
heap_profiler_nesting_(0),
log_(new Log(this)),
name_buffer_(new NameBuffer),
address_to_name_map_(NULL),
@@ -1286,19 +1285,6 @@ void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
}


void Logger::HeapSampleStats(const char* space, const char* kind,
intptr_t capacity, intptr_t used) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-sample-stats,\"%s\",\"%s\","
"%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
space, kind, capacity, used);
msg.WriteToLogFile();
#endif
}


void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
@@ -1319,72 +1305,6 @@ void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
}


void Logger::HeapSampleJSConstructorEvent(const char* constructor,
int number, int bytes) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-js-cons-item,%s,%d,%d\n", constructor, number, bytes);
msg.WriteToLogFile();
#endif
}

// Event starts with comma, so we don't have it in the format string.
static const char kEventText[] = "heap-js-ret-item,%s";
// We take placeholder strings into account, but it's OK to be conservative.
static const int kEventTextLen = sizeof(kEventText)/sizeof(kEventText[0]);

void Logger::HeapSampleJSRetainersEvent(
const char* constructor, const char* event) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
const int cons_len = StrLength(constructor);
const int event_len = StrLength(event);
int pos = 0;
// Retainer lists can be long. We may need to split them into multiple events.
do {
LogMessageBuilder msg(this);
msg.Append(kEventText, constructor);
int to_write = event_len - pos;
if (to_write > Log::kMessageBufferSize - (cons_len + kEventTextLen)) {
int cut_pos = pos + Log::kMessageBufferSize - (cons_len + kEventTextLen);
ASSERT(cut_pos < event_len);
while (cut_pos > pos && event[cut_pos] != ',') --cut_pos;
if (event[cut_pos] != ',') {
// Crash in debug mode, skip in release mode.
ASSERT(false);
return;
}
// Append a piece of event that fits, without trailing comma.
msg.AppendStringPart(event + pos, cut_pos - pos);
// Start next piece with comma.
pos = cut_pos;
} else {
msg.Append("%s", event + pos);
pos += event_len;
}
msg.Append('\n');
msg.WriteToLogFile();
} while (pos < event_len);
#endif
}


void Logger::HeapSampleJSProducerEvent(const char* constructor,
Address* stack) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-js-prod-item,%s", constructor);
while (*stack != NULL) {
msg.Append(",0x%" V8PRIxPTR, *stack++);
}
msg.Append("\n");
msg.WriteToLogFile();
#endif
}


void Logger::DebugTag(const char* call_site_tag) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
@@ -1447,9 +1367,6 @@ int Logger::GetActiveProfilerModules() {
if (profiler_ != NULL && !profiler_->paused()) {
result |= PROFILER_MODULE_CPU;
}
if (FLAG_log_gc) {
result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
}
return result;
}

@@ -1471,13 +1388,6 @@ void Logger::PauseProfiler(int flags, int tag) {
--logging_nesting_;
}
}
if (flags &
(PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
if (--heap_profiler_nesting_ == 0) {
FLAG_log_gc = false;
--logging_nesting_;
}
}
if (tag != 0) {
UncheckedIntEvent("close-tag", tag);
}
@@ -1505,13 +1415,6 @@ void Logger::ResumeProfiler(int flags, int tag) {
profiler_->resume();
}
}
if (flags &
(PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
if (heap_profiler_nesting_++ == 0) {
++logging_nesting_;
FLAG_log_gc = true;
}
}
}
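For reference, the removed Logger methods above emitted the heap-profile records that --log-gc used to append to the V8 log. Reconstructed from the format strings in this hunk, with made-up constructor names, counts, sizes, and addresses purely for illustration, the records looked roughly like:

    heap-sample-stats,"Heap","allocated",1048576,524288
    heap-js-cons-item,Point,3,72
    heap-js-ret-item,Point,(global property);1,Line;2
    heap-js-prod-item,Point,0x2ad84a7e,0x2ad84b10

After this commit none of these record types is written, which is why tools/tickprocessor.js (further down) drops its handlers for them.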
@@ -401,7 +401,6 @@ class Logger {

int logging_nesting_;
int cpu_profiler_nesting_;
int heap_profiler_nesting_;

Log* log_;
@@ -638,8 +638,7 @@ class HeapSnapshotsCollection;
class HeapSnapshot {
public:
enum Type {
kFull = v8::HeapSnapshot::kFull,
kAggregated = v8::HeapSnapshot::kAggregated
kFull = v8::HeapSnapshot::kFull
};

HeapSnapshot(HeapSnapshotsCollection* collection,
@@ -9,381 +9,10 @@
#include "cctest.h"
#include "heap-profiler.h"
#include "snapshot.h"
#include "string-stream.h"
#include "utils-inl.h"
#include "zone-inl.h"
#include "../include/v8-profiler.h"

namespace i = v8::internal;
using i::ClustersCoarser;
using i::JSObjectsCluster;
using i::JSObjectsRetainerTree;
using i::JSObjectsClusterTree;
using i::RetainerHeapProfile;


namespace {

class ConstructorHeapProfileTestHelper : public i::ConstructorHeapProfile {
public:
ConstructorHeapProfileTestHelper()
: i::ConstructorHeapProfile(),
f_name_(FACTORY->NewStringFromAscii(i::CStrVector("F"))),
f_count_(0) {
}

void Call(const JSObjectsCluster& cluster,
const i::NumberAndSizeInfo& number_and_size) {
if (f_name_->Equals(cluster.constructor())) {
CHECK_EQ(f_count_, 0);
f_count_ = number_and_size.number();
CHECK_GT(f_count_, 0);
}
}

int f_count() { return f_count_; }

private:
i::Handle<i::String> f_name_;
int f_count_;
};

} // namespace


TEST(ConstructorProfile) {
v8::HandleScope scope;
LocalContext env;

CompileRun(
"function F() {} // A constructor\n"
"var f1 = new F();\n"
"var f2 = new F();\n");

ConstructorHeapProfileTestHelper cons_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
for (i::HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next())
cons_profile.CollectStats(obj);
CHECK_EQ(0, cons_profile.f_count());
cons_profile.PrintStats();
CHECK_EQ(2, cons_profile.f_count());
}


static JSObjectsCluster AddHeapObjectToTree(JSObjectsRetainerTree* tree,
i::String* constructor,
int instance,
JSObjectsCluster* ref1 = NULL,
JSObjectsCluster* ref2 = NULL,
JSObjectsCluster* ref3 = NULL) {
JSObjectsCluster o(constructor, reinterpret_cast<i::Object*>(instance));
JSObjectsClusterTree* o_tree = new JSObjectsClusterTree();
JSObjectsClusterTree::Locator o_loc;
if (ref1 != NULL) o_tree->Insert(*ref1, &o_loc);
if (ref2 != NULL) o_tree->Insert(*ref2, &o_loc);
if (ref3 != NULL) o_tree->Insert(*ref3, &o_loc);
JSObjectsRetainerTree::Locator loc;
tree->Insert(o, &loc);
loc.set_value(o_tree);
return o;
}


static void AddSelfReferenceToTree(JSObjectsRetainerTree* tree,
JSObjectsCluster* self_ref) {
JSObjectsRetainerTree::Locator loc;
CHECK(tree->Find(*self_ref, &loc));
JSObjectsClusterTree::Locator o_loc;
CHECK_NE(NULL, loc.value());
loc.value()->Insert(*self_ref, &o_loc);
}


static inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source,
const JSObjectsCluster& expected,
const char* value_source,
const JSObjectsCluster& value) {
if (JSObjectsCluster::Compare(expected, value) != 0) {
i::HeapStringAllocator allocator;
i::StringStream stream(&allocator);
stream.Add("# Expected: ");
expected.DebugPrint(&stream);
stream.Add("\n# Found: ");
value.DebugPrint(&stream);
V8_Fatal(file, line, "CHECK_EQ(%s, %s) failed\n%s",
expected_source, value_source,
*stream.ToCString());
}
}


static inline void CheckNonEqualsHelper(const char* file, int line,
const char* expected_source,
const JSObjectsCluster& expected,
const char* value_source,
const JSObjectsCluster& value) {
if (JSObjectsCluster::Compare(expected, value) == 0) {
i::HeapStringAllocator allocator;
i::StringStream stream(&allocator);
stream.Add("# !Expected: ");
expected.DebugPrint(&stream);
stream.Add("\n# Found: ");
value.DebugPrint(&stream);
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n%s",
expected_source, value_source,
*stream.ToCString());
}
}


TEST(ClustersCoarserSimple) {
v8::HandleScope scope;
LocalContext env;

i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);

JSObjectsRetainerTree tree;
JSObjectsCluster function(HEAP->function_class_symbol());
JSObjectsCluster a(*FACTORY->NewStringFromAscii(i::CStrVector("A")));
JSObjectsCluster b(*FACTORY->NewStringFromAscii(i::CStrVector("B")));

// o1 <- Function
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100, &function);
// o2 <- Function
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x200, &function);
// o3 <- A, B
JSObjectsCluster o3 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &a, &b);
// o4 <- B, A
JSObjectsCluster o4 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x400, &b, &a);
// o5 <- A, B, Function
JSObjectsCluster o5 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x500,
&a, &b, &function);

ClustersCoarser coarser;
coarser.Process(&tree);

CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_EQ(coarser.GetCoarseEquivalent(o3), coarser.GetCoarseEquivalent(o4));
CHECK_NE(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o3));
CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o5));
}


TEST(ClustersCoarserMultipleConstructors) {
v8::HandleScope scope;
LocalContext env;

i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);

JSObjectsRetainerTree tree;
JSObjectsCluster function(HEAP->function_class_symbol());

// o1 <- Function
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100, &function);
// a1 <- Function
JSObjectsCluster a1 =
AddHeapObjectToTree(&tree, HEAP->Array_symbol(), 0x1000, &function);
// o2 <- Function
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x200, &function);
// a2 <- Function
JSObjectsCluster a2 =
AddHeapObjectToTree(&tree, HEAP->Array_symbol(), 0x2000, &function);

ClustersCoarser coarser;
coarser.Process(&tree);

CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_EQ(coarser.GetCoarseEquivalent(a1), coarser.GetCoarseEquivalent(a2));
}


TEST(ClustersCoarserPathsTraversal) {
v8::HandleScope scope;
LocalContext env;

i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);

JSObjectsRetainerTree tree;

// On the following graph:
//
// p
// <- o21 <- o11 <-
// q o
// <- o22 <- o12 <-
// r
//
// we expect that coarser will deduce equivalences: p ~ q ~ r,
// o21 ~ o22, and o11 ~ o12.

JSObjectsCluster o =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100);
JSObjectsCluster o11 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x110, &o);
JSObjectsCluster o12 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x120, &o);
JSObjectsCluster o21 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x210, &o11);
JSObjectsCluster o22 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x220, &o12);
JSObjectsCluster p =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &o21);
JSObjectsCluster q =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x310, &o21, &o22);
JSObjectsCluster r =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x320, &o22);

ClustersCoarser coarser;
coarser.Process(&tree);

CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(o11));
CHECK_EQ(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(o12));
CHECK_EQ(coarser.GetCoarseEquivalent(o21), coarser.GetCoarseEquivalent(o22));
CHECK_NE(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(o21));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(p));
CHECK_EQ(coarser.GetCoarseEquivalent(p), coarser.GetCoarseEquivalent(q));
CHECK_EQ(coarser.GetCoarseEquivalent(q), coarser.GetCoarseEquivalent(r));
CHECK_NE(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(p));
CHECK_NE(coarser.GetCoarseEquivalent(o21), coarser.GetCoarseEquivalent(p));
}


TEST(ClustersCoarserSelf) {
v8::HandleScope scope;
LocalContext env;

i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);

JSObjectsRetainerTree tree;

// On the following graph:
//
// p (self-referencing)
// <- o1 <-
// q (self-referencing) o
// <- o2 <-
// r (self-referencing)
//
// we expect that coarser will deduce equivalences: p ~ q ~ r, o1 ~ o2;

JSObjectsCluster o =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100);
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x110, &o);
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x120, &o);
JSObjectsCluster p =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &o1);
AddSelfReferenceToTree(&tree, &p);
JSObjectsCluster q =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x310, &o1, &o2);
AddSelfReferenceToTree(&tree, &q);
JSObjectsCluster r =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x320, &o2);
AddSelfReferenceToTree(&tree, &r);

ClustersCoarser coarser;
coarser.Process(&tree);

CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(o1));
CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(p));
CHECK_EQ(coarser.GetCoarseEquivalent(p), coarser.GetCoarseEquivalent(q));
CHECK_EQ(coarser.GetCoarseEquivalent(q), coarser.GetCoarseEquivalent(r));
CHECK_NE(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(p));
}


namespace {

class RetainerProfilePrinter : public RetainerHeapProfile::Printer {
public:
RetainerProfilePrinter() : stream_(&allocator_), lines_(100) {}

void PrintRetainers(const JSObjectsCluster& cluster,
const i::StringStream& retainers) {
cluster.Print(&stream_);
stream_.Add("%s", *(retainers.ToCString()));
stream_.Put('\0');
}

const char* GetRetainers(const char* constructor) {
FillLines();
const size_t cons_len = strlen(constructor);
for (int i = 0; i < lines_.length(); ++i) {
if (strncmp(constructor, lines_[i], cons_len) == 0 &&
lines_[i][cons_len] == ',') {
return lines_[i] + cons_len + 1;
}
}
return NULL;
}

private:
void FillLines() {
if (lines_.length() > 0) return;
stream_.Put('\0');
stream_str_ = stream_.ToCString();
const char* pos = *stream_str_;
while (pos != NULL && *pos != '\0') {
lines_.Add(pos);
pos = strchr(pos, '\0');
if (pos != NULL) ++pos;
}
}

i::HeapStringAllocator allocator_;
i::StringStream stream_;
i::SmartPointer<const char> stream_str_;
i::List<const char*> lines_;
};

} // namespace


TEST(RetainerProfile) {
v8::HandleScope scope;
LocalContext env;

CompileRun(
"function A() {}\n"
"function B(x) { this.x = x; }\n"
"function C(x) { this.x1 = x; this.x2 = x; }\n"
"var a = new A();\n"
"var b1 = new B(a), b2 = new B(a);\n"
"var c = new C(a);");

RetainerHeapProfile ret_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
for (i::HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next())
ret_profile.CollectStats(obj);
ret_profile.CoarseAndAggregate();
RetainerProfilePrinter printer;
ret_profile.DebugPrintStats(&printer);
const char* retainers_of_a = printer.GetRetainers("A");
// The order of retainers is unspecified, so we check string length, and
// verify each retainer separately.
CHECK_EQ(i::StrLength("(global property);1,B;2,C;2"),
i::StrLength(retainers_of_a));
CHECK(strstr(retainers_of_a, "(global property);1") != NULL);
CHECK(strstr(retainers_of_a, "B;2") != NULL);
CHECK(strstr(retainers_of_a, "C;2") != NULL);
CHECK_EQ("(global property);2", printer.GetRetainers("B"));
CHECK_EQ("(global property);1", printer.GetRetainers("C"));
}


namespace {

@@ -726,116 +355,6 @@ TEST(HeapSnapshotRootPreservedAfterSorting) {
}


static const v8::HeapGraphNode* GetChild(
const v8::HeapGraphNode* node,
v8::HeapGraphNode::Type type,
const char* name,
const v8::HeapGraphNode* after = NULL) {
bool ignore_child = after == NULL ? false : true;
for (int i = 0, count = node->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = node->GetChild(i);
const v8::HeapGraphNode* child = prop->GetToNode();
v8::String::AsciiValue child_name(child->GetName());
if (!ignore_child
&& child->GetType() == type
&& strcmp(name, *child_name) == 0)
return child;
if (after != NULL && child == after) ignore_child = false;
}
return NULL;
}

static bool IsNodeRetainedAs(const v8::HeapGraphNode* node,
int element) {
for (int i = 0, count = node->GetRetainersCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = node->GetRetainer(i);
if (prop->GetType() == v8::HeapGraphEdge::kElement
&& element == prop->GetName()->Int32Value())
return true;
}
return false;
}

TEST(AggregatedHeapSnapshot) {
v8::HandleScope scope;
LocalContext env;

CompileRun(
"function A() {}\n"
"function B(x) { this.x = x; }\n"
"var a = new A();\n"
"var b = new B(a);");
const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(
v8::String::New("agg"), v8::HeapSnapshot::kAggregated);
const v8::HeapGraphNode* strings = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kHidden,
"STRING_TYPE");
CHECK_NE(NULL, strings);
CHECK_NE(0, strings->GetSelfSize());
CHECK_NE(0, strings->GetInstancesCount());
const v8::HeapGraphNode* maps = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kHidden,
"MAP_TYPE");
CHECK_NE(NULL, maps);
CHECK_NE(0, maps->GetSelfSize());
CHECK_NE(0, maps->GetInstancesCount());

const v8::HeapGraphNode* a = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"A");
CHECK_NE(NULL, a);
CHECK_NE(0, a->GetSelfSize());
CHECK_EQ(1, a->GetInstancesCount());

const v8::HeapGraphNode* b = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"B");
CHECK_NE(NULL, b);
CHECK_NE(0, b->GetSelfSize());
CHECK_EQ(1, b->GetInstancesCount());

const v8::HeapGraphNode* glob_prop = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"(global property)",
b);
CHECK_NE(NULL, glob_prop);
CHECK_EQ(0, glob_prop->GetSelfSize());
CHECK_EQ(0, glob_prop->GetInstancesCount());
CHECK_NE(0, glob_prop->GetChildrenCount());

const v8::HeapGraphNode* a_from_glob_prop = GetChild(
glob_prop,
v8::HeapGraphNode::kObject,
"A");
CHECK_NE(NULL, a_from_glob_prop);
CHECK_EQ(0, a_from_glob_prop->GetSelfSize());
CHECK_EQ(0, a_from_glob_prop->GetInstancesCount());
CHECK_EQ(0, a_from_glob_prop->GetChildrenCount()); // Retains nothing.
CHECK(IsNodeRetainedAs(a_from_glob_prop, 1)); // (global propery) has 1 ref.

const v8::HeapGraphNode* b_with_children = GetChild(
snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"B",
b);
CHECK_NE(NULL, b_with_children);
CHECK_EQ(0, b_with_children->GetSelfSize());
CHECK_EQ(0, b_with_children->GetInstancesCount());
CHECK_NE(0, b_with_children->GetChildrenCount());

const v8::HeapGraphNode* a_from_b = GetChild(
b_with_children,
v8::HeapGraphNode::kObject,
"A");
CHECK_NE(NULL, a_from_b);
CHECK_EQ(0, a_from_b->GetSelfSize());
CHECK_EQ(0, a_from_b->GetInstancesCount());
CHECK_EQ(0, a_from_b->GetChildrenCount()); // Retains nothing.
CHECK(IsNodeRetainedAs(a_from_b, 1)); // B has 1 ref to A.
}


TEST(HeapEntryDominator) {
// The graph looks like this:
//
@@ -1048,21 +567,6 @@ TEST(HeapSnapshotJSONSerializationAborting) {
}


// Must not crash in debug mode.
TEST(AggregatedHeapSnapshotJSONSerialization) {
v8::HandleScope scope;
LocalContext env;

const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(
v8::String::New("agg"), v8::HeapSnapshot::kAggregated);
TestJSONStream stream;
snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
CHECK_GT(stream.size(), 0);
CHECK_EQ(1, stream.eos_signaled());
}


TEST(HeapSnapshotGetNodeById) {
v8::HandleScope scope;
LocalContext env;
@@ -708,24 +708,6 @@ TEST(IsLoggingPreserved) {
CHECK(LOGGER->is_logging());
LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
CHECK(LOGGER->is_logging());

CHECK(LOGGER->is_logging());
LOGGER->ResumeProfiler(
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
LOGGER->PauseProfiler(
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());

CHECK(LOGGER->is_logging());
LOGGER->ResumeProfiler(
v8::PROFILER_MODULE_CPU |
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
LOGGER->PauseProfiler(
v8::PROFILER_MODULE_CPU |
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
}
@@ -169,17 +169,12 @@ function TickProcessor(
processor: this.processHeapSampleBegin },
'heap-sample-end': { parsers: [null, null],
processor: this.processHeapSampleEnd },
'heap-js-prod-item': { parsers: [null, 'var-args'],
processor: this.processJSProducer },
// Ignored events.
'profiler': null,
'function-creation': null,
'function-move': null,
'function-delete': null,
'heap-sample-stats': null,
'heap-sample-item': null,
'heap-js-cons-item': null,
'heap-js-ret-item': null,
// Obsolete row types.
'code-allocate': null,
'begin-code-region': null,
@@ -401,17 +396,6 @@ TickProcessor.prototype.processHeapSampleEnd = function(space, state) {
};


TickProcessor.prototype.processJSProducer = function(constructor, stack) {
if (!this.currentProducerProfile_) return;
if (stack.length == 0) return;
var first = stack.shift();
var processedStack =
this.profile_.resolveAndFilterFuncs_(this.processStack(first, 0, stack));
processedStack.unshift(constructor);
this.currentProducerProfile_.addPath(processedStack);
};


TickProcessor.prototype.printStatistics = function() {
print('Statistical profiling result from ' + this.lastLogFileName_ +
', (' + this.ticks_.total +