Remove the obsolete aggregating heap profiler and the non-working producers heap profiler.

2000 LOC are gone!

R=sgjesse@chromium.org
BUG=1481

Review URL: http://codereview.chromium.org/7247018

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8406 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
@@ -269,17 +269,10 @@ class V8EXPORT HeapGraphNode {
   /**
    * Returns node id. For the same heap object, the id remains the same
-   * across all snapshots. Not applicable to aggregated heap snapshots
-   * as they only contain aggregated instances.
+   * across all snapshots.
    */
   uint64_t GetId() const;
 
-  /**
-   * Returns the number of instances. Only applicable to aggregated
-   * heap snapshots.
-   */
-  int GetInstancesCount() const;
-
   /** Returns node's own size, in bytes. */
   int GetSelfSize() const;
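
For context on the surviving API (not part of this change): ids stay stable across snapshots, so a node found in one snapshot can be located again in a later one. A minimal sketch under that guarantee, using only accessors that appear elsewhere in this diff (GetChildrenCount, GetChild, GetToNode); the FindById helper itself is hypothetical:

// Hypothetical helper: locate a node by its stable id in a later snapshot.
// The heap graph is cyclic, so a real traversal must track visited nodes;
// a depth cap stands in for that here.
static const v8::HeapGraphNode* FindById(const v8::HeapGraphNode* node,
                                         uint64_t id, int depth) {
  if (node->GetId() == id) return node;
  if (depth == 0) return NULL;
  for (int i = 0, count = node->GetChildrenCount(); i < count; ++i) {
    const v8::HeapGraphNode* found =
        FindById(node->GetChild(i)->GetToNode(), id, depth - 1);
    if (found != NULL) return found;
  }
  return NULL;
}
// Usage (assuming HeapSnapshot::GetRoot()):
//   FindById(snapshot2->GetRoot(), id_from_snapshot1, 16);
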
@@ -323,9 +316,7 @@ class V8EXPORT HeapGraphNode {
 class V8EXPORT HeapSnapshot {
  public:
   enum Type {
-    kFull = 0,       // Heap snapshot with all instances and references.
-    kAggregated = 1  // Snapshot doesn't contain individual heap entries,
-                     // instead they are grouped by constructor name.
+    kFull = 0  // Heap snapshot with all instances and references.
   };
   enum SerializationFormat {
     kJSON = 0  // See format description near 'Serialize' method.
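
For context (not part of this change): with kAggregated gone, kFull is the only snapshot type left. A minimal usage sketch, based on the HeapProfiler::TakeSnapshot signature visible in the hunk further down; the explicit type argument is assumed to be optional:

// Hypothetical embedder code: take a full heap snapshot.
const v8::HeapSnapshot* snapshot = v8::HeapProfiler::TakeSnapshot(
    v8::String::New("dump"), v8::HeapSnapshot::kFull);
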
@@ -2560,17 +2560,12 @@ typedef void (*GCCallback)();
 /**
  * Profiler modules.
  *
- * In V8, profiler consists of several modules: CPU profiler, and different
- * kinds of heap profiling. Each can be turned on / off independently.
- * When PROFILER_MODULE_HEAP_SNAPSHOT flag is passed to ResumeProfilerEx,
- * modules are enabled only temporarily for making a snapshot of the heap.
+ * In V8, profiler consists of several modules. Each can be turned on / off
+ * independently.
  */
 enum ProfilerModules {
   PROFILER_MODULE_NONE = 0,
-  PROFILER_MODULE_CPU = 1,
-  PROFILER_MODULE_HEAP_STATS = 1 << 1,
-  PROFILER_MODULE_JS_CONSTRUCTORS = 1 << 2,
-  PROFILER_MODULE_HEAP_SNAPSHOT = 1 << 16
+  PROFILER_MODULE_CPU = 1
 };
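
For context (not part of this change): after this patch only the CPU module can be toggled through the flags bitmask. A minimal sketch; V8::PauseProfilerEx is assumed to be the symmetric counterpart of the V8::ResumeProfilerEx shown in the next hunk, and tag 0 means "no tag" (see Logger::PauseProfiler further down):

// Hypothetical embedder code: toggle just the CPU profiler module.
v8::V8::ResumeProfilerEx(v8::PROFILER_MODULE_CPU, 0);
// ... run the workload to be profiled ...
v8::V8::PauseProfilerEx(v8::PROFILER_MODULE_CPU, 0);
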
@@ -4855,22 +4855,7 @@ bool V8::IsProfilerPaused() {
 void V8::ResumeProfilerEx(int flags, int tag) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   i::Isolate* isolate = i::Isolate::Current();
-  if (flags & PROFILER_MODULE_HEAP_SNAPSHOT) {
-    // Snapshot mode: resume modules, perform GC, then pause only
-    // those modules which haven't been started prior to making a
-    // snapshot.
-    // Make a GC prior to taking a snapshot.
-    isolate->heap()->CollectAllGarbage(false);
-    // Reset snapshot flag and CPU module flags.
-    flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
-    const int current_flags = isolate->logger()->GetActiveProfilerModules();
-    isolate->logger()->ResumeProfiler(flags, tag);
-    isolate->heap()->CollectAllGarbage(false);
-    isolate->logger()->PauseProfiler(~current_flags & flags, tag);
-  } else {
-    isolate->logger()->ResumeProfiler(flags, tag);
-  }
+  isolate->logger()->ResumeProfiler(flags, tag);
 #endif
 }
@@ -5720,7 +5705,6 @@ uint64_t HeapGraphNode::GetId() const {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   i::Isolate* isolate = i::Isolate::Current();
   IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
-  ASSERT(ToInternal(this)->snapshot()->type() != i::HeapSnapshot::kAggregated);
   return ToInternal(this)->id();
 #else
   return 0;
@@ -5728,18 +5712,6 @@ uint64_t HeapGraphNode::GetId() const {
 }
 
-int HeapGraphNode::GetInstancesCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  i::Isolate* isolate = i::Isolate::Current();
-  IsDeadCheck(isolate, "v8::HeapGraphNode::GetInstancesCount");
-  ASSERT(ToInternal(this)->snapshot()->type() == i::HeapSnapshot::kAggregated);
-  return static_cast<int>(ToInternal(this)->id());
-#else
-  return 0;
-#endif
-}
-
 int HeapGraphNode::GetSelfSize() const {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   i::Isolate* isolate = i::Isolate::Current();
@@ -5987,9 +5959,6 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
     case HeapSnapshot::kFull:
       internal_type = i::HeapSnapshot::kFull;
       break;
-    case HeapSnapshot::kAggregated:
-      internal_type = i::HeapSnapshot::kAggregated;
-      break;
     default:
       UNREACHABLE();
   }
@@ -474,7 +474,6 @@ DEFINE_bool(log_handles, false, "Log global handle events.")
 DEFINE_bool(log_snapshot_positions, false,
             "log positions of (de)serialized objects in the snapshot.")
 DEFINE_bool(log_suspect, false, "Log suspect operations.")
-DEFINE_bool(log_producers, false, "Log stack traces of JS objects allocations.")
 DEFINE_bool(prof, false,
             "Log statistical profiling information (implies --log-code).")
 DEFINE_bool(prof_auto, true,
@@ -28,294 +28,13 @@
#include "v8.h"
#include "heap-profiler.h"
#include "frames-inl.h"
#include "global-handles.h"
#include "profile-generator.h"
#include "string-stream.h"
namespace v8 {
namespace internal {
#ifdef ENABLE_LOGGING_AND_PROFILING
namespace {
// Clusterizer is a set of helper functions for converting
// object references into clusters.
class Clusterizer : public AllStatic {
public:
static JSObjectsCluster Clusterize(HeapObject* obj) {
return Clusterize(obj, true);
}
static void InsertIntoTree(JSObjectsClusterTree* tree,
HeapObject* obj, bool fine_grain);
static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
const JSObjectsCluster& cluster) {
InsertIntoTree(tree, cluster, 0);
}
private:
static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
static int CalculateNetworkSize(JSObject* obj);
static int GetObjectSize(HeapObject* obj) {
return obj->IsJSObject() ?
CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
}
static void InsertIntoTree(JSObjectsClusterTree* tree,
const JSObjectsCluster& cluster, int size);
};
JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
if (obj->IsJSObject()) {
JSObject* js_obj = JSObject::cast(obj);
String* constructor = GetConstructorNameForHeapProfile(
JSObject::cast(js_obj));
// Differentiate Object and Array instances.
if (fine_grain && (constructor == HEAP->Object_symbol() ||
constructor == HEAP->Array_symbol())) {
return JSObjectsCluster(constructor, obj);
} else {
return JSObjectsCluster(constructor);
}
} else if (obj->IsString()) {
return JSObjectsCluster(HEAP->String_symbol());
} else if (obj->IsJSGlobalPropertyCell()) {
return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
} else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
return JSObjectsCluster(JSObjectsCluster::CODE);
}
return JSObjectsCluster();
}
void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
HeapObject* obj, bool fine_grain) {
JSObjectsCluster cluster = Clusterize(obj, fine_grain);
if (cluster.is_null()) return;
InsertIntoTree(tree, cluster, GetObjectSize(obj));
}
void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
const JSObjectsCluster& cluster, int size) {
JSObjectsClusterTree::Locator loc;
tree->Insert(cluster, &loc);
NumberAndSizeInfo number_and_size = loc.value();
number_and_size.increment_number(1);
number_and_size.increment_bytes(size);
loc.set_value(number_and_size);
}
int Clusterizer::CalculateNetworkSize(JSObject* obj) {
int size = obj->Size();
// If 'properties' and 'elements' are non-empty (thus, non-shared),
// take their size into account.
if (obj->properties() != HEAP->empty_fixed_array()) {
size += obj->properties()->Size();
}
if (obj->elements() != HEAP->empty_fixed_array()) {
size += obj->elements()->Size();
}
// For functions, also account non-empty context and literals sizes.
if (obj->IsJSFunction()) {
JSFunction* f = JSFunction::cast(obj);
if (f->unchecked_context()->IsContext()) {
size += f->context()->Size();
}
if (f->literals()->length() != 0) {
size += f->literals()->Size();
}
}
return size;
}
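
For readers of the removed code above: CalculateNetworkSize charges an object for its own size plus any backing stores it does not share (non-empty 'properties' and 'elements', and, for functions, a real context and non-empty literals). An illustrative sketch with made-up sizes, not V8 code:

// Hypothetical numbers only: a 40-byte function object with a 24-byte
// non-empty properties array, shared (empty) elements, and a 16-byte
// context.
int NetworkSizeExample() {
  int size = 40;  // obj->Size()
  size += 24;     // properties != empty_fixed_array, so counted
  // elements == empty_fixed_array: shared, not counted
  size += 16;     // the function's own context
  return size;    // 80
}
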
// A helper class for recording back references.
class ReferencesExtractor : public ObjectVisitor {
public:
ReferencesExtractor(const JSObjectsCluster& cluster,
RetainerHeapProfile* profile)
: cluster_(cluster),
profile_(profile),
inside_array_(false) {
}
void VisitPointer(Object** o) {
if ((*o)->IsFixedArray() && !inside_array_) {
// Traverse one level deep for data members that are fixed arrays.
// This covers the case of 'elements' and 'properties' of JSObject,
// and function contexts.
inside_array_ = true;
FixedArray::cast(*o)->Iterate(this);
inside_array_ = false;
} else if ((*o)->IsHeapObject()) {
profile_->StoreReference(cluster_, HeapObject::cast(*o));
}
}
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) VisitPointer(p);
}
private:
const JSObjectsCluster& cluster_;
RetainerHeapProfile* profile_;
bool inside_array_;
};
// A printer interface implementation for the Retainers profile.
class RetainersPrinter : public RetainerHeapProfile::Printer {
public:
void PrintRetainers(const JSObjectsCluster& cluster,
const StringStream& retainers) {
HeapStringAllocator allocator;
StringStream stream(&allocator);
cluster.Print(&stream);
LOG(ISOLATE,
HeapSampleJSRetainersEvent(
*(stream.ToCString()), *(retainers.ToCString())));
}
};
// Visitor for printing a cluster tree.
class ClusterTreePrinter BASE_EMBEDDED {
public:
explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
Print(stream_, cluster, number_and_size);
}
static void Print(StringStream* stream,
const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
StringStream* stream_;
};
void ClusterTreePrinter::Print(StringStream* stream,
const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
stream->Put(',');
cluster.Print(stream);
stream->Add(";%d", number_and_size.number());
}
// Visitor for printing a retainer tree.
class SimpleRetainerTreePrinter BASE_EMBEDDED {
public:
explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer)
: printer_(printer) {}
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
private:
RetainerHeapProfile::Printer* printer_;
};
void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
JSObjectsClusterTree* tree) {
HeapStringAllocator allocator;
StringStream stream(&allocator);
ClusterTreePrinter retainers_printer(&stream);
tree->ForEach(&retainers_printer);
printer_->PrintRetainers(cluster, stream);
}
// Visitor for aggregating references count of equivalent clusters.
class RetainersAggregator BASE_EMBEDDED {
public:
RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree)
: coarser_(coarser), dest_tree_(dest_tree) {}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
ClustersCoarser* coarser_;
JSObjectsClusterTree* dest_tree_;
};
void RetainersAggregator::Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
if (eq.is_null()) eq = cluster;
JSObjectsClusterTree::Locator loc;
dest_tree_->Insert(eq, &loc);
NumberAndSizeInfo aggregated_number = loc.value();
aggregated_number.increment_number(number_and_size.number());
loc.set_value(aggregated_number);
}
// Visitor for printing retainers tree. Aggregates equivalent retainer clusters.
class AggregatingRetainerTreePrinter BASE_EMBEDDED {
public:
AggregatingRetainerTreePrinter(ClustersCoarser* coarser,
RetainerHeapProfile::Printer* printer)
: coarser_(coarser), printer_(printer) {}
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
private:
ClustersCoarser* coarser_;
RetainerHeapProfile::Printer* printer_;
};
void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
JSObjectsClusterTree* tree) {
if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
JSObjectsClusterTree dest_tree_;
RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
tree->ForEach(&retainers_aggregator);
HeapStringAllocator allocator;
StringStream stream(&allocator);
ClusterTreePrinter retainers_printer(&stream);
dest_tree_.ForEach(&retainers_printer);
printer_->PrintRetainers(cluster, stream);
}
} // namespace
// A helper class for building a retainers tree, that aggregates
// all equivalent clusters.
class RetainerTreeAggregator {
public:
explicit RetainerTreeAggregator(ClustersCoarser* coarser)
: coarser_(coarser) {}
void Process(JSObjectsRetainerTree* input_tree) {
input_tree->ForEach(this);
}
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
JSObjectsRetainerTree& output_tree() { return output_tree_; }
private:
ClustersCoarser* coarser_;
JSObjectsRetainerTree output_tree_;
};
void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
JSObjectsClusterTree* tree) {
JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
if (eq.is_null()) return;
JSObjectsRetainerTree::Locator loc;
if (output_tree_.Insert(eq, &loc)) {
loc.set_value(new JSObjectsClusterTree());
}
RetainersAggregator retainers_aggregator(coarser_, loc.value());
tree->ForEach(&retainers_aggregator);
}
HeapProfiler::HeapProfiler()
: snapshots_(new HeapSnapshotsCollection()),
next_snapshot_uid_(1) {
@@ -409,14 +128,6 @@ HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
generation_completed = generator.GenerateSnapshot();
break;
}
case HeapSnapshot::kAggregated: {
HEAP->CollectAllGarbage(true);
AggregatedHeapSnapshot agg_snapshot;
AggregatedHeapSnapshotGenerator generator(&agg_snapshot);
generator.GenerateSnapshot();
generator.FillHeapSnapshot(result);
break;
}
default:
UNREACHABLE();
}
@@ -468,705 +179,6 @@ void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
snapshots_->ObjectMoveEvent(from, to);
}
const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;
ConstructorHeapProfile::ConstructorHeapProfile()
: zscope_(Isolate::Current(), DELETE_ON_EXIT) {
}
void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
HeapStringAllocator allocator;
StringStream stream(&allocator);
cluster.Print(&stream);
LOG(ISOLATE,
HeapSampleJSConstructorEvent(*(stream.ToCString()),
number_and_size.number(),
number_and_size.bytes()));
}
void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}
void ConstructorHeapProfile::PrintStats() {
js_objects_info_tree_.ForEach(this);
}
static const char* GetConstructorName(const char* name) {
return name[0] != '\0' ? name : "(anonymous)";
}
const char* JSObjectsCluster::GetSpecialCaseName() const {
if (constructor_ == FromSpecialCase(ROOTS)) {
return "(roots)";
} else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) {
return "(global property)";
} else if (constructor_ == FromSpecialCase(CODE)) {
return "(code)";
} else if (constructor_ == FromSpecialCase(SELF)) {
return "(self)";
}
return NULL;
}
void JSObjectsCluster::Print(StringStream* accumulator) const {
ASSERT(!is_null());
const char* special_case_name = GetSpecialCaseName();
if (special_case_name != NULL) {
accumulator->Add(special_case_name);
} else {
SmartPointer<char> s_name(
constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
accumulator->Add("%s", GetConstructorName(*s_name));
if (instance_ != NULL) {
accumulator->Add(":%p", static_cast<void*>(instance_));
}
}
}
void JSObjectsCluster::DebugPrint(StringStream* accumulator) const {
if (!is_null()) {
Print(accumulator);
} else {
accumulator->Add("(null cluster)");
}
}
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
const JSObjectsCluster& cluster_)
: cluster(cluster_), refs(kInitialBackrefsListCapacity) {
}
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
const ClustersCoarser::ClusterBackRefs& src)
: cluster(src.cluster), refs(src.refs.capacity()) {
refs.AddAll(src.refs);
}
inline ClustersCoarser::ClusterBackRefs&
ClustersCoarser::ClusterBackRefs::operator=(
const ClustersCoarser::ClusterBackRefs& src) {
if (this == &src) return *this;
cluster = src.cluster;
refs.Clear();
refs.AddAll(src.refs);
return *this;
}
inline int ClustersCoarser::ClusterBackRefs::Compare(
const ClustersCoarser::ClusterBackRefs& a,
const ClustersCoarser::ClusterBackRefs& b) {
int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster);
if (cmp != 0) return cmp;
if (a.refs.length() < b.refs.length()) return -1;
if (a.refs.length() > b.refs.length()) return 1;
for (int i = 0; i < a.refs.length(); ++i) {
int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]);
if (cmp != 0) return cmp;
}
return 0;
}
ClustersCoarser::ClustersCoarser()
: zscope_(Isolate::Current(), DELETE_ON_EXIT),
sim_list_(ClustersCoarser::kInitialSimilarityListCapacity),
current_pair_(NULL),
current_set_(NULL),
self_(NULL) {
}
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
JSObjectsClusterTree* tree) {
if (!cluster.can_be_coarsed()) return;
ClusterBackRefs pair(cluster);
ASSERT(current_pair_ == NULL);
current_pair_ = &pair;
current_set_ = new JSObjectsRetainerTree();
self_ = &cluster;
tree->ForEach(this);
sim_list_.Add(pair);
current_pair_ = NULL;
current_set_ = NULL;
self_ = NULL;
}
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
ASSERT(current_pair_ != NULL);
ASSERT(current_set_ != NULL);
ASSERT(self_ != NULL);
JSObjectsRetainerTree::Locator loc;
if (JSObjectsCluster::Compare(*self_, cluster) == 0) {
current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF));
return;
}
JSObjectsCluster eq = GetCoarseEquivalent(cluster);
if (!eq.is_null()) {
if (current_set_->Find(eq, &loc)) return;
current_pair_->refs.Add(eq);
current_set_->Insert(eq, &loc);
} else {
current_pair_->refs.Add(cluster);
}
}
void ClustersCoarser::Process(JSObjectsRetainerTree* tree) {
int last_eq_clusters = -1;
for (int i = 0; i < kMaxPassesCount; ++i) {
sim_list_.Clear();
const int curr_eq_clusters = DoProcess(tree);
// If no new cluster equivalents discovered, abort processing.
if (last_eq_clusters == curr_eq_clusters) break;
last_eq_clusters = curr_eq_clusters;
}
}
int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
tree->ForEach(this);
sim_list_.Iterate(ClusterBackRefs::SortRefsIterator);
sim_list_.Sort(ClusterBackRefsCmp);
return FillEqualityTree();
}
JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
const JSObjectsCluster& cluster) {
if (!cluster.can_be_coarsed()) return JSObjectsCluster();
EqualityTree::Locator loc;
return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
}
bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
// Return true for coarsible clusters that have a non-identical equivalent.
if (!cluster.can_be_coarsed()) return false;
JSObjectsCluster eq = GetCoarseEquivalent(cluster);
return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0;
}
int ClustersCoarser::FillEqualityTree() {
int eq_clusters_count = 0;
int eq_to = 0;
bool first_added = false;
for (int i = 1; i < sim_list_.length(); ++i) {
if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
EqualityTree::Locator loc;
if (!first_added) {
// Add self-equivalence, if we have more than one item in this
// equivalence class.
eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
loc.set_value(sim_list_[eq_to].cluster);
first_added = true;
}
eq_tree_.Insert(sim_list_[i].cluster, &loc);
loc.set_value(sim_list_[eq_to].cluster);
++eq_clusters_count;
} else {
eq_to = i;
first_added = false;
}
}
return eq_clusters_count;
}
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
NULL;
RetainerHeapProfile::RetainerHeapProfile()
: zscope_(Isolate::Current(), DELETE_ON_EXIT),
aggregator_(NULL) {
JSObjectsCluster roots(JSObjectsCluster::ROOTS);
ReferencesExtractor extractor(roots, this);
HEAP->IterateRoots(&extractor, VISIT_ONLY_STRONG);
}
RetainerHeapProfile::~RetainerHeapProfile() {
delete aggregator_;
}
void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
HeapObject* ref) {
JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
if (ref_cluster.is_null()) return;
JSObjectsRetainerTree::Locator ref_loc;
if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
ref_loc.set_value(new JSObjectsClusterTree());
}
JSObjectsClusterTree* referenced_by = ref_loc.value();
Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
}
void RetainerHeapProfile::CollectStats(HeapObject* obj) {
const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
if (cluster.is_null()) return;
ReferencesExtractor extractor(cluster, this);
obj->Iterate(&extractor);
}
void RetainerHeapProfile::CoarseAndAggregate() {
coarser_.Process(&retainers_tree_);
ASSERT(aggregator_ == NULL);
aggregator_ = new RetainerTreeAggregator(&coarser_);
aggregator_->Process(&retainers_tree_);
}
void RetainerHeapProfile::DebugPrintStats(
RetainerHeapProfile::Printer* printer) {
// Print clusters that have no equivalents, aggregating their retainers.
AggregatingRetainerTreePrinter agg_printer(&coarser_, printer);
retainers_tree_.ForEach(&agg_printer);
// Print clusters that have equivalents.
SimpleRetainerTreePrinter s_printer(printer);
aggregator_->output_tree().ForEach(&s_printer);
}
void RetainerHeapProfile::PrintStats() {
RetainersPrinter printer;
DebugPrintStats(&printer);
}
//
// HeapProfiler class implementation.
//
static void StackWeakReferenceCallback(Persistent<Value> object,
void* trace) {
DeleteArray(static_cast<Address*>(trace));
object.Dispose();
}
static void PrintProducerStackTrace(Object* obj, void* trace) {
if (!obj->IsJSObject()) return;
String* constructor = GetConstructorNameForHeapProfile(JSObject::cast(obj));
SmartPointer<char> s_name(
constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
LOG(ISOLATE,
HeapSampleJSProducerEvent(GetConstructorName(*s_name),
reinterpret_cast<Address*>(trace)));
}
void HeapProfiler::WriteSample() {
Isolate* isolate = Isolate::Current();
LOG(isolate, HeapSampleBeginEvent("Heap", "allocated"));
LOG(isolate,
HeapSampleStats(
"Heap", "allocated", HEAP->CommittedMemory(), HEAP->SizeOfObjects()));
AggregatedHeapSnapshot snapshot;
AggregatedHeapSnapshotGenerator generator(&snapshot);
generator.GenerateSnapshot();
HistogramInfo* info = snapshot.info();
for (int i = FIRST_NONSTRING_TYPE;
i <= AggregatedHeapSnapshotGenerator::kAllStringsType;
++i) {
if (info[i].bytes() > 0) {
LOG(isolate,
HeapSampleItemEvent(info[i].name(), info[i].number(),
info[i].bytes()));
}
}
snapshot.js_cons_profile()->PrintStats();
snapshot.js_retainer_profile()->PrintStats();
isolate->global_handles()->IterateWeakRoots(PrintProducerStackTrace,
StackWeakReferenceCallback);
LOG(isolate, HeapSampleEndEvent("Heap", "allocated"));
}
AggregatedHeapSnapshot::AggregatedHeapSnapshot()
: info_(NewArray<HistogramInfo>(
AggregatedHeapSnapshotGenerator::kAllStringsType + 1)) {
#define DEF_TYPE_NAME(name) info_[name].set_name(#name);
INSTANCE_TYPE_LIST(DEF_TYPE_NAME);
#undef DEF_TYPE_NAME
info_[AggregatedHeapSnapshotGenerator::kAllStringsType].set_name(
"STRING_TYPE");
}
AggregatedHeapSnapshot::~AggregatedHeapSnapshot() {
DeleteArray(info_);
}
AggregatedHeapSnapshotGenerator::AggregatedHeapSnapshotGenerator(
AggregatedHeapSnapshot* agg_snapshot)
: agg_snapshot_(agg_snapshot) {
}
void AggregatedHeapSnapshotGenerator::CalculateStringsStats() {
HistogramInfo* info = agg_snapshot_->info();
HistogramInfo& strings = info[kAllStringsType];
// Lump all the string types together.
#define INCREMENT_SIZE(type, size, name, camel_name) \
strings.increment_number(info[type].number()); \
strings.increment_bytes(info[type].bytes());
STRING_TYPE_LIST(INCREMENT_SIZE);
#undef INCREMENT_SIZE
}
void AggregatedHeapSnapshotGenerator::CollectStats(HeapObject* obj) {
InstanceType type = obj->map()->instance_type();
ASSERT(0 <= type && type <= LAST_TYPE);
agg_snapshot_->info()[type].increment_number(1);
agg_snapshot_->info()[type].increment_bytes(obj->Size());
}
void AggregatedHeapSnapshotGenerator::GenerateSnapshot() {
HeapIterator iterator(HeapIterator::kFilterUnreachable);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
CollectStats(obj);
agg_snapshot_->js_cons_profile()->CollectStats(obj);
agg_snapshot_->js_retainer_profile()->CollectStats(obj);
}
CalculateStringsStats();
agg_snapshot_->js_retainer_profile()->CoarseAndAggregate();
}
class CountingConstructorHeapProfileIterator {
public:
CountingConstructorHeapProfileIterator()
: entities_count_(0), children_count_(0) {
}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
++entities_count_;
children_count_ += number_and_size.number();
}
int entities_count() { return entities_count_; }
int children_count() { return children_count_; }
private:
int entities_count_;
int children_count_;
};
static HeapEntry* AddEntryFromAggregatedSnapshot(HeapSnapshot* snapshot,
int* root_child_index,
HeapEntry::Type type,
const char* name,
int count,
int size,
int children_count,
int retainers_count) {
HeapEntry* entry = snapshot->AddEntry(
type, name, count, size, children_count, retainers_count);
ASSERT(entry != NULL);
snapshot->root()->SetUnidirElementReference(*root_child_index,
*root_child_index + 1,
entry);
*root_child_index = *root_child_index + 1;
return entry;
}
class AllocatingConstructorHeapProfileIterator {
public:
AllocatingConstructorHeapProfileIterator(HeapSnapshot* snapshot,
int* root_child_index)
: snapshot_(snapshot),
root_child_index_(root_child_index) {
}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
const char* name = cluster.GetSpecialCaseName();
if (name == NULL) {
name = snapshot_->collection()->names()->GetFunctionName(
cluster.constructor());
}
AddEntryFromAggregatedSnapshot(snapshot_,
root_child_index_,
HeapEntry::kObject,
name,
number_and_size.number(),
number_and_size.bytes(),
0,
0);
}
private:
HeapSnapshot* snapshot_;
int* root_child_index_;
};
static HeapObject* ClusterAsHeapObject(const JSObjectsCluster& cluster) {
return cluster.can_be_coarsed() ?
reinterpret_cast<HeapObject*>(cluster.instance()) : cluster.constructor();
}
static JSObjectsCluster HeapObjectAsCluster(HeapObject* object) {
if (object->IsString()) {
return JSObjectsCluster(String::cast(object));
} else {
JSObject* js_obj = JSObject::cast(object);
String* constructor = GetConstructorNameForHeapProfile(
JSObject::cast(js_obj));
return JSObjectsCluster(constructor, object);
}
}
class CountingRetainersIterator {
public:
CountingRetainersIterator(const JSObjectsCluster& child_cluster,
HeapEntriesAllocator* allocator,
HeapEntriesMap* map)
: child_(ClusterAsHeapObject(child_cluster)),
allocator_(allocator),
map_(map) {
if (map_->Map(child_) == NULL)
map_->Pair(child_, allocator_, HeapEntriesMap::kHeapEntryPlaceholder);
}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
if (map_->Map(ClusterAsHeapObject(cluster)) == NULL)
map_->Pair(ClusterAsHeapObject(cluster),
allocator_,
HeapEntriesMap::kHeapEntryPlaceholder);
map_->CountReference(ClusterAsHeapObject(cluster), child_);
}
private:
HeapObject* child_;
HeapEntriesAllocator* allocator_;
HeapEntriesMap* map_;
};
class AllocatingRetainersIterator {
public:
AllocatingRetainersIterator(const JSObjectsCluster& child_cluster,
HeapEntriesAllocator*,
HeapEntriesMap* map)
: child_(ClusterAsHeapObject(child_cluster)), map_(map) {
child_entry_ = map_->Map(child_);
ASSERT(child_entry_ != NULL);
}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
int child_index, retainer_index;
map_->CountReference(ClusterAsHeapObject(cluster),
child_,
&child_index,
&retainer_index);
map_->Map(ClusterAsHeapObject(cluster))->SetIndexedReference(
HeapGraphEdge::kElement,
child_index,
number_and_size.number(),
child_entry_,
retainer_index);
}
private:
HeapObject* child_;
HeapEntriesMap* map_;
HeapEntry* child_entry_;
};
template<class RetainersIterator>
class AggregatingRetainerTreeIterator {
public:
explicit AggregatingRetainerTreeIterator(ClustersCoarser* coarser,
HeapEntriesAllocator* allocator,
HeapEntriesMap* map)
: coarser_(coarser), allocator_(allocator), map_(map) {
}
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree) {
if (coarser_ != NULL &&
!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
JSObjectsClusterTree* tree_to_iterate = tree;
ZoneScope zs(Isolate::Current(), DELETE_ON_EXIT);
JSObjectsClusterTree dest_tree_;
if (coarser_ != NULL) {
RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
tree->ForEach(&retainers_aggregator);
tree_to_iterate = &dest_tree_;
}
RetainersIterator iterator(cluster, allocator_, map_);
tree_to_iterate->ForEach(&iterator);
}
private:
ClustersCoarser* coarser_;
HeapEntriesAllocator* allocator_;
HeapEntriesMap* map_;
};
class AggregatedRetainerTreeAllocator : public HeapEntriesAllocator {
public:
AggregatedRetainerTreeAllocator(HeapSnapshot* snapshot,
int* root_child_index)
: snapshot_(snapshot), root_child_index_(root_child_index) {
}
~AggregatedRetainerTreeAllocator() { }
HeapEntry* AllocateEntry(
HeapThing ptr, int children_count, int retainers_count) {
HeapObject* obj = reinterpret_cast<HeapObject*>(ptr);
JSObjectsCluster cluster = HeapObjectAsCluster(obj);
const char* name = cluster.GetSpecialCaseName();
if (name == NULL) {
name = snapshot_->collection()->names()->GetFunctionName(
cluster.constructor());
}
return AddEntryFromAggregatedSnapshot(
snapshot_, root_child_index_, HeapEntry::kObject, name,
0, 0, children_count, retainers_count);
}
private:
HeapSnapshot* snapshot_;
int* root_child_index_;
};
template<class Iterator>
void AggregatedHeapSnapshotGenerator::IterateRetainers(
HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map) {
RetainerHeapProfile* p = agg_snapshot_->js_retainer_profile();
AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_1(
p->coarser(), allocator, entries_map);
p->retainers_tree()->ForEach(&agg_ret_iter_1);
AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2(
NULL, allocator, entries_map);
p->aggregator()->output_tree().ForEach(&agg_ret_iter_2);
}
void AggregatedHeapSnapshotGenerator::FillHeapSnapshot(HeapSnapshot* snapshot) {
// Count the number of entities.
int histogram_entities_count = 0;
int histogram_children_count = 0;
int histogram_retainers_count = 0;
for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
if (agg_snapshot_->info()[i].bytes() > 0) {
++histogram_entities_count;
}
}
CountingConstructorHeapProfileIterator counting_cons_iter;
agg_snapshot_->js_cons_profile()->ForEach(&counting_cons_iter);
histogram_entities_count += counting_cons_iter.entities_count();
HeapEntriesMap entries_map;
int root_child_index = 0;
AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
IterateRetainers<CountingRetainersIterator>(&allocator, &entries_map);
histogram_entities_count += entries_map.entries_count();
histogram_children_count += entries_map.total_children_count();
histogram_retainers_count += entries_map.total_retainers_count();
// Root entry references all other entries.
histogram_children_count += histogram_entities_count;
int root_children_count = histogram_entities_count;
++histogram_entities_count;
// Allocate and fill entries in the snapshot, allocate references.
snapshot->AllocateEntries(histogram_entities_count,
histogram_children_count,
histogram_retainers_count);
snapshot->AddRootEntry(root_children_count);
for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
if (agg_snapshot_->info()[i].bytes() > 0) {
AddEntryFromAggregatedSnapshot(snapshot,
&root_child_index,
HeapEntry::kHidden,
agg_snapshot_->info()[i].name(),
agg_snapshot_->info()[i].number(),
agg_snapshot_->info()[i].bytes(),
0,
0);
}
}
AllocatingConstructorHeapProfileIterator alloc_cons_iter(
snapshot, &root_child_index);
agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
entries_map.AllocateEntries();
// Fill up references.
IterateRetainers<AllocatingRetainersIterator>(&allocator, &entries_map);
snapshot->SetDominatorsToSelf();
}
void ProducerHeapProfile::Setup() {
can_log_ = true;
}
void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
ASSERT(FLAG_log_producers);
if (!can_log_) return;
int framesCount = 0;
for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
++framesCount;
}
if (framesCount == 0) return;
++framesCount; // Reserve place for the terminator item.
Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
int i = 0;
for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
stack[i++] = it.frame()->pc();
}
stack[i] = NULL;
Handle<Object> handle = isolate_->global_handles()->Create(obj);
isolate_->global_handles()->MakeWeak(handle.location(),
static_cast<void*>(stack.start()),
StackWeakReferenceCallback);
}
#endif // ENABLE_LOGGING_AND_PROFILING
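
For background on the deleted mechanism above: the producer profiler captured the JavaScript frame pcs at allocation time (DoRecordJSObjectAllocation), parked the trace behind a weak global handle, and WriteSample() walked the weak roots to emit one heap-js-prod-item log line per still-live object, while the weak callback freed traces of dead objects. A conceptual sketch, not V8 code; every type here is a hypothetical stand-in:

#include <cstddef>
#include <cstdio>
#include <map>
#include <vector>

// Conceptual sketch: remember the allocating stack per object, dump the
// traces of still-live objects on request, and drop a trace when its
// object is reclaimed (the weak-handle callback analogue).
class ProducerProfileSketch {
 public:
  void OnAllocate(const void* obj, const std::vector<const void*>& stack) {
    traces_[obj] = stack;
  }
  void OnReclaim(const void* obj) { traces_.erase(obj); }
  void Dump() const {  // analogue of the WriteSample() walk above
    std::map<const void*, std::vector<const void*> >::const_iterator it;
    for (it = traces_.begin(); it != traces_.end(); ++it) {
      std::printf("heap-js-prod-item");  // constructor name omitted here
      for (std::size_t i = 0; i < it->second.size(); ++i)
        std::printf(",%p", it->second[i]);
      std::printf("\n");
    }
  }
 private:
  std::map<const void*, std::vector<const void*> > traces_;
};
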
@@ -28,9 +28,7 @@
#ifndef V8_HEAP_PROFILER_H_
#define V8_HEAP_PROFILER_H_
#include "allocation.h"
#include "isolate.h"
#include "zone-inl.h"
namespace v8 {
namespace internal {
@@ -81,10 +79,6 @@ class HeapProfiler {
return snapshots_->is_tracking_objects();
}
// Obsolete interface.
// Write a single heap sample to the log file.
static void WriteSample();
private:
HeapProfiler();
~HeapProfiler();
@@ -103,295 +97,6 @@
#endif // ENABLE_LOGGING_AND_PROFILING
};
#ifdef ENABLE_LOGGING_AND_PROFILING
// JSObjectsCluster describes a group of JS objects that are
// considered equivalent in terms of a particular profile.
class JSObjectsCluster BASE_EMBEDDED {
public:
// These special cases are used in retainer profile.
enum SpecialCase {
ROOTS = 1,
GLOBAL_PROPERTY = 2,
CODE = 3,
SELF = 100 // This case is used in ClustersCoarser only.
};
JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
explicit JSObjectsCluster(String* constructor)
: constructor_(constructor), instance_(NULL) {}
explicit JSObjectsCluster(SpecialCase special)
: constructor_(FromSpecialCase(special)), instance_(NULL) {}
JSObjectsCluster(String* constructor, Object* instance)
: constructor_(constructor), instance_(instance) {}
static int CompareConstructors(const JSObjectsCluster& a,
const JSObjectsCluster& b) {
// Strings are unique, so it is sufficient to compare their pointers.
return a.constructor_ == b.constructor_ ? 0
: (a.constructor_ < b.constructor_ ? -1 : 1);
}
static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
// Strings are unique, so it is sufficient to compare their pointers.
const int cons_cmp = CompareConstructors(a, b);
return cons_cmp == 0 ?
(a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
: cons_cmp;
}
static int Compare(const JSObjectsCluster* a, const JSObjectsCluster* b) {
return Compare(*a, *b);
}
bool is_null() const { return constructor_ == NULL; }
bool can_be_coarsed() const { return instance_ != NULL; }
String* constructor() const { return constructor_; }
Object* instance() const { return instance_; }
const char* GetSpecialCaseName() const;
void Print(StringStream* accumulator) const;
// Allows null clusters to be printed.
void DebugPrint(StringStream* accumulator) const;
private:
static String* FromSpecialCase(SpecialCase special) {
// We use symbols that are illegal JS identifiers to identify special cases.
// Their actual value is irrelevant for us.
switch (special) {
case ROOTS: return HEAP->result_symbol();
case GLOBAL_PROPERTY: return HEAP->catch_var_symbol();
case CODE: return HEAP->code_symbol();
case SELF: return HEAP->this_symbol();
default:
UNREACHABLE();
return NULL;
}
}
String* constructor_;
Object* instance_;
};
struct JSObjectsClusterTreeConfig {
typedef JSObjectsCluster Key;
typedef NumberAndSizeInfo Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;
// ConstructorHeapProfile is responsible for gathering and logging
// "constructor profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class ConstructorHeapProfile BASE_EMBEDDED {
public:
ConstructorHeapProfile();
virtual ~ConstructorHeapProfile() {}
void CollectStats(HeapObject* obj);
void PrintStats();
template<class Callback>
void ForEach(Callback* callback) { js_objects_info_tree_.ForEach(callback); }
// Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
virtual void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
ZoneScope zscope_;
JSObjectsClusterTree js_objects_info_tree_;
};
// JSObjectsRetainerTree is used to represent retainer graphs using
// adjacency list form:
//
// Cluster -> (Cluster -> NumberAndSizeInfo)
//
// Subordinate splay trees are stored by pointer. They are zone-allocated,
// so it isn't needed to manage their lifetime.
//
struct JSObjectsRetainerTreeConfig {
typedef JSObjectsCluster Key;
typedef JSObjectsClusterTree* Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;
class ClustersCoarser BASE_EMBEDDED {
public:
ClustersCoarser();
// Processes a given retainer graph.
void Process(JSObjectsRetainerTree* tree);
// Returns an equivalent cluster (can be the cluster itself).
// If the given cluster doesn't have an equivalent, returns null cluster.
JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
// Returns whether a cluster can be substitued with an equivalent and thus,
// skipped in some cases.
bool HasAnEquivalent(const JSObjectsCluster& cluster);
// Used by JSObjectsRetainerTree::ForEach.
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
// Stores a list of back references for a cluster.
struct ClusterBackRefs {
explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
ClusterBackRefs(const ClusterBackRefs& src);
ClusterBackRefs& operator=(const ClusterBackRefs& src);
static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
void SortRefs() { refs.Sort(JSObjectsCluster::Compare); }
static void SortRefsIterator(ClusterBackRefs* ref) { ref->SortRefs(); }
JSObjectsCluster cluster;
ZoneList<JSObjectsCluster> refs;
};
typedef ZoneList<ClusterBackRefs> SimilarityList;
// A tree for storing a list of equivalents for a cluster.
struct ClusterEqualityConfig {
typedef JSObjectsCluster Key;
typedef JSObjectsCluster Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;
static int ClusterBackRefsCmp(const ClusterBackRefs* a,
const ClusterBackRefs* b) {
return ClusterBackRefs::Compare(*a, *b);
}
int DoProcess(JSObjectsRetainerTree* tree);
int FillEqualityTree();
static const int kInitialBackrefsListCapacity = 2;
static const int kInitialSimilarityListCapacity = 2000;
// Number of passes for finding equivalents. Limits the length of paths
// that can be considered equivalent.
static const int kMaxPassesCount = 10;
ZoneScope zscope_;
SimilarityList sim_list_;
EqualityTree eq_tree_;
ClusterBackRefs* current_pair_;
JSObjectsRetainerTree* current_set_;
const JSObjectsCluster* self_;
};
// RetainerHeapProfile is responsible for gathering and logging
// "retainer profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class RetainerTreeAggregator;
class RetainerHeapProfile BASE_EMBEDDED {
public:
class Printer {
public:
virtual ~Printer() {}
virtual void PrintRetainers(const JSObjectsCluster& cluster,
const StringStream& retainers) = 0;
};
RetainerHeapProfile();
~RetainerHeapProfile();
RetainerTreeAggregator* aggregator() { return aggregator_; }
ClustersCoarser* coarser() { return &coarser_; }
JSObjectsRetainerTree* retainers_tree() { return &retainers_tree_; }
void CollectStats(HeapObject* obj);
void CoarseAndAggregate();
void PrintStats();
void DebugPrintStats(Printer* printer);
void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);
private:
ZoneScope zscope_;
JSObjectsRetainerTree retainers_tree_;
ClustersCoarser coarser_;
RetainerTreeAggregator* aggregator_;
};
class AggregatedHeapSnapshot {
public:
AggregatedHeapSnapshot();
~AggregatedHeapSnapshot();
HistogramInfo* info() { return info_; }
ConstructorHeapProfile* js_cons_profile() { return &js_cons_profile_; }
RetainerHeapProfile* js_retainer_profile() { return &js_retainer_profile_; }
private:
HistogramInfo* info_;
ConstructorHeapProfile js_cons_profile_;
RetainerHeapProfile js_retainer_profile_;
};
class HeapEntriesMap;
class HeapEntriesAllocator;
class AggregatedHeapSnapshotGenerator {
public:
explicit AggregatedHeapSnapshotGenerator(AggregatedHeapSnapshot* snapshot);
void GenerateSnapshot();
void FillHeapSnapshot(HeapSnapshot* snapshot);
static const int kAllStringsType = LAST_TYPE + 1;
private:
void CalculateStringsStats();
void CollectStats(HeapObject* obj);
template<class Iterator>
void IterateRetainers(
HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map);
AggregatedHeapSnapshot* agg_snapshot_;
};
class ProducerHeapProfile {
public:
void Setup();
void RecordJSObjectAllocation(Object* obj) {
if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
}
private:
ProducerHeapProfile() : can_log_(false) { }
void DoRecordJSObjectAllocation(Object* obj);
Isolate* isolate_;
bool can_log_;
friend class Isolate;
DISALLOW_COPY_AND_ASSIGN(ProducerHeapProfile);
};
#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
#endif // V8_HEAP_PROFILER_H_
@@ -523,11 +523,6 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
GarbageCollectionEpilogue();
}
#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log_gc) HeapProfiler::WriteSample();
#endif
return next_gc_likely_to_collect_more;
}
@@ -2984,9 +2979,6 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
HeapObject::cast(result)->set_map(map);
#ifdef ENABLE_LOGGING_AND_PROFILING
isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
#endif
return result;
}
@@ -3435,9 +3427,6 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
}
// Return the new clone.
#ifdef ENABLE_LOGGING_AND_PROFILING
isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
#endif
return clone;
}
@@ -5122,11 +5111,6 @@ bool Heap::Setup(bool create_heap_objects) {
LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
LOG(isolate_, IntPtrTEvent("heap-available", Available()));
#ifdef ENABLE_LOGGING_AND_PROFILING
// This should be called only after initial objects have been created.
isolate_->producer_heap_profile()->Setup();
#endif
return true;
}
@@ -1445,10 +1445,6 @@ Isolate::Isolate()
debugger_ = NULL;
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
producer_heap_profile_ = NULL;
#endif
handle_scope_data_.Initialize();
#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
@@ -1537,11 +1533,6 @@ void Isolate::SetIsolateThreadLocals(Isolate* isolate,
Isolate::~Isolate() {
TRACE_ISOLATE(destructor);
#ifdef ENABLE_LOGGING_AND_PROFILING
delete producer_heap_profile_;
producer_heap_profile_ = NULL;
#endif
delete unicode_cache_;
unicode_cache_ = NULL;
@@ -1657,11 +1648,6 @@ bool Isolate::PreInit() {
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
#ifdef ENABLE_LOGGING_AND_PROFILING
producer_heap_profile_ = new ProducerHeapProfile();
producer_heap_profile_->isolate_ = this;
#endif
state_ = PREINITIALIZED;
return true;
}
@@ -69,7 +69,6 @@ class InlineRuntimeFunctionsTable;
class NoAllocationStringAllocator;
class PcToCodeCache;
class PreallocatedMemoryThread;
class ProducerHeapProfile;
class RegExpStack;
class SaveContext;
class UnicodeCache;
@@ -907,12 +906,6 @@ class Isolate {
inline bool DebuggerHasBreakPoints();
#ifdef ENABLE_LOGGING_AND_PROFILING
ProducerHeapProfile* producer_heap_profile() {
return producer_heap_profile_;
}
#endif
#ifdef DEBUG
HistogramInfo* heap_histograms() { return heap_histograms_; }
@@ -1172,10 +1165,6 @@
Debug* debug_;
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
ProducerHeapProfile* producer_heap_profile_;
#endif
#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
type name##_;
ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
@@ -521,7 +521,6 @@ Logger::Logger()
log_events_(NULL),
logging_nesting_(0),
cpu_profiler_nesting_(0),
heap_profiler_nesting_(0),
log_(new Log(this)),
name_buffer_(new NameBuffer),
address_to_name_map_(NULL),
@@ -1286,19 +1285,6 @@ void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
}
void Logger::HeapSampleStats(const char* space, const char* kind,
intptr_t capacity, intptr_t used) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-sample-stats,\"%s\",\"%s\","
"%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
space, kind, capacity, used);
msg.WriteToLogFile();
#endif
}
void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
@@ -1319,72 +1305,6 @@ void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
}
void Logger::HeapSampleJSConstructorEvent(const char* constructor,
int number, int bytes) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-js-cons-item,%s,%d,%d\n", constructor, number, bytes);
msg.WriteToLogFile();
#endif
}
// Event starts with comma, so we don't have it in the format string.
static const char kEventText[] = "heap-js-ret-item,%s";
// We take placeholder strings into account, but it's OK to be conservative.
static const int kEventTextLen = sizeof(kEventText)/sizeof(kEventText[0]);
void Logger::HeapSampleJSRetainersEvent(
const char* constructor, const char* event) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
const int cons_len = StrLength(constructor);
const int event_len = StrLength(event);
int pos = 0;
// Retainer lists can be long. We may need to split them into multiple events.
do {
LogMessageBuilder msg(this);
msg.Append(kEventText, constructor);
int to_write = event_len - pos;
if (to_write > Log::kMessageBufferSize - (cons_len + kEventTextLen)) {
int cut_pos = pos + Log::kMessageBufferSize - (cons_len + kEventTextLen);
ASSERT(cut_pos < event_len);
while (cut_pos > pos && event[cut_pos] != ',') --cut_pos;
if (event[cut_pos] != ',') {
// Crash in debug mode, skip in release mode.
ASSERT(false);
return;
}
// Append a piece of event that fits, without trailing comma.
msg.AppendStringPart(event + pos, cut_pos - pos);
// Start next piece with comma.
pos = cut_pos;
} else {
msg.Append("%s", event + pos);
pos += event_len;
}
msg.Append('\n');
msg.WriteToLogFile();
} while (pos < event_len);
#endif
}
void Logger::HeapSampleJSProducerEvent(const char* constructor,
Address* stack) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg(this);
msg.Append("heap-js-prod-item,%s", constructor);
while (*stack != NULL) {
msg.Append(",0x%" V8PRIxPTR, *stack++);
}
msg.Append("\n");
msg.WriteToLogFile();
#endif
}
void Logger::DebugTag(const char* call_site_tag) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!log_->IsEnabled() || !FLAG_log) return;
@@ -1447,9 +1367,6 @@ int Logger::GetActiveProfilerModules() {
if (profiler_ != NULL && !profiler_->paused()) {
result |= PROFILER_MODULE_CPU;
}
if (FLAG_log_gc) {
result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
}
return result;
}
@@ -1471,13 +1388,6 @@ void Logger::PauseProfiler(int flags, int tag) {
--logging_nesting_;
}
}
if (flags &
(PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
if (--heap_profiler_nesting_ == 0) {
FLAG_log_gc = false;
--logging_nesting_;
}
}
if (tag != 0) {
UncheckedIntEvent("close-tag", tag);
}
@@ -1505,13 +1415,6 @@ void Logger::ResumeProfiler(int flags, int tag) {
profiler_->resume();
}
}
if (flags &
(PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
if (heap_profiler_nesting_++ == 0) {
++logging_nesting_;
FLAG_log_gc = true;
}
}
}
@@ -401,7 +401,6 @@ class Logger {
int logging_nesting_;
int cpu_profiler_nesting_;
int heap_profiler_nesting_;
Log* log_;
@@ -638,8 +638,7 @@ class HeapSnapshotsCollection;
 class HeapSnapshot {
  public:
   enum Type {
-    kFull = v8::HeapSnapshot::kFull,
-    kAggregated = v8::HeapSnapshot::kAggregated
+    kFull = v8::HeapSnapshot::kFull
   };
   HeapSnapshot(HeapSnapshotsCollection* collection,
@@ -9,381 +9,10 @@
#include "cctest.h"
#include "heap-profiler.h"
#include "snapshot.h"
#include "string-stream.h"
#include "utils-inl.h"
#include "zone-inl.h"
#include "../include/v8-profiler.h"
namespace i = v8::internal;
using i::ClustersCoarser;
using i::JSObjectsCluster;
using i::JSObjectsRetainerTree;
using i::JSObjectsClusterTree;
using i::RetainerHeapProfile;
namespace {
class ConstructorHeapProfileTestHelper : public i::ConstructorHeapProfile {
public:
ConstructorHeapProfileTestHelper()
: i::ConstructorHeapProfile(),
f_name_(FACTORY->NewStringFromAscii(i::CStrVector("F"))),
f_count_(0) {
}
void Call(const JSObjectsCluster& cluster,
const i::NumberAndSizeInfo& number_and_size) {
if (f_name_->Equals(cluster.constructor())) {
CHECK_EQ(f_count_, 0);
f_count_ = number_and_size.number();
CHECK_GT(f_count_, 0);
}
}
int f_count() { return f_count_; }
private:
i::Handle<i::String> f_name_;
int f_count_;
};
} // namespace
TEST(ConstructorProfile) {
v8::HandleScope scope;
LocalContext env;
CompileRun(
"function F() {} // A constructor\n"
"var f1 = new F();\n"
"var f2 = new F();\n");
ConstructorHeapProfileTestHelper cons_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
for (i::HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next())
cons_profile.CollectStats(obj);
CHECK_EQ(0, cons_profile.f_count());
cons_profile.PrintStats();
CHECK_EQ(2, cons_profile.f_count());
}
static JSObjectsCluster AddHeapObjectToTree(JSObjectsRetainerTree* tree,
i::String* constructor,
int instance,
JSObjectsCluster* ref1 = NULL,
JSObjectsCluster* ref2 = NULL,
JSObjectsCluster* ref3 = NULL) {
JSObjectsCluster o(constructor, reinterpret_cast<i::Object*>(instance));
JSObjectsClusterTree* o_tree = new JSObjectsClusterTree();
JSObjectsClusterTree::Locator o_loc;
if (ref1 != NULL) o_tree->Insert(*ref1, &o_loc);
if (ref2 != NULL) o_tree->Insert(*ref2, &o_loc);
if (ref3 != NULL) o_tree->Insert(*ref3, &o_loc);
JSObjectsRetainerTree::Locator loc;
tree->Insert(o, &loc);
loc.set_value(o_tree);
return o;
}
static void AddSelfReferenceToTree(JSObjectsRetainerTree* tree,
JSObjectsCluster* self_ref) {
JSObjectsRetainerTree::Locator loc;
CHECK(tree->Find(*self_ref, &loc));
JSObjectsClusterTree::Locator o_loc;
CHECK_NE(NULL, loc.value());
loc.value()->Insert(*self_ref, &o_loc);
}
static inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source,
const JSObjectsCluster& expected,
const char* value_source,
const JSObjectsCluster& value) {
if (JSObjectsCluster::Compare(expected, value) != 0) {
i::HeapStringAllocator allocator;
i::StringStream stream(&allocator);
stream.Add("# Expected: ");
expected.DebugPrint(&stream);
stream.Add("\n# Found: ");
value.DebugPrint(&stream);
V8_Fatal(file, line, "CHECK_EQ(%s, %s) failed\n%s",
expected_source, value_source,
*stream.ToCString());
}
}
static inline void CheckNonEqualsHelper(const char* file, int line,
const char* expected_source,
const JSObjectsCluster& expected,
const char* value_source,
const JSObjectsCluster& value) {
if (JSObjectsCluster::Compare(expected, value) == 0) {
i::HeapStringAllocator allocator;
i::StringStream stream(&allocator);
stream.Add("# !Expected: ");
expected.DebugPrint(&stream);
stream.Add("\n# Found: ");
value.DebugPrint(&stream);
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n%s",
expected_source, value_source,
*stream.ToCString());
}
}
TEST(ClustersCoarserSimple) {
v8::HandleScope scope;
LocalContext env;
i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
JSObjectsCluster function(HEAP->function_class_symbol());
JSObjectsCluster a(*FACTORY->NewStringFromAscii(i::CStrVector("A")));
JSObjectsCluster b(*FACTORY->NewStringFromAscii(i::CStrVector("B")));
// o1 <- Function
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100, &function);
// o2 <- Function
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x200, &function);
// o3 <- A, B
JSObjectsCluster o3 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &a, &b);
// o4 <- B, A
JSObjectsCluster o4 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x400, &b, &a);
// o5 <- A, B, Function
JSObjectsCluster o5 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x500,
&a, &b, &function);
ClustersCoarser coarser;
coarser.Process(&tree);
CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_EQ(coarser.GetCoarseEquivalent(o3), coarser.GetCoarseEquivalent(o4));
CHECK_NE(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o3));
CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o5));
}
TEST(ClustersCoarserMultipleConstructors) {
v8::HandleScope scope;
LocalContext env;
i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
JSObjectsCluster function(HEAP->function_class_symbol());
// o1 <- Function
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100, &function);
// a1 <- Function
JSObjectsCluster a1 =
AddHeapObjectToTree(&tree, HEAP->Array_symbol(), 0x1000, &function);
// o2 <- Function
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x200, &function);
// a2 <- Function
JSObjectsCluster a2 =
AddHeapObjectToTree(&tree, HEAP->Array_symbol(), 0x2000, &function);
ClustersCoarser coarser;
coarser.Process(&tree);
CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_EQ(coarser.GetCoarseEquivalent(a1), coarser.GetCoarseEquivalent(a2));
}
TEST(ClustersCoarserPathsTraversal) {
v8::HandleScope scope;
LocalContext env;
i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
// On the following graph:
//
// p
// <- o21 <- o11 <-
// q o
// <- o22 <- o12 <-
// r
//
// we expect that coarser will deduce equivalences: p ~ q ~ r,
// o21 ~ o22, and o11 ~ o12.
JSObjectsCluster o =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100);
JSObjectsCluster o11 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x110, &o);
JSObjectsCluster o12 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x120, &o);
JSObjectsCluster o21 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x210, &o11);
JSObjectsCluster o22 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x220, &o12);
JSObjectsCluster p =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &o21);
JSObjectsCluster q =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x310, &o21, &o22);
JSObjectsCluster r =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x320, &o22);
ClustersCoarser coarser;
coarser.Process(&tree);
CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(o11));
CHECK_EQ(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(o12));
CHECK_EQ(coarser.GetCoarseEquivalent(o21), coarser.GetCoarseEquivalent(o22));
CHECK_NE(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(o21));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(p));
CHECK_EQ(coarser.GetCoarseEquivalent(p), coarser.GetCoarseEquivalent(q));
CHECK_EQ(coarser.GetCoarseEquivalent(q), coarser.GetCoarseEquivalent(r));
CHECK_NE(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(p));
CHECK_NE(coarser.GetCoarseEquivalent(o21), coarser.GetCoarseEquivalent(p));
}
TEST(ClustersCoarserSelf) {
v8::HandleScope scope;
LocalContext env;
i::ZoneScope zn_scope(i::Isolate::Current(), i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
// On the following graph:
//
// p (self-referencing)
// <- o1 <-
// q (self-referencing) o
// <- o2 <-
// r (self-referencing)
//
// we expect that coarser will deduce equivalences: p ~ q ~ r, o1 ~ o2;
JSObjectsCluster o =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100);
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x110, &o);
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x120, &o);
JSObjectsCluster p =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &o1);
AddSelfReferenceToTree(&tree, &p);
JSObjectsCluster q =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x310, &o1, &o2);
AddSelfReferenceToTree(&tree, &q);
JSObjectsCluster r =
AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x320, &o2);
AddSelfReferenceToTree(&tree, &r);
ClustersCoarser coarser;
coarser.Process(&tree);
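// The self-references must not prevent coarsening: p ~ q ~ r and o1 ~ o2
// still hold, while 'o' stays without a coarse equivalent.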
CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(o1));
CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_NE(JSObjectsCluster(), coarser.GetCoarseEquivalent(p));
CHECK_EQ(coarser.GetCoarseEquivalent(p), coarser.GetCoarseEquivalent(q));
CHECK_EQ(coarser.GetCoarseEquivalent(q), coarser.GetCoarseEquivalent(r));
CHECK_NE(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(p));
}

namespace {
class RetainerProfilePrinter : public RetainerHeapProfile::Printer {
public:
RetainerProfilePrinter() : stream_(&allocator_), lines_(100) {}
void PrintRetainers(const JSObjectsCluster& cluster,
const i::StringStream& retainers) {
cluster.Print(&stream_);
stream_.Add("%s", *(retainers.ToCString()));
stream_.Put('\0');
}
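
// Returns the text following "<constructor>," in the accumulated output,
// or NULL if no entry for 'constructor' was printed.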
const char* GetRetainers(const char* constructor) {
FillLines();
const size_t cons_len = strlen(constructor);
for (int i = 0; i < lines_.length(); ++i) {
if (strncmp(constructor, lines_[i], cons_len) == 0 &&
lines_[i][cons_len] == ',') {
return lines_[i] + cons_len + 1;
}
}
return NULL;
}
private:
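// Lazily splits the NUL-separated records accumulated in 'stream_' into
// 'lines_', one pointer per printed entry.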
void FillLines() {
if (lines_.length() > 0) return;
stream_.Put('\0');
stream_str_ = stream_.ToCString();
const char* pos = *stream_str_;
while (pos != NULL && *pos != '\0') {
lines_.Add(pos);
pos = strchr(pos, '\0');
if (pos != NULL) ++pos;
}
}
i::HeapStringAllocator allocator_;
i::StringStream stream_;
i::SmartPointer<const char> stream_str_;
i::List<const char*> lines_;
};
} // namespace

TEST(RetainerProfile) {
v8::HandleScope scope;
LocalContext env;
CompileRun(
"function A() {}\n"
"function B(x) { this.x = x; }\n"
"function C(x) { this.x1 = x; this.x2 = x; }\n"
"var a = new A();\n"
"var b1 = new B(a), b2 = new B(a);\n"
"var c = new C(a);");
RetainerHeapProfile ret_profile;
i::AssertNoAllocation no_alloc;
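// Walk the entire heap and record retainer stats for every object.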
i::HeapIterator iterator;
for (i::HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next())
ret_profile.CollectStats(obj);
ret_profile.CoarseAndAggregate();
RetainerProfilePrinter printer;
ret_profile.DebugPrintStats(&printer);
const char* retainers_of_a = printer.GetRetainers("A");
// The order of retainers is unspecified, so we check the string length
// and verify each retainer separately.
CHECK_EQ(i::StrLength("(global property);1,B;2,C;2"),
i::StrLength(retainers_of_a));
CHECK(strstr(retainers_of_a, "(global property);1") != NULL);
CHECK(strstr(retainers_of_a, "B;2") != NULL);
CHECK(strstr(retainers_of_a, "C;2") != NULL);
CHECK_EQ("(global property);2", printer.GetRetainers("B"));
CHECK_EQ("(global property);1", printer.GetRetainers("C"));
}

namespace {
......@@ -726,116 +355,6 @@ TEST(HeapSnapshotRootPreservedAfterSorting) {
}
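
// Returns the first child of 'node' with the given type and name.
// If 'after' is non-NULL, children up to and including 'after' are skipped,
// so repeated calls can find later nodes with the same name.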
static const v8::HeapGraphNode* GetChild(
const v8::HeapGraphNode* node,
v8::HeapGraphNode::Type type,
const char* name,
const v8::HeapGraphNode* after = NULL) {
bool ignore_child = after != NULL;
for (int i = 0, count = node->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = node->GetChild(i);
const v8::HeapGraphNode* child = prop->GetToNode();
v8::String::AsciiValue child_name(child->GetName());
if (!ignore_child
&& child->GetType() == type
&& strcmp(name, *child_name) == 0)
return child;
if (after != NULL && child == after) ignore_child = false;
}
return NULL;
}
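
// Returns true if 'node' is retained via an element edge with the given
// index.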
static bool IsNodeRetainedAs(const v8::HeapGraphNode* node,
int element) {
for (int i = 0, count = node->GetRetainersCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = node->GetRetainer(i);
if (prop->GetType() == v8::HeapGraphEdge::kElement
&& element == prop->GetName()->Int32Value())
return true;
}
return false;
}

TEST(AggregatedHeapSnapshot) {
v8::HandleScope scope;
LocalContext env;
CompileRun(
"function A() {}\n"
"function B(x) { this.x = x; }\n"
"var a = new A();\n"
"var b = new B(a);");
const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(
v8::String::New("agg"), v8::HeapSnapshot::kAggregated);
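// Aggregated snapshots group instances by constructor or hidden type name
// instead of listing them individually.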
const v8::HeapGraphNode* strings = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kHidden,
"STRING_TYPE");
CHECK_NE(NULL, strings);
CHECK_NE(0, strings->GetSelfSize());
CHECK_NE(0, strings->GetInstancesCount());
const v8::HeapGraphNode* maps = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kHidden,
"MAP_TYPE");
CHECK_NE(NULL, maps);
CHECK_NE(0, maps->GetSelfSize());
CHECK_NE(0, maps->GetInstancesCount());
const v8::HeapGraphNode* a = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"A");
CHECK_NE(NULL, a);
CHECK_NE(0, a->GetSelfSize());
CHECK_EQ(1, a->GetInstancesCount());
const v8::HeapGraphNode* b = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"B");
CHECK_NE(NULL, b);
CHECK_NE(0, b->GetSelfSize());
CHECK_EQ(1, b->GetInstancesCount());
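// GetChild is given 'b' as the 'after' argument to skip the aggregate
// entries and reach the second set of nodes, which carry reference edges
// but no instance counts.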
const v8::HeapGraphNode* glob_prop = GetChild(snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"(global property)",
b);
CHECK_NE(NULL, glob_prop);
CHECK_EQ(0, glob_prop->GetSelfSize());
CHECK_EQ(0, glob_prop->GetInstancesCount());
CHECK_NE(0, glob_prop->GetChildrenCount());
const v8::HeapGraphNode* a_from_glob_prop = GetChild(
glob_prop,
v8::HeapGraphNode::kObject,
"A");
CHECK_NE(NULL, a_from_glob_prop);
CHECK_EQ(0, a_from_glob_prop->GetSelfSize());
CHECK_EQ(0, a_from_glob_prop->GetInstancesCount());
CHECK_EQ(0, a_from_glob_prop->GetChildrenCount()); // Retains nothing.
CHECK(IsNodeRetainedAs(a_from_glob_prop, 1));  // (global property) has 1 ref.
const v8::HeapGraphNode* b_with_children = GetChild(
snapshot->GetRoot(),
v8::HeapGraphNode::kObject,
"B",
b);
CHECK_NE(NULL, b_with_children);
CHECK_EQ(0, b_with_children->GetSelfSize());
CHECK_EQ(0, b_with_children->GetInstancesCount());
CHECK_NE(0, b_with_children->GetChildrenCount());
const v8::HeapGraphNode* a_from_b = GetChild(
b_with_children,
v8::HeapGraphNode::kObject,
"A");
CHECK_NE(NULL, a_from_b);
CHECK_EQ(0, a_from_b->GetSelfSize());
CHECK_EQ(0, a_from_b->GetInstancesCount());
CHECK_EQ(0, a_from_b->GetChildrenCount()); // Retains nothing.
CHECK(IsNodeRetainedAs(a_from_b, 1)); // B has 1 ref to A.
}

TEST(HeapEntryDominator) {
// The graph looks like this:
//
......@@ -1048,21 +567,6 @@ TEST(HeapSnapshotJSONSerializationAborting) {
}

// Must not crash in debug mode.
TEST(AggregatedHeapSnapshotJSONSerialization) {
v8::HandleScope scope;
LocalContext env;
const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(
v8::String::New("agg"), v8::HeapSnapshot::kAggregated);
TestJSONStream stream;
snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
CHECK_GT(stream.size(), 0);
CHECK_EQ(1, stream.eos_signaled());
}

TEST(HeapSnapshotGetNodeById) {
v8::HandleScope scope;
LocalContext env;
......
......@@ -708,24 +708,6 @@ TEST(IsLoggingPreserved) {
CHECK(LOGGER->is_logging());
LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
CHECK(LOGGER->is_logging());
CHECK(LOGGER->is_logging());
LOGGER->ResumeProfiler(
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
LOGGER->PauseProfiler(
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
CHECK(LOGGER->is_logging());
LOGGER->ResumeProfiler(
v8::PROFILER_MODULE_CPU |
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
LOGGER->PauseProfiler(
v8::PROFILER_MODULE_CPU |
v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
CHECK(LOGGER->is_logging());
}
......
......@@ -169,17 +169,12 @@ function TickProcessor(
processor: this.processHeapSampleBegin },
'heap-sample-end': { parsers: [null, null],
processor: this.processHeapSampleEnd },
'heap-js-prod-item': { parsers: [null, 'var-args'],
processor: this.processJSProducer },
// Ignored events.
'profiler': null,
'function-creation': null,
'function-move': null,
'function-delete': null,
'heap-sample-stats': null,
'heap-sample-item': null,
'heap-js-cons-item': null,
'heap-js-ret-item': null,
// Obsolete row types.
'code-allocate': null,
'begin-code-region': null,
......@@ -401,17 +396,6 @@ TickProcessor.prototype.processHeapSampleEnd = function(space, state) {
};

TickProcessor.prototype.processJSProducer = function(constructor, stack) {
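// Resolve the sampled stack to function names, prefix it with the
// producing constructor, and record the path in the producers profile.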
if (!this.currentProducerProfile_) return;
if (stack.length == 0) return;
var first = stack.shift();
var processedStack =
this.profile_.resolveAndFilterFuncs_(this.processStack(first, 0, stack));
processedStack.unshift(constructor);
this.currentProducerProfile_.addPath(processedStack);
};

TickProcessor.prototype.printStatistics = function() {
print('Statistical profiling result from ' + this.lastLogFileName_ +
', (' + this.ticks_.total +
......