Remove the obsolete aggregating heap profiler and the non-working producers heap profiler.

2000 LOC are gone!

R=sgjesse@chromium.org
BUG=1481

Review URL: http://codereview.chromium.org/7247018

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8406 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 94e5f2f3
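
With the aggregated profiler removed, kFull is the only snapshot type and GetInstancesCount() disappears from HeapGraphNode. A minimal sketch of the surviving public surface, assuming the v8-profiler.h of this branch (GetRoot() and TakeSnapshot's defaulted trailing arguments are not shown in the hunks below):

  // Sketch, not shipped code: exercises the post-patch heap-profiler API.
  #include <v8.h>
  #include <v8-profiler.h>
  #include <cstdio>

  void DumpFullSnapshot() {
    v8::HandleScope scope;
    // kAggregated is gone; kFull is the only HeapSnapshot::Type left.
    const v8::HeapSnapshot* snapshot = v8::HeapProfiler::TakeSnapshot(
        v8::String::New("example"), v8::HeapSnapshot::kFull);
    const v8::HeapGraphNode* root = snapshot->GetRoot();
    // GetId() is now meaningful for every node; GetInstancesCount() no
    // longer exists.
    std::printf("root: id=%llu self_size=%d\n",
                static_cast<unsigned long long>(root->GetId()),
                root->GetSelfSize());
  }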
@@ -269,17 +269,10 @@ class V8EXPORT HeapGraphNode {
   /**
    * Returns node id. For the same heap object, the id remains the same
-   * across all snapshots. Not applicable to aggregated heap snapshots
-   * as they only contain aggregated instances.
+   * across all snapshots.
    */
   uint64_t GetId() const;
 
-  /**
-   * Returns the number of instances. Only applicable to aggregated
-   * heap snapshots.
-   */
-  int GetInstancesCount() const;
-
   /** Returns node's own size, in bytes. */
   int GetSelfSize() const;
@@ -323,9 +316,7 @@ class V8EXPORT HeapGraphNode {
 class V8EXPORT HeapSnapshot {
  public:
   enum Type {
-    kFull = 0,       // Heap snapshot with all instances and references.
-    kAggregated = 1  // Snapshot doesn't contain individual heap entries,
-                     // instead they are grouped by constructor name.
+    kFull = 0  // Heap snapshot with all instances and references.
   };
   enum SerializationFormat {
     kJSON = 0  // See format description near 'Serialize' method.
......
@@ -2560,17 +2560,12 @@ typedef void (*GCCallback)();
 /**
  * Profiler modules.
  *
- * In V8, profiler consists of several modules: CPU profiler, and different
- * kinds of heap profiling. Each can be turned on / off independently.
- * When PROFILER_MODULE_HEAP_SNAPSHOT flag is passed to ResumeProfilerEx,
- * modules are enabled only temporarily for making a snapshot of the heap.
+ * In V8, profiler consists of several modules. Each can be turned on / off
+ * independently.
  */
 enum ProfilerModules {
   PROFILER_MODULE_NONE = 0,
-  PROFILER_MODULE_CPU = 1,
-  PROFILER_MODULE_HEAP_STATS = 1 << 1,
-  PROFILER_MODULE_JS_CONSTRUCTORS = 1 << 2,
-  PROFILER_MODULE_HEAP_SNAPSHOT = 1 << 16
+  PROFILER_MODULE_CPU = 1
 };
......
@@ -4855,22 +4855,7 @@ bool V8::IsProfilerPaused() {
 void V8::ResumeProfilerEx(int flags, int tag) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   i::Isolate* isolate = i::Isolate::Current();
-  if (flags & PROFILER_MODULE_HEAP_SNAPSHOT) {
-    // Snapshot mode: resume modules, perform GC, then pause only
-    // those modules which haven't been started prior to making a
-    // snapshot.
-
-    // Make a GC prior to taking a snapshot.
-    isolate->heap()->CollectAllGarbage(false);
-    // Reset snapshot flag and CPU module flags.
-    flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
-    const int current_flags = isolate->logger()->GetActiveProfilerModules();
-    isolate->logger()->ResumeProfiler(flags, tag);
-    isolate->heap()->CollectAllGarbage(false);
-    isolate->logger()->PauseProfiler(~current_flags & flags, tag);
-  } else {
-    isolate->logger()->ResumeProfiler(flags, tag);
-  }
+  isolate->logger()->ResumeProfiler(flags, tag);
 #endif
 }
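
Snapshot mode is gone, so ResumeProfilerEx now simply delegates to Logger::ResumeProfiler with no GC bracketing. A minimal usage sketch of the surviving embedder API (assuming the public V8::ResumeProfilerEx / V8::PauseProfilerEx pair of this API version; the CPU module is the only flag with any remaining effect):

  // Sketch: profile a workload with the one module this patch leaves behind.
  v8::V8::ResumeProfilerEx(v8::PROFILER_MODULE_CPU, 0 /* tag */);
  RunWorkload();  // hypothetical embedder function
  v8::V8::PauseProfilerEx(v8::PROFILER_MODULE_CPU, 0 /* tag */);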
@@ -5720,7 +5705,6 @@ uint64_t HeapGraphNode::GetId() const {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   i::Isolate* isolate = i::Isolate::Current();
   IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
-  ASSERT(ToInternal(this)->snapshot()->type() != i::HeapSnapshot::kAggregated);
   return ToInternal(this)->id();
 #else
   return 0;
@@ -5728,18 +5712,6 @@ uint64_t HeapGraphNode::GetId() const {
 }
 
-int HeapGraphNode::GetInstancesCount() const {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  i::Isolate* isolate = i::Isolate::Current();
-  IsDeadCheck(isolate, "v8::HeapGraphNode::GetInstancesCount");
-  ASSERT(ToInternal(this)->snapshot()->type() == i::HeapSnapshot::kAggregated);
-  return static_cast<int>(ToInternal(this)->id());
-#else
-  return 0;
-#endif
-}
-
-
 int HeapGraphNode::GetSelfSize() const {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   i::Isolate* isolate = i::Isolate::Current();
@@ -5987,9 +5959,6 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
     case HeapSnapshot::kFull:
       internal_type = i::HeapSnapshot::kFull;
       break;
-    case HeapSnapshot::kAggregated:
-      internal_type = i::HeapSnapshot::kAggregated;
-      break;
     default:
       UNREACHABLE();
   }
......
@@ -474,7 +474,6 @@ DEFINE_bool(log_handles, false, "Log global handle events.")
 DEFINE_bool(log_snapshot_positions, false,
             "log positions of (de)serialized objects in the snapshot.")
 DEFINE_bool(log_suspect, false, "Log suspect operations.")
-DEFINE_bool(log_producers, false, "Log stack traces of JS objects allocations.")
 DEFINE_bool(prof, false,
             "Log statistical profiling information (implies --log-code).")
 DEFINE_bool(prof_auto, true,
......
@@ -28,9 +28,7 @@
 #ifndef V8_HEAP_PROFILER_H_
 #define V8_HEAP_PROFILER_H_
 
-#include "allocation.h"
 #include "isolate.h"
-#include "zone-inl.h"
 
 namespace v8 {
 namespace internal {
@@ -81,10 +79,6 @@ class HeapProfiler {
     return snapshots_->is_tracking_objects();
   }
 
-  // Obsolete interface.
-  // Write a single heap sample to the log file.
-  static void WriteSample();
-
  private:
   HeapProfiler();
   ~HeapProfiler();
@@ -103,295 +97,6 @@ class HeapProfiler {
 #endif  // ENABLE_LOGGING_AND_PROFILING
 };
 
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
-// JSObjectsCluster describes a group of JS objects that are
-// considered equivalent in terms of a particular profile.
-class JSObjectsCluster BASE_EMBEDDED {
- public:
-  // These special cases are used in retainer profile.
-  enum SpecialCase {
-    ROOTS = 1,
-    GLOBAL_PROPERTY = 2,
-    CODE = 3,
-    SELF = 100  // This case is used in ClustersCoarser only.
-  };
-
-  JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
-  explicit JSObjectsCluster(String* constructor)
-      : constructor_(constructor), instance_(NULL) {}
-  explicit JSObjectsCluster(SpecialCase special)
-      : constructor_(FromSpecialCase(special)), instance_(NULL) {}
-  JSObjectsCluster(String* constructor, Object* instance)
-      : constructor_(constructor), instance_(instance) {}
-
-  static int CompareConstructors(const JSObjectsCluster& a,
-                                 const JSObjectsCluster& b) {
-    // Strings are unique, so it is sufficient to compare their pointers.
-    return a.constructor_ == b.constructor_ ? 0
-        : (a.constructor_ < b.constructor_ ? -1 : 1);
-  }
-  static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
-    // Strings are unique, so it is sufficient to compare their pointers.
-    const int cons_cmp = CompareConstructors(a, b);
-    return cons_cmp == 0 ?
-        (a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
-        : cons_cmp;
-  }
-  static int Compare(const JSObjectsCluster* a, const JSObjectsCluster* b) {
-    return Compare(*a, *b);
-  }
-
-  bool is_null() const { return constructor_ == NULL; }
-  bool can_be_coarsed() const { return instance_ != NULL; }
-  String* constructor() const { return constructor_; }
-  Object* instance() const { return instance_; }
-
-  const char* GetSpecialCaseName() const;
-  void Print(StringStream* accumulator) const;
-  // Allows null clusters to be printed.
-  void DebugPrint(StringStream* accumulator) const;
-
- private:
-  static String* FromSpecialCase(SpecialCase special) {
-    // We use symbols that are illegal JS identifiers to identify special cases.
-    // Their actual value is irrelevant for us.
-    switch (special) {
-      case ROOTS: return HEAP->result_symbol();
-      case GLOBAL_PROPERTY: return HEAP->catch_var_symbol();
-      case CODE: return HEAP->code_symbol();
-      case SELF: return HEAP->this_symbol();
-      default:
-        UNREACHABLE();
-        return NULL;
-    }
-  }
-
-  String* constructor_;
-  Object* instance_;
-};
-
-
-struct JSObjectsClusterTreeConfig {
-  typedef JSObjectsCluster Key;
-  typedef NumberAndSizeInfo Value;
-  static const Key kNoKey;
-  static const Value kNoValue;
-  static int Compare(const Key& a, const Key& b) {
-    return Key::Compare(a, b);
-  }
-};
-typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;
-
-
-// ConstructorHeapProfile is responsible for gathering and logging
-// "constructor profile" of JS objects allocated on heap.
-// It is run during garbage collection cycle, thus it doesn't need
-// to use handles.
-class ConstructorHeapProfile BASE_EMBEDDED {
- public:
-  ConstructorHeapProfile();
-  virtual ~ConstructorHeapProfile() {}
-  void CollectStats(HeapObject* obj);
-  void PrintStats();
-
-  template<class Callback>
-  void ForEach(Callback* callback) { js_objects_info_tree_.ForEach(callback); }
-  // Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
-  virtual void Call(const JSObjectsCluster& cluster,
-                    const NumberAndSizeInfo& number_and_size);
-
- private:
-  ZoneScope zscope_;
-  JSObjectsClusterTree js_objects_info_tree_;
-};
-
-
-// JSObjectsRetainerTree is used to represent retainer graphs using
-// adjacency list form:
-//
-//   Cluster -> (Cluster -> NumberAndSizeInfo)
-//
-// Subordinate splay trees are stored by pointer. They are zone-allocated,
-// so it isn't needed to manage their lifetime.
-//
-struct JSObjectsRetainerTreeConfig {
-  typedef JSObjectsCluster Key;
-  typedef JSObjectsClusterTree* Value;
-  static const Key kNoKey;
-  static const Value kNoValue;
-  static int Compare(const Key& a, const Key& b) {
-    return Key::Compare(a, b);
-  }
-};
-typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;
-
-
-class ClustersCoarser BASE_EMBEDDED {
- public:
-  ClustersCoarser();
-
-  // Processes a given retainer graph.
-  void Process(JSObjectsRetainerTree* tree);
-
-  // Returns an equivalent cluster (can be the cluster itself).
-  // If the given cluster doesn't have an equivalent, returns null cluster.
-  JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
-  // Returns whether a cluster can be substitued with an equivalent and thus,
-  // skipped in some cases.
-  bool HasAnEquivalent(const JSObjectsCluster& cluster);
-
-  // Used by JSObjectsRetainerTree::ForEach.
-  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
-  void Call(const JSObjectsCluster& cluster,
-            const NumberAndSizeInfo& number_and_size);
-
- private:
-  // Stores a list of back references for a cluster.
-  struct ClusterBackRefs {
-    explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
-    ClusterBackRefs(const ClusterBackRefs& src);
-    ClusterBackRefs& operator=(const ClusterBackRefs& src);
-
-    static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
-    void SortRefs() { refs.Sort(JSObjectsCluster::Compare); }
-    static void SortRefsIterator(ClusterBackRefs* ref) { ref->SortRefs(); }
-
-    JSObjectsCluster cluster;
-    ZoneList<JSObjectsCluster> refs;
-  };
-  typedef ZoneList<ClusterBackRefs> SimilarityList;
-
-  // A tree for storing a list of equivalents for a cluster.
-  struct ClusterEqualityConfig {
-    typedef JSObjectsCluster Key;
-    typedef JSObjectsCluster Value;
-    static const Key kNoKey;
-    static const Value kNoValue;
-    static int Compare(const Key& a, const Key& b) {
-      return Key::Compare(a, b);
-    }
-  };
-  typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;
-
-  static int ClusterBackRefsCmp(const ClusterBackRefs* a,
-                                const ClusterBackRefs* b) {
-    return ClusterBackRefs::Compare(*a, *b);
-  }
-  int DoProcess(JSObjectsRetainerTree* tree);
-  int FillEqualityTree();
-
-  static const int kInitialBackrefsListCapacity = 2;
-  static const int kInitialSimilarityListCapacity = 2000;
-  // Number of passes for finding equivalents. Limits the length of paths
-  // that can be considered equivalent.
-  static const int kMaxPassesCount = 10;
-
-  ZoneScope zscope_;
-  SimilarityList sim_list_;
-  EqualityTree eq_tree_;
-  ClusterBackRefs* current_pair_;
-  JSObjectsRetainerTree* current_set_;
-  const JSObjectsCluster* self_;
-};
-
-
-// RetainerHeapProfile is responsible for gathering and logging
-// "retainer profile" of JS objects allocated on heap.
-// It is run during garbage collection cycle, thus it doesn't need
-// to use handles.
-class RetainerTreeAggregator;
-
-class RetainerHeapProfile BASE_EMBEDDED {
- public:
-  class Printer {
-   public:
-    virtual ~Printer() {}
-    virtual void PrintRetainers(const JSObjectsCluster& cluster,
-                                const StringStream& retainers) = 0;
-  };
-
-  RetainerHeapProfile();
-  ~RetainerHeapProfile();
-
-  RetainerTreeAggregator* aggregator() { return aggregator_; }
-  ClustersCoarser* coarser() { return &coarser_; }
-  JSObjectsRetainerTree* retainers_tree() { return &retainers_tree_; }
-
-  void CollectStats(HeapObject* obj);
-  void CoarseAndAggregate();
-  void PrintStats();
-  void DebugPrintStats(Printer* printer);
-  void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);
-
- private:
-  ZoneScope zscope_;
-  JSObjectsRetainerTree retainers_tree_;
-  ClustersCoarser coarser_;
-  RetainerTreeAggregator* aggregator_;
-};
-
-
-class AggregatedHeapSnapshot {
- public:
-  AggregatedHeapSnapshot();
-  ~AggregatedHeapSnapshot();
-
-  HistogramInfo* info() { return info_; }
-  ConstructorHeapProfile* js_cons_profile() { return &js_cons_profile_; }
-  RetainerHeapProfile* js_retainer_profile() { return &js_retainer_profile_; }
-
- private:
-  HistogramInfo* info_;
-  ConstructorHeapProfile js_cons_profile_;
-  RetainerHeapProfile js_retainer_profile_;
-};
-
-
-class HeapEntriesMap;
-class HeapEntriesAllocator;
-
-class AggregatedHeapSnapshotGenerator {
- public:
-  explicit AggregatedHeapSnapshotGenerator(AggregatedHeapSnapshot* snapshot);
-  void GenerateSnapshot();
-  void FillHeapSnapshot(HeapSnapshot* snapshot);
-
-  static const int kAllStringsType = LAST_TYPE + 1;
-
- private:
-  void CalculateStringsStats();
-  void CollectStats(HeapObject* obj);
-  template<class Iterator>
-  void IterateRetainers(
-      HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map);
-
-  AggregatedHeapSnapshot* agg_snapshot_;
-};
-
-
-class ProducerHeapProfile {
- public:
-  void Setup();
-  void RecordJSObjectAllocation(Object* obj) {
-    if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
-  }
-
- private:
-  ProducerHeapProfile() : can_log_(false) { }
-  void DoRecordJSObjectAllocation(Object* obj);
-  Isolate* isolate_;
-  bool can_log_;
-
-  friend class Isolate;
-
-  DISALLOW_COPY_AND_ASSIGN(ProducerHeapProfile);
-};
-
-#endif  // ENABLE_LOGGING_AND_PROFILING
-
 } }  // namespace v8::internal
 
 #endif  // V8_HEAP_PROFILER_H_
@@ -523,11 +523,6 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
     GarbageCollectionEpilogue();
   }
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  if (FLAG_log_gc) HeapProfiler::WriteSample();
-#endif
-
   return next_gc_likely_to_collect_more;
 }
@@ -2984,9 +2979,6 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   HeapObject::cast(result)->set_map(map);
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
-#endif
   return result;
 }
@@ -3435,9 +3427,6 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
     JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
   }
   // Return the new clone.
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
-#endif
   return clone;
 }
@@ -5122,11 +5111,6 @@ bool Heap::Setup(bool create_heap_objects) {
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  // This should be called only after initial objects have been created.
-  isolate_->producer_heap_profile()->Setup();
-#endif
-
   return true;
 }
......
@@ -1445,10 +1445,6 @@ Isolate::Isolate()
   debugger_ = NULL;
 #endif
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  producer_heap_profile_ = NULL;
-#endif
-
   handle_scope_data_.Initialize();
 
 #define ISOLATE_INIT_EXECUTE(type, name, initial_value)                       \
@@ -1537,11 +1533,6 @@ void Isolate::SetIsolateThreadLocals(Isolate* isolate,
 Isolate::~Isolate() {
   TRACE_ISOLATE(destructor);
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  delete producer_heap_profile_;
-  producer_heap_profile_ = NULL;
-#endif
-
   delete unicode_cache_;
   unicode_cache_ = NULL;
@@ -1657,11 +1648,6 @@ bool Isolate::PreInit() {
   regexp_stack_ = new RegExpStack();
   regexp_stack_->isolate_ = this;
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  producer_heap_profile_ = new ProducerHeapProfile();
-  producer_heap_profile_->isolate_ = this;
-#endif
-
   state_ = PREINITIALIZED;
   return true;
 }
......
@@ -69,7 +69,6 @@ class InlineRuntimeFunctionsTable;
 class NoAllocationStringAllocator;
 class PcToCodeCache;
 class PreallocatedMemoryThread;
-class ProducerHeapProfile;
 class RegExpStack;
 class SaveContext;
 class UnicodeCache;
@@ -907,12 +906,6 @@ class Isolate {
   inline bool DebuggerHasBreakPoints();
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  ProducerHeapProfile* producer_heap_profile() {
-    return producer_heap_profile_;
-  }
-#endif
-
 #ifdef DEBUG
   HistogramInfo* heap_histograms() { return heap_histograms_; }
@@ -1172,10 +1165,6 @@ class Isolate {
   Debug* debug_;
 #endif
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  ProducerHeapProfile* producer_heap_profile_;
-#endif
-
 #define GLOBAL_BACKING_STORE(type, name, initialvalue)                        \
   type name##_;
   ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
......
@@ -521,7 +521,6 @@ Logger::Logger()
     log_events_(NULL),
     logging_nesting_(0),
     cpu_profiler_nesting_(0),
-    heap_profiler_nesting_(0),
     log_(new Log(this)),
     name_buffer_(new NameBuffer),
     address_to_name_map_(NULL),
@@ -1286,19 +1285,6 @@ void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
 }
 
 
-void Logger::HeapSampleStats(const char* space, const char* kind,
-                             intptr_t capacity, intptr_t used) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!log_->IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg(this);
-  msg.Append("heap-sample-stats,\"%s\",\"%s\","
-                 "%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
-             space, kind, capacity, used);
-  msg.WriteToLogFile();
-#endif
-}
-
-
 void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (!log_->IsEnabled() || !FLAG_log_gc) return;
@@ -1319,72 +1305,6 @@ void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
 }
 
 
-void Logger::HeapSampleJSConstructorEvent(const char* constructor,
-                                          int number, int bytes) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!log_->IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg(this);
-  msg.Append("heap-js-cons-item,%s,%d,%d\n", constructor, number, bytes);
-  msg.WriteToLogFile();
-#endif
-}
-
-// Event starts with comma, so we don't have it in the format string.
-static const char kEventText[] = "heap-js-ret-item,%s";
-// We take placeholder strings into account, but it's OK to be conservative.
-static const int kEventTextLen = sizeof(kEventText)/sizeof(kEventText[0]);
-
-void Logger::HeapSampleJSRetainersEvent(
-    const char* constructor, const char* event) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!log_->IsEnabled() || !FLAG_log_gc) return;
-  const int cons_len = StrLength(constructor);
-  const int event_len = StrLength(event);
-  int pos = 0;
-  // Retainer lists can be long. We may need to split them into multiple events.
-  do {
-    LogMessageBuilder msg(this);
-    msg.Append(kEventText, constructor);
-    int to_write = event_len - pos;
-    if (to_write > Log::kMessageBufferSize - (cons_len + kEventTextLen)) {
-      int cut_pos = pos + Log::kMessageBufferSize - (cons_len + kEventTextLen);
-      ASSERT(cut_pos < event_len);
-      while (cut_pos > pos && event[cut_pos] != ',') --cut_pos;
-      if (event[cut_pos] != ',') {
-        // Crash in debug mode, skip in release mode.
-        ASSERT(false);
-        return;
-      }
-      // Append a piece of event that fits, without trailing comma.
-      msg.AppendStringPart(event + pos, cut_pos - pos);
-      // Start next piece with comma.
-      pos = cut_pos;
-    } else {
-      msg.Append("%s", event + pos);
-      pos += event_len;
-    }
-    msg.Append('\n');
-    msg.WriteToLogFile();
-  } while (pos < event_len);
-#endif
-}
-
-
-void Logger::HeapSampleJSProducerEvent(const char* constructor,
-                                       Address* stack) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!log_->IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg(this);
-  msg.Append("heap-js-prod-item,%s", constructor);
-  while (*stack != NULL) {
-    msg.Append(",0x%" V8PRIxPTR, *stack++);
-  }
-  msg.Append("\n");
-  msg.WriteToLogFile();
-#endif
-}
-
-
 void Logger::DebugTag(const char* call_site_tag) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (!log_->IsEnabled() || !FLAG_log) return;
@@ -1447,9 +1367,6 @@ int Logger::GetActiveProfilerModules() {
   if (profiler_ != NULL && !profiler_->paused()) {
     result |= PROFILER_MODULE_CPU;
   }
-  if (FLAG_log_gc) {
-    result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
-  }
   return result;
 }
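
After this hunk, GetActiveProfilerModules can only ever report PROFILER_MODULE_CPU (or PROFILER_MODULE_NONE). A hedged cctest-style sketch, reusing the LOGGER macro from the test further down; the assertion is illustrative, not part of this patch:

  // Illustrative only: with the heap-stats bits gone, the CPU bit is the
  // sole module GetActiveProfilerModules() can report.
  int active = LOGGER->GetActiveProfilerModules();
  CHECK_EQ(0, active & ~v8::PROFILER_MODULE_CPU);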
@@ -1471,13 +1388,6 @@ void Logger::PauseProfiler(int flags, int tag) {
       --logging_nesting_;
     }
   }
-  if (flags &
-      (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
-    if (--heap_profiler_nesting_ == 0) {
-      FLAG_log_gc = false;
-      --logging_nesting_;
-    }
-  }
   if (tag != 0) {
     UncheckedIntEvent("close-tag", tag);
   }
@@ -1505,13 +1415,6 @@ void Logger::ResumeProfiler(int flags, int tag) {
       profiler_->resume();
     }
   }
-  if (flags &
-      (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
-    if (heap_profiler_nesting_++ == 0) {
-      ++logging_nesting_;
-      FLAG_log_gc = true;
-    }
-  }
 }
......
@@ -401,7 +401,6 @@ class Logger {
   int logging_nesting_;
   int cpu_profiler_nesting_;
-  int heap_profiler_nesting_;
 
   Log* log_;
......
@@ -638,8 +638,7 @@ class HeapSnapshotsCollection;
 class HeapSnapshot {
  public:
   enum Type {
-    kFull = v8::HeapSnapshot::kFull,
-    kAggregated = v8::HeapSnapshot::kAggregated
+    kFull = v8::HeapSnapshot::kFull
   };
 
   HeapSnapshot(HeapSnapshotsCollection* collection,
@@ -708,24 +708,6 @@ TEST(IsLoggingPreserved) {
   CHECK(LOGGER->is_logging());
   LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
   CHECK(LOGGER->is_logging());
-
-  CHECK(LOGGER->is_logging());
-  LOGGER->ResumeProfiler(
-      v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(LOGGER->is_logging());
-  LOGGER->PauseProfiler(
-      v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(LOGGER->is_logging());
-
-  CHECK(LOGGER->is_logging());
-  LOGGER->ResumeProfiler(
-      v8::PROFILER_MODULE_CPU |
-      v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(LOGGER->is_logging());
-  LOGGER->PauseProfiler(
-      v8::PROFILER_MODULE_CPU |
-      v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(LOGGER->is_logging());
 }
......
@@ -169,17 +169,12 @@ function TickProcessor(
           processor: this.processHeapSampleBegin },
       'heap-sample-end': { parsers: [null, null],
           processor: this.processHeapSampleEnd },
-      'heap-js-prod-item': { parsers: [null, 'var-args'],
-          processor: this.processJSProducer },
       // Ignored events.
       'profiler': null,
       'function-creation': null,
       'function-move': null,
       'function-delete': null,
-      'heap-sample-stats': null,
       'heap-sample-item': null,
-      'heap-js-cons-item': null,
-      'heap-js-ret-item': null,
       // Obsolete row types.
       'code-allocate': null,
       'begin-code-region': null,
@@ -401,17 +396,6 @@ TickProcessor.prototype.processHeapSampleEnd = function(space, state) {
 };
 
 
-TickProcessor.prototype.processJSProducer = function(constructor, stack) {
-  if (!this.currentProducerProfile_) return;
-  if (stack.length == 0) return;
-  var first = stack.shift();
-  var processedStack =
-      this.profile_.resolveAndFilterFuncs_(this.processStack(first, 0, stack));
-  processedStack.unshift(constructor);
-  this.currentProducerProfile_.addPath(processedStack);
-};
-
-
 TickProcessor.prototype.printStatistics = function() {
   print('Statistical profiling result from ' + this.lastLogFileName_ +
         ', (' + this.ticks_.total +
......