Commit 2b2a1694 authored by mlippautz, committed by Commit bot

[heap] Rework and improve object stats tracing

- Instead of tracing during marking, we can now trace in a separate mark-compact
  (MC) phase, where the heap is guaranteed to be iterable.
- Add more subtypes for fixed arrays, reducing the unknown bucket to ~8%
  (local run).
- Refactor collection calls to have a single bottleneck.
- Provide a JSON-based output format that can be "easily" processed in JS.

BUG=
R=ulan@chromium.org,hpayer@chromium.org

Review-Url: https://codereview.chromium.org/2129173002
Cr-Commit-Position: refs/heads/master@{#37718}
parent f4ba2a48
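A condensed sketch of the new flow, assembled from the mark-compact.cc hunks below (pseudocode, not code from the commit): after the atomic marking pause every object is either black (proved reachable) or white (dead); grey would mean marking is unfinished. Stats collection therefore no longer piggy-backs on the marking visitors and instead runs as a single pass over the now-iterable heap:

  // Pseudocode. AllHeapObjects() is a hypothetical stand-in for the
  // SpaceIterator/ObjectIterator loop that VisitAllObjects implements below.
  for (HeapObject* obj : AllHeapObjects()) {
    if (Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)))
      ObjectStatsCollector::CollectStatistics(live_object_stats_, obj);
    else
      ObjectStatsCollector::CollectStatistics(dead_object_stats_, obj);
  }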
@@ -141,7 +141,8 @@ Heap::Heap()
incremental_marking_(nullptr),
gc_idle_time_handler_(nullptr),
memory_reducer_(nullptr),
- object_stats_(nullptr),
+ live_object_stats_(nullptr),
+ dead_object_stats_(nullptr),
scavenge_job_(nullptr),
idle_scavenge_observer_(nullptr),
full_codegen_bytes_generated_(0),
@@ -5327,8 +5328,10 @@ bool Heap::SetUp() {
memory_reducer_ = new MemoryReducer(this);
- object_stats_ = new ObjectStats(this);
- object_stats_->ClearObjectStats(true);
+ if (FLAG_track_gc_object_stats) {
+   live_object_stats_ = new ObjectStats(this);
+   dead_object_stats_ = new ObjectStats(this);
+ }
scavenge_job_ = new ScavengeJob();
@@ -5486,8 +5489,15 @@ void Heap::TearDown() {
memory_reducer_ = nullptr;
}
- delete object_stats_;
- object_stats_ = nullptr;
+ if (live_object_stats_ != nullptr) {
+   delete live_object_stats_;
+   live_object_stats_ = nullptr;
+ }
+ if (dead_object_stats_ != nullptr) {
+   delete dead_object_stats_;
+   dead_object_stats_ = nullptr;
+ }
delete scavenge_job_;
scavenge_job_ = nullptr;
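Both stats objects exist only when FLAG_track_gc_object_stats was set at Heap::SetUp above, hence the null checks here and in the ObjectCountAtLastGC/ObjectSizeAtLastGC accessors below.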
@@ -6378,14 +6388,16 @@ size_t Heap::NumberOfTrackedHeapObjectTypes() {
size_t Heap::ObjectCountAtLastGC(size_t index) {
- if (index >= ObjectStats::OBJECT_STATS_COUNT) return 0;
- return object_stats_->object_count_last_gc(index);
+ if (live_object_stats_ == nullptr || index >= ObjectStats::OBJECT_STATS_COUNT)
+   return 0;
+ return live_object_stats_->object_count_last_gc(index);
}
size_t Heap::ObjectSizeAtLastGC(size_t index) {
- if (index >= ObjectStats::OBJECT_STATS_COUNT) return 0;
- return object_stats_->object_size_last_gc(index);
+ if (live_object_stats_ == nullptr || index >= ObjectStats::OBJECT_STATS_COUNT)
+   return 0;
+ return live_object_stats_->object_size_last_gc(index);
}
......
@@ -2201,7 +2201,8 @@ class Heap {
MemoryReducer* memory_reducer_;
- ObjectStats* object_stats_;
+ ObjectStats* live_object_stats_;
+ ObjectStats* dead_object_stats_;
ScavengeJob* scavenge_job_;
......
@@ -213,9 +213,6 @@ class IncrementalMarkingMarkingVisitor
table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
table_.Register(kVisitJSRegExp, &VisitJSRegExp);
- if (FLAG_track_gc_object_stats) {
-   IncrementalMarkingObjectStatsVisitor::Initialize(&table_);
- }
}
static const int kProgressBarScanningChunk = 32 * 1024;
......
@@ -325,7 +325,6 @@ void MarkCompactCollector::ClearInvalidRememberedSetSlots() {
#endif
}
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
@@ -1263,10 +1262,6 @@ void MarkCompactMarkingVisitor::Initialize() {
StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
table_.Register(kVisitJSRegExp, &VisitRegExpAndFlushCode);
- if (FLAG_track_gc_object_stats) {
-   MarkCompactObjectStatsVisitor::Initialize(&table_);
- }
}
@@ -2243,6 +2238,41 @@ void MarkCompactCollector::RegisterExternallyReferencedObject(Object** object) {
MarkObject(heap_object, mark_bit);
}
+ class MarkCompactCollector::ObjectStatsVisitor
+     : public MarkCompactCollector::HeapObjectVisitor {
+  public:
+   ObjectStatsVisitor(ObjectStats* live_stats, ObjectStats* dead_stats)
+       : live_stats_(live_stats), dead_stats_(dead_stats) {
+     DCHECK_NOT_NULL(live_stats_);
+     DCHECK_NOT_NULL(dead_stats_);
+   }
+
+   bool Visit(HeapObject* obj) override {
+     if (Marking::IsBlack(ObjectMarking::MarkBitFrom(obj))) {
+       ObjectStatsCollector::CollectStatistics(live_stats_, obj);
+     } else {
+       DCHECK(!Marking::IsGrey(ObjectMarking::MarkBitFrom(obj)));
+       ObjectStatsCollector::CollectStatistics(dead_stats_, obj);
+     }
+     return true;
+   }
+
+  private:
+   ObjectStats* live_stats_;
+   ObjectStats* dead_stats_;
+ };
+
+ void MarkCompactCollector::VisitAllObjects(HeapObjectVisitor* visitor) {
+   SpaceIterator space_it(heap());
+   HeapObject* obj = nullptr;
+   while (space_it.has_next()) {
+     ObjectIterator* it = space_it.next();
+     while ((obj = it->Next()) != nullptr) {
+       visitor->Visit(obj);
+     }
+   }
+ }
void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
double start_time = 0.0;
@@ -2262,10 +2292,6 @@ void MarkCompactCollector::MarkLiveObjects() {
} else {
// Abort any pending incremental activities e.g. incremental sweeping.
incremental_marking->Stop();
- if (FLAG_track_gc_object_stats) {
-   // Clear object stats collected during incremental marking.
-   heap()->object_stats_->ClearObjectStats();
- }
if (marking_deque_.in_use()) {
marking_deque_.Uninitialize(true);
}
@@ -2347,10 +2373,15 @@ void MarkCompactCollector::MarkLiveObjects() {
start_time);
}
if (FLAG_track_gc_object_stats) {
+ ObjectStatsVisitor visitor(heap()->live_object_stats_,
+                            heap()->dead_object_stats_);
+ VisitAllObjects(&visitor);
if (FLAG_trace_gc_object_stats) {
- heap()->object_stats_->TraceObjectStats();
+ heap()->live_object_stats_->PrintJSON("live");
+ heap()->dead_object_stats_->PrintJSON("dead");
}
- heap()->object_stats_->CheckpointObjectStats();
+ heap()->live_object_stats_->CheckpointObjectStats();
+ heap()->dead_object_stats_->ClearObjectStats();
}
}
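With this wiring, the statistics are rebuilt on every full GC when tracking is enabled, and the tracing flag additionally dumps the live and dead snapshots as JSON. Assuming the usual V8 flag spelling for FLAG_track_gc_object_stats and FLAG_trace_gc_object_stats, a d8 run would look like:

  d8 --track-gc-object-stats --trace-gc-object-stats script.js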
......
@@ -501,6 +501,7 @@ class MarkCompactCollector {
class EvacuateRecordOnlyVisitor;
class EvacuateVisitorBase;
class HeapObjectVisitor;
+ class ObjectStatsVisitor;
explicit MarkCompactCollector(Heap* heap);
@@ -511,6 +512,8 @@ class MarkCompactCollector {
int* target_fragmentation_percent,
int* max_evacuated_bytes);
+ void VisitAllObjects(HeapObjectVisitor* visitor);
// Finishes GC, performs heap verification if enabled.
void Finish();
......
This diff is collapsed.
@@ -14,7 +14,7 @@ namespace internal {
class ObjectStats {
public:
- explicit ObjectStats(Heap* heap) : heap_(heap) {}
+ explicit ObjectStats(Heap* heap) : heap_(heap) { ClearObjectStats(); }
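The constructor now clears its own counters, which is why the explicit ClearObjectStats(true) call disappears from Heap::SetUp in the heap.cc hunk above.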
// ObjectStats are kept in two arrays, counts and sizes. Related stats are
// stored in a contiguous linear buffer. Stats groups are stored one after
@@ -30,14 +30,14 @@ class ObjectStats {
void ClearObjectStats(bool clear_last_time_stats = false);
- void TraceObjectStats();
- void TraceObjectStat(const char* name, int count, int size, double time);
void CheckpointObjectStats();
+ void PrintJSON(const char* key);
void RecordObjectStats(InstanceType type, size_t size) {
DCHECK(type <= LAST_TYPE);
object_counts_[type]++;
object_sizes_[type] += size;
+ size_histogram_[type][HistogramIndexFromSize(size)]++;
}
void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
@@ -52,12 +52,22 @@ class ObjectStats {
object_sizes_[code_sub_type_index] += size;
object_counts_[code_age_index]++;
object_sizes_[code_age_index] += size;
+ const int idx = HistogramIndexFromSize(size);
+ size_histogram_[code_sub_type_index][idx]++;
+ size_histogram_[code_age_index][idx]++;
}
- void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
+ void RecordFixedArraySubTypeStats(int array_sub_type, size_t size,
+                                   size_t over_allocated) {
DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
+ size_histogram_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]
+                [HistogramIndexFromSize(size)]++;
+ over_allocated_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] +=
+     over_allocated;
+ over_allocated_histogram_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]
+                          [HistogramIndexFromSize(over_allocated)]++;
}
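A hypothetical call-site sketch (helper name and slack computation invented for illustration; the real callers live in the object-stats collector): recording a dictionary-mode elements store of size bytes, of which only used bytes carry payload:

  // Hypothetical helper, not from the commit: attribute size - used bytes
  // of a dictionary elements backing store as over-allocation.
  void RecordDictionaryElementsSketch(ObjectStats* stats, size_t size,
                                      size_t used) {
    stats->RecordFixedArraySubTypeStats(DICTIONARY_ELEMENTS_SUB_TYPE, size,
                                        size - used);
  }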
size_t object_count_last_gc(size_t index) {
@@ -72,46 +82,49 @@ class ObjectStats {
Heap* heap() { return heap_; }
private:
- Heap* heap_;
+ static const int kFirstBucketShift = 5;  // <=32
+ static const int kLastBucketShift = 19;  // >512k
+ static const int kFirstBucket = 1 << kFirstBucketShift;
+ static const int kLastBucket = 1 << kLastBucketShift;
+ static const int kNumberOfBuckets = kLastBucketShift - kFirstBucketShift;
+
+ int HistogramIndexFromSize(size_t size) {
+   if (size == 0) return 0;
+   int idx =
+       static_cast<int>(log2(static_cast<double>(size))) - kFirstBucketShift;
+   return idx < 0 ? 0 : idx;
+ }
- // Object counts and used memory by InstanceType
+ Heap* heap_;
+
+ // Object counts and used memory by InstanceType.
size_t object_counts_[OBJECT_STATS_COUNT];
size_t object_counts_last_time_[OBJECT_STATS_COUNT];
size_t object_sizes_[OBJECT_STATS_COUNT];
size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
+ // Approximation of overallocated memory by InstanceType.
+ size_t over_allocated_[OBJECT_STATS_COUNT];
+ // Detailed histograms by InstanceType.
+ size_t size_histogram_[OBJECT_STATS_COUNT][kNumberOfBuckets];
+ size_t over_allocated_histogram_[OBJECT_STATS_COUNT][kNumberOfBuckets];
};
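A worked example of the bucketing (illustrative, not part of the commit): kFirstBucketShift = 5 and kLastBucketShift = 19 give kNumberOfBuckets = 14, and the index is floor(log2(size)) - 5, clamped below at zero:

  #include <cmath>
  #include <cstddef>

  // Stand-alone mirror of the inline HistogramIndexFromSize above.
  int HistogramIndexFromSizeSketch(size_t size) {
    if (size == 0) return 0;
    int idx = static_cast<int>(std::log2(static_cast<double>(size))) - 5;
    return idx < 0 ? 0 : idx;
  }
  // HistogramIndexFromSizeSketch(48)     == 0   (sizes below 64 share bucket 0)
  // HistogramIndexFromSizeSketch(64)     == 1
  // HistogramIndexFromSizeSketch(4096)   == 7
  // HistogramIndexFromSizeSketch(262144) == 13  (256K..512K-1, the last bucket)

Note that, as written, a size of 2^19 (512K) or more computes index 14, one past the last bucket; the "// >512k" comment suggests that range is meant to land in the final bucket.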
class ObjectStatsCollector {
public:
- static void CollectStatistics(StaticVisitorBase::VisitorId id, Map* map,
-                               HeapObject* obj);
- static void CollectFixedArrayStatistics(HeapObject* obj);
- static void CountFixedArray(FixedArrayBase* fixed_array,
-                             FixedArraySubInstanceType fast_type,
-                             FixedArraySubInstanceType dictionary_type);
- static void RecordMapStats(Map* map, HeapObject* obj);
- static void RecordCodeStats(Map* map, HeapObject* obj);
- static void RecordSharedFunctionInfoStats(Map* map, HeapObject* obj);
- static void RecordFixedArrayStats(Map* map, HeapObject* obj);
- };
- class MarkCompactObjectStatsVisitor
-     : public StaticMarkingVisitor<MarkCompactObjectStatsVisitor> {
-  public:
-   static void Initialize(VisitorDispatchTable<Callback>* original);
+ static void CollectStatistics(ObjectStats* stats, HeapObject* obj);
-   template <VisitorId id>
-   static inline void Visit(Map* map, HeapObject* obj);
- };
- class IncrementalMarkingObjectStatsVisitor
-     : public StaticMarkingVisitor<IncrementalMarkingObjectStatsVisitor> {
-  public:
-   static void Initialize(VisitorDispatchTable<Callback>* original);
-   template <VisitorId id>
-   static inline void Visit(Map* map, HeapObject* obj);
+
+  private:
+ static void RecordMapDetails(ObjectStats* stats, Heap* heap, HeapObject* obj);
+ static void RecordCodeDetails(ObjectStats* stats, Heap* heap,
+                               HeapObject* obj);
+ static void RecordSharedFunctionInfoDetails(ObjectStats* stats, Heap* heap,
+                                             HeapObject* obj);
+ static void RecordFixedArrayDetails(ObjectStats* stats, Heap* heap,
+                                     HeapObject* obj);
+ static void RecordJSObjectDetails(ObjectStats* stats, Heap* heap,
+                                   JSObject* object);
+ static void RecordJSWeakCollectionDetails(ObjectStats* stats, Heap* heap,
+                                           JSWeakCollection* obj);
};
} // namespace internal
......
@@ -3228,6 +3228,7 @@ CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
+ CAST_ACCESSOR(JSWeakCollection)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
......
@@ -777,22 +777,33 @@ STATIC_ASSERT(FOREIGN_TYPE == Internals::kForeignType);
std::ostream& operator<<(std::ostream& os, InstanceType instance_type);
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V) \
- V(FAST_ELEMENTS_SUB_TYPE)            \
+ V(CONTEXT_SUB_TYPE)                  \
+ V(COPY_ON_WRITE_SUB_TYPE)            \
+ V(DEOPTIMIZATION_DATA_SUB_TYPE)      \
+ V(DESCRIPTOR_ARRAY_SUB_TYPE)         \
+ V(ENUM_CACHE_SUB_TYPE)               \
+ V(ENUM_INDICES_CACHE_SUB_TYPE)       \
V(DICTIONARY_ELEMENTS_SUB_TYPE)        \
- V(FAST_PROPERTIES_SUB_TYPE)          \
V(DICTIONARY_PROPERTIES_SUB_TYPE)      \
+ V(FAST_ELEMENTS_SUB_TYPE)            \
+ V(FAST_PROPERTIES_SUB_TYPE)          \
+ V(LITERALS_ARRAY_SUB_TYPE)           \
V(MAP_CODE_CACHE_SUB_TYPE)             \
+ V(OBJECT_TO_CODE_SUB_TYPE)           \
+ V(RELOC_INFO_SUB_TYPE)               \
V(SCOPE_INFO_SUB_TYPE)                 \
+ V(SOURCE_POS_SUB_TYPE)               \
V(STRING_TABLE_SUB_TYPE)               \
- V(DESCRIPTOR_ARRAY_SUB_TYPE)
+ V(TYPE_FEEDBACK_VECTOR_SUB_TYPE)     \
+ V(TYPE_FEEDBACK_METADATA_SUB_TYPE)   \
+ V(WEAK_COLLECTION_SUB_TYPE)
enum FixedArraySubInstanceType {
#define DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE(name) name,
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE)
#undef DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE
- LAST_FIXED_ARRAY_SUB_TYPE = DESCRIPTOR_ARRAY_SUB_TYPE
+ LAST_FIXED_ARRAY_SUB_TYPE = WEAK_COLLECTION_SUB_TYPE
};
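RecordFixedArraySubTypeStats above indexes the shared stats arrays at FIRST_FIXED_ARRAY_SUB_TYPE + sub_type, which is why LAST_FIXED_ARRAY_SUB_TYPE must keep pointing at the final list entry. Paraphrased indexing (the actual code is in the object-stats.h hunk above):

  const int index = FIRST_FIXED_ARRAY_SUB_TYPE + DICTIONARY_ELEMENTS_SUB_TYPE;
  object_counts_[index]++;       // one more dictionary backing store
  object_sizes_[index] += size;  // bytes attributed to that subtype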
@@ -10056,6 +10067,8 @@ class JSMapIterator: public OrderedHashTableIterator<JSMapIterator,
// Base class for both JSWeakMap and JSWeakSet
class JSWeakCollection: public JSObject {
public:
+ DECLARE_CAST(JSWeakCollection)
// [table]: the backing hash table mapping keys to values.
DECL_ACCESSORS(table, Object)
......