Commit 2b2a1694 authored by mlippautz, committed by Commit bot

[heap] Rework and improve object stats tracing

- Instead of tracing during marking, we can now trace in a separate phase in MC,
  where the heap is guaranteed to be iterable.
- Add more subtypes for fixed arrays, reducing the unknown bucket to roughly 8%
  (local run).
- Refactor collection calls so they go through a single bottleneck.
- Provide a JSON-based output format that can be "easily" processed in JS; see
  the sample output below.
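
For reference, a minimal sketch of the new output format: with FLAG_track_gc_object_stats
and FLAG_trace_gc_object_stats enabled, ObjectStats::PrintJSON() (see the diff below) emits
one self-contained JSON object per line, once with key "live" and once with key "dead" per
mark-compact GC. The values shown here are placeholders, not real measurements:

  { "isolate": "<ptr>", "id": <gc_count>, "key": "live", "type": "gc_descriptor", "time": <ms_since_init> }
  { "isolate": "<ptr>", "id": <gc_count>, "key": "live", "type": "bucket_sizes", "sizes": [ 32, 64, ..., 262144 ] }
  { "isolate": "<ptr>", "id": <gc_count>, "key": "live", "type": "instance_type_data", "instance_type": <n>,
    "instance_type_name": "<NAME>", "overall": <bytes>, "count": <count>, "over_allocated": <bytes>,
    "histogram": [ <per-size-bucket counts> ], "over_allocated_histogram": [ <per-size-bucket counts> ] }

Each real output line is valid JSON, so a JS post-processing script can simply split the
log on newlines and JSON.parse() each record.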

BUG=
R=ulan@chromium.org,hpayer@chromium.org

Review-Url: https://codereview.chromium.org/2129173002
Cr-Commit-Position: refs/heads/master@{#37718}
parent f4ba2a48
@@ -141,7 +141,8 @@ Heap::Heap()
incremental_marking_(nullptr),
gc_idle_time_handler_(nullptr),
memory_reducer_(nullptr),
object_stats_(nullptr),
live_object_stats_(nullptr),
dead_object_stats_(nullptr),
scavenge_job_(nullptr),
idle_scavenge_observer_(nullptr),
full_codegen_bytes_generated_(0),
@@ -5327,8 +5328,10 @@ bool Heap::SetUp() {
memory_reducer_ = new MemoryReducer(this);
object_stats_ = new ObjectStats(this);
object_stats_->ClearObjectStats(true);
if (FLAG_track_gc_object_stats) {
live_object_stats_ = new ObjectStats(this);
dead_object_stats_ = new ObjectStats(this);
}
scavenge_job_ = new ScavengeJob();
@@ -5486,8 +5489,15 @@ void Heap::TearDown() {
memory_reducer_ = nullptr;
}
delete object_stats_;
object_stats_ = nullptr;
if (live_object_stats_ != nullptr) {
delete live_object_stats_;
live_object_stats_ = nullptr;
}
if (dead_object_stats_ != nullptr) {
delete dead_object_stats_;
dead_object_stats_ = nullptr;
}
delete scavenge_job_;
scavenge_job_ = nullptr;
@@ -6378,14 +6388,16 @@ size_t Heap::NumberOfTrackedHeapObjectTypes() {
size_t Heap::ObjectCountAtLastGC(size_t index) {
if (index >= ObjectStats::OBJECT_STATS_COUNT) return 0;
return object_stats_->object_count_last_gc(index);
if (live_object_stats_ == nullptr || index >= ObjectStats::OBJECT_STATS_COUNT)
return 0;
return live_object_stats_->object_count_last_gc(index);
}
size_t Heap::ObjectSizeAtLastGC(size_t index) {
if (index >= ObjectStats::OBJECT_STATS_COUNT) return 0;
return object_stats_->object_size_last_gc(index);
if (live_object_stats_ == nullptr || index >= ObjectStats::OBJECT_STATS_COUNT)
return 0;
return live_object_stats_->object_size_last_gc(index);
}
@@ -2201,7 +2201,8 @@ class Heap {
MemoryReducer* memory_reducer_;
ObjectStats* object_stats_;
ObjectStats* live_object_stats_;
ObjectStats* dead_object_stats_;
ScavengeJob* scavenge_job_;
@@ -213,9 +213,6 @@ class IncrementalMarkingMarkingVisitor
table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
table_.Register(kVisitJSRegExp, &VisitJSRegExp);
if (FLAG_track_gc_object_stats) {
IncrementalMarkingObjectStatsVisitor::Initialize(&table_);
}
}
static const int kProgressBarScanningChunk = 32 * 1024;
@@ -325,7 +325,6 @@ void MarkCompactCollector::ClearInvalidRememberedSetSlots() {
#endif
}
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
@@ -1263,10 +1262,6 @@ void MarkCompactMarkingVisitor::Initialize() {
StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
table_.Register(kVisitJSRegExp, &VisitRegExpAndFlushCode);
if (FLAG_track_gc_object_stats) {
MarkCompactObjectStatsVisitor::Initialize(&table_);
}
}
@@ -2243,6 +2238,41 @@ void MarkCompactCollector::RegisterExternallyReferencedObject(Object** object) {
MarkObject(heap_object, mark_bit);
}
class MarkCompactCollector::ObjectStatsVisitor
: public MarkCompactCollector::HeapObjectVisitor {
public:
ObjectStatsVisitor(ObjectStats* live_stats, ObjectStats* dead_stats)
: live_stats_(live_stats), dead_stats_(dead_stats) {
DCHECK_NOT_NULL(live_stats_);
DCHECK_NOT_NULL(dead_stats_);
}
bool Visit(HeapObject* obj) override {
if (Marking::IsBlack(ObjectMarking::MarkBitFrom(obj))) {
ObjectStatsCollector::CollectStatistics(live_stats_, obj);
} else {
DCHECK(!Marking::IsGrey(ObjectMarking::MarkBitFrom(obj)));
ObjectStatsCollector::CollectStatistics(dead_stats_, obj);
}
return true;
}
private:
ObjectStats* live_stats_;
ObjectStats* dead_stats_;
};
void MarkCompactCollector::VisitAllObjects(HeapObjectVisitor* visitor) {
SpaceIterator space_it(heap());
HeapObject* obj = nullptr;
while (space_it.has_next()) {
ObjectIterator* it = space_it.next();
while ((obj = it->Next()) != nullptr) {
visitor->Visit(obj);
}
}
}
void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
double start_time = 0.0;
@@ -2262,10 +2292,6 @@ void MarkCompactCollector::MarkLiveObjects() {
} else {
// Abort any pending incremental activities e.g. incremental sweeping.
incremental_marking->Stop();
if (FLAG_track_gc_object_stats) {
// Clear object stats collected during incremental marking.
heap()->object_stats_->ClearObjectStats();
}
if (marking_deque_.in_use()) {
marking_deque_.Uninitialize(true);
}
@@ -2347,10 +2373,15 @@ void MarkCompactCollector::MarkLiveObjects() {
start_time);
}
if (FLAG_track_gc_object_stats) {
ObjectStatsVisitor visitor(heap()->live_object_stats_,
heap()->dead_object_stats_);
VisitAllObjects(&visitor);
if (FLAG_trace_gc_object_stats) {
heap()->object_stats_->TraceObjectStats();
heap()->live_object_stats_->PrintJSON("live");
heap()->dead_object_stats_->PrintJSON("dead");
}
heap()->object_stats_->CheckpointObjectStats();
heap()->live_object_stats_->CheckpointObjectStats();
heap()->dead_object_stats_->ClearObjectStats();
}
}
@@ -501,6 +501,7 @@ class MarkCompactCollector {
class EvacuateRecordOnlyVisitor;
class EvacuateVisitorBase;
class HeapObjectVisitor;
class ObjectStatsVisitor;
explicit MarkCompactCollector(Heap* heap);
@@ -511,6 +512,8 @@ class MarkCompactCollector {
int* target_fragmentation_percent,
int* max_evacuated_bytes);
void VisitAllObjects(HeapObjectVisitor* visitor);
// Finishes GC, performs heap verification if enabled.
void Finish();
@@ -18,60 +18,85 @@ static base::LazyMutex object_stats_mutex = LAZY_MUTEX_INITIALIZER;
void ObjectStats::ClearObjectStats(bool clear_last_time_stats) {
memset(object_counts_, 0, sizeof(object_counts_));
memset(object_sizes_, 0, sizeof(object_sizes_));
memset(over_allocated_, 0, sizeof(over_allocated_));
memset(size_histogram_, 0, sizeof(size_histogram_));
memset(over_allocated_histogram_, 0, sizeof(over_allocated_histogram_));
if (clear_last_time_stats) {
memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
}
}
void ObjectStats::TraceObjectStat(const char* name, int count, int size,
double time) {
int ms_count = heap()->ms_count();
PrintIsolate(isolate(),
"heap:%p, time:%f, gc:%d, type:%s, count:%d, size:%d\n",
static_cast<void*>(heap()), time, ms_count, name, count, size);
static void PrintJSONArray(size_t* array, const int len) {
PrintF("[ ");
for (int i = 0; i < len; i++) {
PrintF("%zu", array[i]);
if (i != (len - 1)) PrintF(", ");
}
PrintF(" ]");
}
void ObjectStats::TraceObjectStats() {
base::LockGuard<base::Mutex> lock_guard(object_stats_mutex.Pointer());
int index;
int count;
int size;
int total_size = 0;
void ObjectStats::PrintJSON(const char* key) {
double time = isolate()->time_millis_since_init();
#define TRACE_OBJECT_COUNT(name) \
count = static_cast<int>(object_counts_[name]); \
size = static_cast<int>(object_sizes_[name]) / KB; \
total_size += size; \
TraceObjectStat(#name, count, size, time);
INSTANCE_TYPE_LIST(TRACE_OBJECT_COUNT)
#undef TRACE_OBJECT_COUNT
#define TRACE_OBJECT_COUNT(name) \
index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \
count = static_cast<int>(object_counts_[index]); \
size = static_cast<int>(object_sizes_[index]) / KB; \
TraceObjectStat("*CODE_" #name, count, size, time);
CODE_KIND_LIST(TRACE_OBJECT_COUNT)
#undef TRACE_OBJECT_COUNT
#define TRACE_OBJECT_COUNT(name) \
index = FIRST_FIXED_ARRAY_SUB_TYPE + name; \
count = static_cast<int>(object_counts_[index]); \
size = static_cast<int>(object_sizes_[index]) / KB; \
TraceObjectStat("*FIXED_ARRAY_" #name, count, size, time);
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(TRACE_OBJECT_COUNT)
#undef TRACE_OBJECT_COUNT
#define TRACE_OBJECT_COUNT(name) \
index = \
FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \
count = static_cast<int>(object_counts_[index]); \
size = static_cast<int>(object_sizes_[index]) / KB; \
TraceObjectStat("*CODE_AGE_" #name, count, size, time);
CODE_AGE_LIST_COMPLETE(TRACE_OBJECT_COUNT)
#undef TRACE_OBJECT_COUNT
}
int gc_count = heap()->gc_count();
#define PRINT_KEY_AND_ID() \
PrintF("\"isolate\": \"%p\", \"id\": %d, \"key\": \"%s\", ", \
reinterpret_cast<void*>(isolate()), gc_count, key);
// gc_descriptor
PrintF("{ ");
PRINT_KEY_AND_ID();
PrintF("\"type\": \"gc_descriptor\", \"time\": %f }\n", time);
// bucket_sizes
PrintF("{ ");
PRINT_KEY_AND_ID();
PrintF("\"type\": \"bucket_sizes\", \"sizes\": [ ");
for (int i = 0; i < kNumberOfBuckets; i++) {
PrintF("%d", 1 << (kFirstBucketShift + i));
if (i != (kNumberOfBuckets - 1)) PrintF(", ");
}
PrintF(" ] }\n");
// instance_type_data
#define PRINT_INSTANCE_TYPE_DATA(name, index) \
PrintF("{ "); \
PRINT_KEY_AND_ID(); \
PrintF("\"type\": \"instance_type_data\", "); \
PrintF("\"instance_type\": %d, ", index); \
PrintF("\"instance_type_name\": \"%s\", ", name); \
PrintF("\"overall\": %zu, ", object_sizes_[index]); \
PrintF("\"count\": %zu, ", object_counts_[index]); \
PrintF("\"over_allocated\": %zu, ", over_allocated_[index]); \
PrintF("\"histogram\": "); \
PrintJSONArray(size_histogram_[index], kNumberOfBuckets); \
PrintF(","); \
PrintF("\"over_allocated_histogram\": "); \
PrintJSONArray(over_allocated_histogram_[index], kNumberOfBuckets); \
PrintF(" }\n");
#define INSTANCE_TYPE_WRAPPER(name) PRINT_INSTANCE_TYPE_DATA(#name, name)
#define CODE_KIND_WRAPPER(name) \
PRINT_INSTANCE_TYPE_DATA("*CODE_" #name, \
FIRST_CODE_KIND_SUB_TYPE + Code::name)
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER(name) \
PRINT_INSTANCE_TYPE_DATA("*FIXED_ARRAY_" #name, \
FIRST_FIXED_ARRAY_SUB_TYPE + name)
#define CODE_AGE_WRAPPER(name) \
PRINT_INSTANCE_TYPE_DATA( \
"*CODE_AGE_" #name, \
FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge)
INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER)
CODE_KIND_LIST(CODE_KIND_WRAPPER)
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER)
CODE_AGE_LIST_COMPLETE(CODE_AGE_WRAPPER)
#undef INSTANCE_TYPE_WRAPPER
#undef CODE_KIND_WRAPPER
#undef FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER
#undef CODE_AGE_WRAPPER
#undef PRINT_INSTANCE_TYPE_DATA
}
void ObjectStats::CheckpointObjectStats() {
base::LockGuard<base::Mutex> lock_guard(object_stats_mutex.Pointer());
@@ -134,135 +159,213 @@ void ObjectStats::CheckpointObjectStats() {
Isolate* ObjectStats::isolate() { return heap()->isolate(); }
void ObjectStatsCollector::CountFixedArray(
FixedArrayBase* fixed_array, FixedArraySubInstanceType fast_type,
FixedArraySubInstanceType dictionary_type) {
Heap* heap = fixed_array->map()->GetHeap();
if (fixed_array->map() != heap->fixed_cow_array_map() &&
fixed_array->map() != heap->fixed_double_array_map() &&
fixed_array != heap->empty_fixed_array()) {
if (fixed_array->IsDictionary()) {
heap->object_stats_->RecordFixedArraySubTypeStats(dictionary_type,
fixed_array->Size());
} else {
heap->object_stats_->RecordFixedArraySubTypeStats(fast_type,
fixed_array->Size());
void ObjectStatsCollector::CollectStatistics(ObjectStats* stats,
HeapObject* obj) {
Map* map = obj->map();
Heap* heap = obj->GetHeap();
// Record for the InstanceType.
int object_size = obj->Size();
stats->RecordObjectStats(map->instance_type(), object_size);
// Record specific sub types where possible.
if (obj->IsMap()) {
RecordMapDetails(stats, heap, obj);
}
if (obj->IsCode()) {
RecordCodeDetails(stats, heap, obj);
}
if (obj->IsSharedFunctionInfo()) {
RecordSharedFunctionInfoDetails(stats, heap, obj);
}
if (obj->IsFixedArray()) {
RecordFixedArrayDetails(stats, heap, obj);
}
if (obj->IsJSObject()) {
RecordJSObjectDetails(stats, heap, JSObject::cast(obj));
}
if (obj->IsJSWeakCollection()) {
RecordJSWeakCollectionDetails(stats, heap, JSWeakCollection::cast(obj));
}
}
void ObjectStatsCollector::CollectStatistics(StaticVisitorBase::VisitorId id,
Map* map, HeapObject* obj) {
// Record any type specific statistics here.
switch (id) {
case StaticVisitorBase::kVisitMap:
RecordMapStats(map, obj);
break;
case StaticVisitorBase::kVisitCode:
RecordCodeStats(map, obj);
break;
case StaticVisitorBase::kVisitSharedFunctionInfo:
RecordSharedFunctionInfoStats(map, obj);
break;
case StaticVisitorBase::kVisitFixedArray:
RecordFixedArrayStats(map, obj);
break;
default:
break;
}
Heap* heap = map->GetHeap();
int object_size = obj->Size();
heap->object_stats_->RecordObjectStats(map->instance_type(), object_size);
static bool CanRecordFixedArray(Heap* heap, FixedArrayBase* array) {
return array->map() != heap->fixed_cow_array_map() &&
array->map() != heap->fixed_double_array_map() &&
array != heap->empty_fixed_array();
}
void ObjectStatsCollector::CollectFixedArrayStatistics(HeapObject* obj) {
if (obj->IsJSObject()) {
JSObject* object = JSObject::cast(obj);
CountFixedArray(object->elements(), DICTIONARY_ELEMENTS_SUB_TYPE,
FAST_ELEMENTS_SUB_TYPE);
CountFixedArray(object->properties(), DICTIONARY_PROPERTIES_SUB_TYPE,
FAST_PROPERTIES_SUB_TYPE);
void ObjectStatsCollector::RecordJSObjectDetails(ObjectStats* stats, Heap* heap,
JSObject* object) {
DCHECK(object->IsJSObject());
size_t overhead = 0;
FixedArrayBase* elements = object->elements();
if (CanRecordFixedArray(heap, elements)) {
if (elements->IsDictionary()) {
SeededNumberDictionary* dict = object->element_dictionary();
int used = dict->NumberOfElements() * SeededNumberDictionary::kEntrySize;
CHECK_GE(elements->Size(), used);
overhead = elements->Size() - used;
stats->RecordFixedArraySubTypeStats(DICTIONARY_ELEMENTS_SUB_TYPE,
elements->Size(), overhead);
} else {
if (IsFastHoleyElementsKind(object->GetElementsKind())) {
int used = object->GetFastElementsUsage() * kPointerSize;
if (object->GetElementsKind() == FAST_HOLEY_DOUBLE_ELEMENTS) used *= 2;
CHECK_GE(elements->Size(), used);
overhead = elements->Size() - used;
}
stats->RecordFixedArraySubTypeStats(FAST_ELEMENTS_SUB_TYPE,
elements->Size(), overhead);
}
}
overhead = 0;
FixedArrayBase* properties = object->properties();
if (CanRecordFixedArray(heap, properties)) {
if (properties->IsDictionary()) {
NameDictionary* dict = object->property_dictionary();
int used = dict->NumberOfElements() * NameDictionary::kEntrySize;
CHECK_GE(properties->Size(), used);
overhead = properties->Size() - used;
stats->RecordFixedArraySubTypeStats(DICTIONARY_PROPERTIES_SUB_TYPE,
properties->Size(), overhead);
} else {
stats->RecordFixedArraySubTypeStats(FAST_PROPERTIES_SUB_TYPE,
properties->Size(), overhead);
}
}
}
void ObjectStatsCollector::RecordJSWeakCollectionDetails(
ObjectStats* stats, Heap* heap, JSWeakCollection* obj) {
if (obj->table()->IsHashTable()) {
ObjectHashTable* table = ObjectHashTable::cast(obj->table());
int used = table->NumberOfElements() * ObjectHashTable::kEntrySize;
size_t overhead = table->Size() - used;
stats->RecordFixedArraySubTypeStats(WEAK_COLLECTION_SUB_TYPE, table->Size(),
overhead);
}
}
void ObjectStatsCollector::RecordMapStats(Map* map, HeapObject* obj) {
Heap* heap = map->GetHeap();
void ObjectStatsCollector::RecordMapDetails(ObjectStats* stats, Heap* heap,
HeapObject* obj) {
Map* map_obj = Map::cast(obj);
DCHECK(map->instance_type() == MAP_TYPE);
DCHECK(obj->map()->instance_type() == MAP_TYPE);
DescriptorArray* array = map_obj->instance_descriptors();
if (map_obj->owns_descriptors() && array != heap->empty_descriptor_array()) {
int fixed_array_size = array->Size();
heap->object_stats_->RecordFixedArraySubTypeStats(DESCRIPTOR_ARRAY_SUB_TYPE,
fixed_array_size);
stats->RecordFixedArraySubTypeStats(DESCRIPTOR_ARRAY_SUB_TYPE,
fixed_array_size, 0);
if (array->HasEnumCache()) {
stats->RecordFixedArraySubTypeStats(ENUM_CACHE_SUB_TYPE,
array->GetEnumCache()->Size(), 0);
}
if (array->HasEnumIndicesCache()) {
stats->RecordFixedArraySubTypeStats(
ENUM_INDICES_CACHE_SUB_TYPE, array->GetEnumIndicesCache()->Size(), 0);
}
}
if (map_obj->has_code_cache()) {
FixedArray* cache = map_obj->code_cache();
heap->object_stats_->RecordFixedArraySubTypeStats(MAP_CODE_CACHE_SUB_TYPE,
cache->Size());
stats->RecordFixedArraySubTypeStats(MAP_CODE_CACHE_SUB_TYPE, cache->Size(),
0);
}
}
void ObjectStatsCollector::RecordCodeStats(Map* map, HeapObject* obj) {
Heap* heap = map->GetHeap();
void ObjectStatsCollector::RecordCodeDetails(ObjectStats* stats, Heap* heap,
HeapObject* obj) {
int object_size = obj->Size();
DCHECK(map->instance_type() == CODE_TYPE);
DCHECK(obj->map()->instance_type() == CODE_TYPE);
Code* code_obj = Code::cast(obj);
heap->object_stats_->RecordCodeSubTypeStats(code_obj->kind(),
code_obj->GetAge(), object_size);
stats->RecordCodeSubTypeStats(code_obj->kind(), code_obj->GetAge(),
object_size);
Code* code = Code::cast(obj);
if (code->deoptimization_data() != heap->empty_fixed_array()) {
stats->RecordFixedArraySubTypeStats(DEOPTIMIZATION_DATA_SUB_TYPE,
code->deoptimization_data()->Size(), 0);
}
FixedArrayBase* reloc_info =
reinterpret_cast<FixedArrayBase*>(code->unchecked_relocation_info());
if (reloc_info != heap->empty_fixed_array()) {
stats->RecordFixedArraySubTypeStats(RELOC_INFO_SUB_TYPE,
code->relocation_info()->Size(), 0);
}
FixedArrayBase* source_pos_table =
reinterpret_cast<FixedArrayBase*>(code->source_position_table());
if (source_pos_table != heap->empty_fixed_array()) {
stats->RecordFixedArraySubTypeStats(SOURCE_POS_SUB_TYPE,
source_pos_table->Size(), 0);
}
}
void ObjectStatsCollector::RecordSharedFunctionInfoStats(Map* map,
void ObjectStatsCollector::RecordSharedFunctionInfoDetails(ObjectStats* stats,
Heap* heap,
HeapObject* obj) {
Heap* heap = map->GetHeap();
SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
if (sfi->scope_info() != heap->empty_fixed_array()) {
heap->object_stats_->RecordFixedArraySubTypeStats(
SCOPE_INFO_SUB_TYPE, FixedArray::cast(sfi->scope_info())->Size());
stats->RecordFixedArraySubTypeStats(SCOPE_INFO_SUB_TYPE,
sfi->scope_info()->Size(), 0);
}
if (sfi->feedback_metadata() != heap->empty_fixed_array()) {
stats->RecordFixedArraySubTypeStats(TYPE_FEEDBACK_METADATA_SUB_TYPE,
sfi->feedback_metadata()->Size(), 0);
}
if (!sfi->OptimizedCodeMapIsCleared()) {
FixedArray* optimized_code_map = sfi->optimized_code_map();
// Optimized code map should be small, so skip accounting.
int len = optimized_code_map->length();
for (int i = SharedFunctionInfo::kEntriesStart; i < len;
i += SharedFunctionInfo::kEntryLength) {
Object* slot =
optimized_code_map->get(i + SharedFunctionInfo::kLiteralsOffset);
LiteralsArray* literals = nullptr;
if (slot->IsWeakCell()) {
WeakCell* cell = WeakCell::cast(slot);
if (!cell->cleared()) {
literals = LiteralsArray::cast(cell->value());
}
} else {
literals = LiteralsArray::cast(slot);
}
if (literals != nullptr) {
stats->RecordFixedArraySubTypeStats(LITERALS_ARRAY_SUB_TYPE,
literals->Size(), 0);
TypeFeedbackVector* tfv = literals->feedback_vector();
stats->RecordFixedArraySubTypeStats(TYPE_FEEDBACK_VECTOR_SUB_TYPE,
tfv->Size(), 0);
}
}
}
}
void ObjectStatsCollector::RecordFixedArrayStats(Map* map, HeapObject* obj) {
Heap* heap = map->GetHeap();
void ObjectStatsCollector::RecordFixedArrayDetails(ObjectStats* stats,
Heap* heap,
HeapObject* obj) {
FixedArray* fixed_array = FixedArray::cast(obj);
if (fixed_array == heap->string_table()) {
heap->object_stats_->RecordFixedArraySubTypeStats(STRING_TABLE_SUB_TYPE,
fixed_array->Size());
stats->RecordFixedArraySubTypeStats(STRING_TABLE_SUB_TYPE,
fixed_array->Size(), 0);
}
if (fixed_array == heap->weak_object_to_code_table()) {
WeakHashTable* table = reinterpret_cast<WeakHashTable*>(fixed_array);
int used = table->NumberOfElements() * WeakHashTable::kEntrySize;
CHECK_GE(fixed_array->Size(), used);
size_t overhead = fixed_array->Size() - used;
stats->RecordFixedArraySubTypeStats(OBJECT_TO_CODE_SUB_TYPE,
fixed_array->Size(), overhead);
}
if (obj->IsContext()) {
stats->RecordFixedArraySubTypeStats(CONTEXT_SUB_TYPE, fixed_array->Size(),
0);
}
if (fixed_array->map() == heap->fixed_cow_array_map()) {
stats->RecordFixedArraySubTypeStats(COPY_ON_WRITE_SUB_TYPE,
fixed_array->Size(), 0);
}
}
void MarkCompactObjectStatsVisitor::Initialize(
VisitorDispatchTable<Callback>* original) {
// Copy the original visitor table to make call-through possible. After we
// preserved a copy locally, we patch the original table to call us.
table_.CopyFrom(original);
#define COUNT_FUNCTION(id) original->Register(kVisit##id, Visit<kVisit##id>);
VISITOR_ID_LIST(COUNT_FUNCTION)
#undef COUNT_FUNCTION
}
template <MarkCompactObjectStatsVisitor::VisitorId id>
void MarkCompactObjectStatsVisitor::Visit(Map* map, HeapObject* obj) {
ObjectStatsCollector::CollectStatistics(id, map, obj);
table_.GetVisitorById(id)(map, obj);
ObjectStatsCollector::CollectFixedArrayStatistics(obj);
}
void IncrementalMarkingObjectStatsVisitor::Initialize(
VisitorDispatchTable<Callback>* original) {
// Copy the original visitor table to make call-through possible. After we
// preserved a copy locally, we patch the original table to call us.
table_.CopyFrom(original);
#define COUNT_FUNCTION(id) original->Register(kVisit##id, Visit<kVisit##id>);
VISITOR_ID_LIST(COUNT_FUNCTION)
#undef COUNT_FUNCTION
}
template <IncrementalMarkingObjectStatsVisitor::VisitorId id>
void IncrementalMarkingObjectStatsVisitor::Visit(Map* map, HeapObject* obj) {
ObjectStatsCollector::CollectStatistics(id, map, obj);
table_.GetVisitorById(id)(map, obj);
ObjectStatsCollector::CollectFixedArrayStatistics(obj);
}
} // namespace internal
@@ -14,7 +14,7 @@ namespace internal {
class ObjectStats {
public:
explicit ObjectStats(Heap* heap) : heap_(heap) {}
explicit ObjectStats(Heap* heap) : heap_(heap) { ClearObjectStats(); }
// ObjectStats are kept in two arrays, counts and sizes. Related stats are
// stored in a contiguous linear buffer. Stats groups are stored one after
@@ -30,14 +30,14 @@ class ObjectStats {
void ClearObjectStats(bool clear_last_time_stats = false);
void TraceObjectStats();
void TraceObjectStat(const char* name, int count, int size, double time);
void CheckpointObjectStats();
void PrintJSON(const char* key);
void RecordObjectStats(InstanceType type, size_t size) {
DCHECK(type <= LAST_TYPE);
object_counts_[type]++;
object_sizes_[type] += size;
size_histogram_[type][HistogramIndexFromSize(size)]++;
}
void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
@@ -52,12 +52,22 @@ class ObjectStats {
object_sizes_[code_sub_type_index] += size;
object_counts_[code_age_index]++;
object_sizes_[code_age_index] += size;
const int idx = HistogramIndexFromSize(size);
size_histogram_[code_sub_type_index][idx]++;
size_histogram_[code_age_index][idx]++;
}
void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
void RecordFixedArraySubTypeStats(int array_sub_type, size_t size,
size_t over_allocated) {
DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
size_histogram_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]
[HistogramIndexFromSize(size)]++;
over_allocated_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] +=
over_allocated;
over_allocated_histogram_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]
[HistogramIndexFromSize(over_allocated)]++;
}
size_t object_count_last_gc(size_t index) {
@@ -72,46 +82,49 @@ class ObjectStats {
Heap* heap() { return heap_; }
private:
Heap* heap_;
static const int kFirstBucketShift = 5; // <=32
static const int kLastBucketShift = 19; // >512k
static const int kFirstBucket = 1 << kFirstBucketShift;
static const int kLastBucket = 1 << kLastBucketShift;
static const int kNumberOfBuckets = kLastBucketShift - kFirstBucketShift;
int HistogramIndexFromSize(size_t size) {
if (size == 0) return 0;
int idx =
static_cast<int>(log2(static_cast<double>(size))) - kFirstBucketShift;
return idx < 0 ? 0 : idx;
}
// Object counts and used memory by InstanceType
Heap* heap_;
// Object counts and used memory by InstanceType.
size_t object_counts_[OBJECT_STATS_COUNT];
size_t object_counts_last_time_[OBJECT_STATS_COUNT];
size_t object_sizes_[OBJECT_STATS_COUNT];
size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
// Approximation of overallocated memory by InstanceType.
size_t over_allocated_[OBJECT_STATS_COUNT];
// Detailed histograms by InstanceType.
size_t size_histogram_[OBJECT_STATS_COUNT][kNumberOfBuckets];
size_t over_allocated_histogram_[OBJECT_STATS_COUNT][kNumberOfBuckets];
};
class ObjectStatsCollector {
public:
static void CollectStatistics(StaticVisitorBase::VisitorId id, Map* map,
HeapObject* obj);
static void CollectFixedArrayStatistics(HeapObject* obj);
static void CountFixedArray(FixedArrayBase* fixed_array,
FixedArraySubInstanceType fast_type,
FixedArraySubInstanceType dictionary_type);
static void RecordMapStats(Map* map, HeapObject* obj);
static void RecordCodeStats(Map* map, HeapObject* obj);
static void RecordSharedFunctionInfoStats(Map* map, HeapObject* obj);
static void RecordFixedArrayStats(Map* map, HeapObject* obj);
};
class MarkCompactObjectStatsVisitor
: public StaticMarkingVisitor<MarkCompactObjectStatsVisitor> {
public:
static void Initialize(VisitorDispatchTable<Callback>* original);
static void CollectStatistics(ObjectStats* stats, HeapObject* obj);
template <VisitorId id>
static inline void Visit(Map* map, HeapObject* obj);
};
class IncrementalMarkingObjectStatsVisitor
: public StaticMarkingVisitor<IncrementalMarkingObjectStatsVisitor> {
public:
static void Initialize(VisitorDispatchTable<Callback>* original);
private:
static void RecordMapDetails(ObjectStats* stats, Heap* heap, HeapObject* obj);
static void RecordCodeDetails(ObjectStats* stats, Heap* heap,
HeapObject* obj);
static void RecordSharedFunctionInfoDetails(ObjectStats* stats, Heap* heap,
HeapObject* obj);
static void RecordFixedArrayDetails(ObjectStats* stats, Heap* heap,
HeapObject* obj);
template <VisitorId id>
static inline void Visit(Map* map, HeapObject* obj);
static void RecordJSObjectDetails(ObjectStats* stats, Heap* heap,
JSObject* object);
static void RecordJSWeakCollectionDetails(ObjectStats* stats, Heap* heap,
JSWeakCollection* obj);
};
} // namespace internal
@@ -3228,6 +3228,7 @@ CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakCollection)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
@@ -777,22 +777,33 @@ STATIC_ASSERT(FOREIGN_TYPE == Internals::kForeignType);
std::ostream& operator<<(std::ostream& os, InstanceType instance_type);
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V) \
V(FAST_ELEMENTS_SUB_TYPE) \
V(CONTEXT_SUB_TYPE) \
V(COPY_ON_WRITE_SUB_TYPE) \
V(DEOPTIMIZATION_DATA_SUB_TYPE) \
V(DESCRIPTOR_ARRAY_SUB_TYPE) \
V(ENUM_CACHE_SUB_TYPE) \
V(ENUM_INDICES_CACHE_SUB_TYPE) \
V(DICTIONARY_ELEMENTS_SUB_TYPE) \
V(FAST_PROPERTIES_SUB_TYPE) \
V(DICTIONARY_PROPERTIES_SUB_TYPE) \
V(FAST_ELEMENTS_SUB_TYPE) \
V(FAST_PROPERTIES_SUB_TYPE) \
V(LITERALS_ARRAY_SUB_TYPE) \
V(MAP_CODE_CACHE_SUB_TYPE) \
V(OBJECT_TO_CODE_SUB_TYPE) \
V(RELOC_INFO_SUB_TYPE) \
V(SCOPE_INFO_SUB_TYPE) \
V(SOURCE_POS_SUB_TYPE) \
V(STRING_TABLE_SUB_TYPE) \
V(DESCRIPTOR_ARRAY_SUB_TYPE)
V(TYPE_FEEDBACK_VECTOR_SUB_TYPE) \
V(TYPE_FEEDBACK_METADATA_SUB_TYPE) \
V(WEAK_COLLECTION_SUB_TYPE)
enum FixedArraySubInstanceType {
#define DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE(name) name,
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE)
#undef DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE
LAST_FIXED_ARRAY_SUB_TYPE = DESCRIPTOR_ARRAY_SUB_TYPE
LAST_FIXED_ARRAY_SUB_TYPE = WEAK_COLLECTION_SUB_TYPE
};
@@ -10056,6 +10067,8 @@ class JSMapIterator: public OrderedHashTableIterator<JSMapIterator,
// Base class for both JSWeakMap and JSWeakSet
class JSWeakCollection: public JSObject {
public:
DECLARE_CAST(JSWeakCollection)
// [table]: the backing hash table mapping keys to values.
DECL_ACCESSORS(table, Object)