Provide more accurate results for the used heap size via GetHeapStatistics.

I observed that the used heap size reported by Heap::SizeOfObjects() is
usually about 10% bigger than the number obtained by summing up the
sizes of individual heap objects.

This aligns DevTools Timeline stats with Heap profiler stats.
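For context, a minimal sketch of how an embedder observes the improved number through the public API (hedged: this assumes the v8::HeapStatistics interface as it existed around this revision):

#include <v8.h>
#include <cstdio>

void ReportHeapUsage() {
  v8::HeapStatistics stats;
  v8::V8::GetHeapStatistics(&stats);
  // used_heap_size() is the figure this change makes more precise;
  // total_heap_size() still reflects allocated (reserved) memory.
  std::printf("used %zu of %zu bytes\n",
              stats.used_heap_size(), stats.total_heap_size());
}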

Review URL: http://codereview.chromium.org/4888001

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5825 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent ca7a4387
@@ -788,15 +788,13 @@ void AggregatedHeapSnapshotGenerator::CalculateStringsStats() {
void AggregatedHeapSnapshotGenerator::CollectStats(HeapObject* obj) {
InstanceType type = obj->map()->instance_type();
ASSERT(0 <= type && type <= LAST_TYPE);
if (!FreeListNode::IsFreeListNode(obj)) {
agg_snapshot_->info()[type].increment_number(1);
agg_snapshot_->info()[type].increment_bytes(obj->Size());
}
agg_snapshot_->info()[type].increment_number(1);
agg_snapshot_->info()[type].increment_bytes(obj->Size());
}
void AggregatedHeapSnapshotGenerator::GenerateSnapshot() {
HeapIterator iterator;
HeapIterator iterator(HeapIterator::kPreciseFiltering);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
CollectStats(obj);
agg_snapshot_->js_cons_profile()->CollectStats(obj);
......
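The guard removed above became unnecessary because an iterator constructed with kPreciseFiltering skips free-list nodes itself. A sketch of the resulting call-site shape, using only names from this change:

HeapIterator iterator(HeapIterator::kPreciseFiltering);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
  // obj is never a free-list node here, so the per-type counters can
  // be updated unconditionally.
  CollectStats(obj);
}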
@@ -404,7 +404,7 @@ intptr_t Heap::SizeOfObjects() {
intptr_t total = 0;
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
total += space->Size();
total += space->SizeOfObjects();
}
return total;
}
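A worked example of the gap this closes (numbers are illustrative, not taken from the source): if the large object space holds chunks of 1024 KB and 512 KB backing objects of 900 KB and 450 KB, summing space->Size() yields 1536 KB while summing space->SizeOfObjects() yields 1350 KB, an overestimate of roughly 14%, the same order as the ~10% discrepancy described in the commit message.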
@@ -4408,13 +4408,10 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
MemoryAllocator::Size() + MemoryAllocator::Available();
*stats->os_error = OS::GetLastError();
if (take_snapshot) {
HeapIterator iterator;
HeapIterator iterator(HeapIterator::kPreciseFiltering);
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
// Note: snapshot won't be precise because IsFreeListNode returns true
// for any bytearray.
if (FreeListNode::IsFreeListNode(obj)) continue;
InstanceType type = obj->map()->instance_type();
ASSERT(0 <= type && type <= LAST_TYPE);
stats->objects_per_type[type]++;
@@ -4769,7 +4766,17 @@ OldSpace* OldSpaces::next() {
}
SpaceIterator::SpaceIterator() : current_space_(FIRST_SPACE), iterator_(NULL) {
SpaceIterator::SpaceIterator()
: current_space_(FIRST_SPACE),
iterator_(NULL),
size_func_(NULL) {
}
SpaceIterator::SpaceIterator(HeapObjectCallback size_func)
: current_space_(FIRST_SPACE),
iterator_(NULL),
size_func_(size_func) {
}
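The size callback captured here is threaded into each per-space iterator in CreateIterator() below; under precise filtering it is MarkCompactCollector::SizeOfMarkedObject, so object sizes can be computed even while objects carry GC marks.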
@@ -4807,25 +4814,25 @@ ObjectIterator* SpaceIterator::CreateIterator() {
switch (current_space_) {
case NEW_SPACE:
iterator_ = new SemiSpaceIterator(Heap::new_space());
iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_);
break;
case OLD_POINTER_SPACE:
iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_);
break;
case OLD_DATA_SPACE:
iterator_ = new HeapObjectIterator(Heap::old_data_space());
iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_);
break;
case CODE_SPACE:
iterator_ = new HeapObjectIterator(Heap::code_space());
iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_);
break;
case MAP_SPACE:
iterator_ = new HeapObjectIterator(Heap::map_space());
iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_);
break;
case CELL_SPACE:
iterator_ = new HeapObjectIterator(Heap::cell_space());
iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_);
break;
case LO_SPACE:
iterator_ = new LargeObjectIterator(Heap::lo_space());
iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_);
break;
}
@@ -4835,7 +4842,54 @@ ObjectIterator* SpaceIterator::CreateIterator() {
}
HeapIterator::HeapIterator() {
class FreeListNodesFilter {
public:
FreeListNodesFilter() {
MarkFreeListNodes();
}
inline bool IsFreeListNode(HeapObject* object) {
if (object->IsMarked()) {
object->ClearMark();
return true;
} else {
return false;
}
}
private:
void MarkFreeListNodes() {
Heap::old_pointer_space()->MarkFreeListNodes();
Heap::old_data_space()->MarkFreeListNodes();
MarkCodeSpaceFreeListNodes();
Heap::map_space()->MarkFreeListNodes();
Heap::cell_space()->MarkFreeListNodes();
}
void MarkCodeSpaceFreeListNodes() {
// For code space, using FreeListNode::IsFreeListNode is OK.
HeapObjectIterator iter(Heap::code_space());
for (HeapObject* obj = iter.next_object();
obj != NULL;
obj = iter.next_object()) {
if (FreeListNode::IsFreeListNode(obj)) obj->SetMark();
}
}
AssertNoAllocation no_alloc;
};
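This filter exists because the simpler FreeListNode::IsFreeListNode predicate is imprecise outside code space; as the comment removed from Heap::RecordStats above notes, it returns true for any byte array. Marking the nodes reachable from the spaces' free lists identifies exactly the dead storage. A sketch of the filter's lifecycle, using only names from this change:

FreeListNodesFilter filter;  // the constructor marks every free-list node
// ... for each object yielded by the per-space iterators ...
if (filter.IsFreeListNode(obj)) {
  // The test clears the mark it finds, so a completed iteration leaves
  // the heap unmarked; obj is dead free-list storage and is skipped.
  continue;
}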
HeapIterator::HeapIterator()
: filtering_(HeapIterator::kNoFiltering),
filter_(NULL) {
Init();
}
HeapIterator::HeapIterator(HeapIterator::FreeListNodesFiltering filtering)
: filtering_(filtering),
filter_(NULL) {
Init();
}
@@ -4847,20 +4901,44 @@ HeapIterator::~HeapIterator() {
void HeapIterator::Init() {
// Start the iteration.
space_iterator_ = new SpaceIterator();
if (filtering_ == kPreciseFiltering) {
filter_ = new FreeListNodesFilter;
space_iterator_ =
new SpaceIterator(MarkCompactCollector::SizeOfMarkedObject);
} else {
space_iterator_ = new SpaceIterator;
}
object_iterator_ = space_iterator_->next();
}
void HeapIterator::Shutdown() {
#ifdef DEBUG
// Assert that in precise mode we have iterated through all
// objects. Otherwise, the heap will be left in an inconsistent state.
if (filtering_ == kPreciseFiltering) {
ASSERT(object_iterator_ == NULL);
}
#endif
// Make sure the last iterator is deallocated.
delete space_iterator_;
space_iterator_ = NULL;
object_iterator_ = NULL;
delete filter_;
filter_ = NULL;
}
HeapObject* HeapIterator::next() {
if (filter_ == NULL) return NextObject();
HeapObject* obj = NextObject();
while (obj != NULL && filter_->IsFreeListNode(obj)) obj = NextObject();
return obj;
}
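Because IsFreeListNode() clears each mark as it tests it, a precise-filtering iteration must run until next() returns NULL; the DEBUG assert in Shutdown() above enforces exactly this. A sketch of correct usage:

HeapIterator it(HeapIterator::kPreciseFiltering);
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
  // Inspect obj, but do not break out of the loop: stopping early
  // would leave the remaining free-list nodes marked (and unusable).
}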
HeapObject* HeapIterator::NextObject() {
// No iterator means we are done.
if (object_iterator_ == NULL) return NULL;
......
@@ -1558,6 +1558,7 @@ class PagedSpaces BASE_EMBEDDED {
class SpaceIterator : public Malloced {
public:
SpaceIterator();
explicit SpaceIterator(HeapObjectCallback size_func);
virtual ~SpaceIterator();
bool has_next();
@@ -1568,17 +1569,31 @@ class SpaceIterator : public Malloced {
int current_space_; // from enum AllocationSpace.
ObjectIterator* iterator_; // object iterator for the current space.
HeapObjectCallback size_func_;
};
// A HeapIterator provides iteration over the whole heap It aggregates a the
// specific iterators for the different spaces as these can only iterate over
// one space only.
// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces, as
// each of these can only iterate over a single space.
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As the implementation of
// free-node filtering uses GC marks, it can't be used during the
// mark-sweep/mark-compact GC phases. Also, it is forbidden to
// interrupt iteration in this mode, as this would leave heap objects
// marked (and thus, unusable).
class FreeListNodesFilter;
class HeapIterator BASE_EMBEDDED {
public:
explicit HeapIterator();
virtual ~HeapIterator();
enum FreeListNodesFiltering {
kNoFiltering,
kPreciseFiltering
};
HeapIterator();
explicit HeapIterator(FreeListNodesFiltering filtering);
~HeapIterator();
HeapObject* next();
void reset();
@@ -1586,10 +1601,12 @@ class HeapIterator BASE_EMBEDDED {
private:
// Perform the initialization.
void Init();
// Perform all necessary shutdown (destruction) work.
void Shutdown();
HeapObject* NextObject();
FreeListNodesFiltering filtering_;
FreeListNodesFilter* filter_;
// Space iterator for iterating all the spaces.
SpaceIterator* space_iterator_;
// Object iterator for the space currently being iterated.
......
@@ -1099,13 +1099,6 @@ void MarkCompactCollector::MarkLiveObjects() {
}
static int CountMarkedCallback(HeapObject* obj) {
MapWord map_word = obj->map_word();
map_word.ClearMark();
return obj->SizeFromMap(map_word.ToMap());
}
#ifdef DEBUG
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
live_bytes_ += obj->Size();
@@ -1152,7 +1145,7 @@ bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
void MarkCompactCollector::ClearNonLiveTransitions() {
HeapObjectIterator map_iterator(Heap::map_space(), &CountMarkedCallback);
HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject);
// Iterate over the map space, setting map transitions that go from
// a marked map to an unmarked map to null transitions. At the same time,
// set all the prototype fields of maps back to their original value,
@@ -2673,6 +2666,13 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
}
int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) {
MapWord map_word = obj->map_word();
map_word.ClearMark();
return obj->SizeFromMap(map_word.ToMap());
}
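SizeOfMarkedObject() works on marked and unmarked objects alike: the mark bit lives in the object's map word, so clearing it in a local copy yields a valid map pointer for SizeFromMap() without mutating the object. That is why this change passes it as the size callback in both places that must measure possibly-marked objects:

HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject);
space_iterator_ = new SpaceIterator(MarkCompactCollector::SizeOfMarkedObject);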
void MarkCompactCollector::Initialize() {
StaticPointersToNewGenUpdatingVisitor::Initialize();
StaticMarkingVisitor::Initialize();
......
@@ -119,6 +119,9 @@ class MarkCompactCollector: public AllStatic {
// Determine type of object and emit deletion log event.
static void ReportDeleteIfNeeded(HeapObject* obj);
// Returns size of a possibly marked object.
static int SizeOfMarkedObject(HeapObject* obj);
// Distinguishable invalid map encodings (for single word and multiple words)
// that indicate free regions.
static const uint32_t kSingleFreeEncoding = 0;
......
@@ -1898,6 +1898,18 @@ MaybeObject* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
}
void OldSpaceFreeList::MarkNodes() {
for (int i = 0; i < kFreeListsLength; i++) {
Address cur_addr = free_[i].head_node_;
while (cur_addr != NULL) {
FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
cur_addr = cur_node->next();
cur_node->SetMark();
}
}
}
#ifdef DEBUG
bool OldSpaceFreeList::Contains(FreeListNode* node) {
for (int i = 0; i < kFreeListsLength; i++) {
@@ -1957,6 +1969,16 @@ MaybeObject* FixedSizeFreeList::Allocate() {
}
void FixedSizeFreeList::MarkNodes() {
Address cur_addr = head_;
while (cur_addr != NULL && cur_addr != tail_) {
FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
cur_addr = cur_node->next();
cur_node->SetMark();
}
}
// -----------------------------------------------------------------------------
// OldSpace implementation
@@ -2711,13 +2733,15 @@ LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
: Space(id, NOT_EXECUTABLE), // Managed on a per-allocation basis
first_chunk_(NULL),
size_(0),
page_count_(0) {}
page_count_(0),
objects_size_(0) {}
bool LargeObjectSpace::Setup() {
first_chunk_ = NULL;
size_ = 0;
page_count_ = 0;
objects_size_ = 0;
return true;
}
@@ -2740,6 +2764,7 @@ void LargeObjectSpace::TearDown() {
size_ = 0;
page_count_ = 0;
objects_size_ = 0;
}
@@ -2786,6 +2811,7 @@ MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size,
}
size_ += static_cast<int>(chunk_size);
objects_size_ += requested_size;
page_count_++;
chunk->set_next(first_chunk_);
chunk->set_size(chunk_size);
@@ -2948,6 +2974,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
// Free the chunk.
MarkCompactCollector::ReportDeleteIfNeeded(object);
size_ -= static_cast<int>(chunk_size);
objects_size_ -= object->Size();
page_count_--;
ObjectSpace space = kObjectSpaceLoSpace;
if (executable == EXECUTABLE) space = kObjectSpaceCodeSpace;
@@ -3052,7 +3079,8 @@ void LargeObjectSpace::ReportStatistics() {
CollectHistogramInfo(obj);
}
PrintF(" number of objects %d\n", num_objects);
PrintF(" number of objects %d, "
"size of objects %" V8_PTR_PREFIX "d\n", num_objects, objects_size_);
if (num_objects > 0) ReportHistogram(false);
}
......
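The net effect is that Size() minus SizeOfObjects() equals the accumulated per-chunk overhead: each allocation adds the page-rounded chunk_size (including the chunk header) to size_ but only requested_size to objects_size_, and both counters are decremented again when an unmarked object's chunk is freed.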
@@ -371,8 +371,13 @@ class Space : public Malloced {
// Identity used in error reporting.
AllocationSpace identity() { return id_; }
// Returns allocated size.
virtual intptr_t Size() = 0;
// Returns size of objects. Can differ from the allocated size
// (e.g. see LargeObjectSpace).
virtual intptr_t SizeOfObjects() { return Size(); }
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
virtual void Protect() = 0;
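A hypothetical subclass sketch of the contract introduced here (the class name and fields are invented for illustration; the constructor and remaining pure virtuals are omitted): a space overrides SizeOfObjects() only when it can report something tighter than its allocated size, and the base-class default keeps the two equal for every other space.

class ExampleSpace : public Space {
 public:
  // Allocated (reserved) bytes, including bookkeeping overhead.
  virtual intptr_t Size() { return allocated_bytes_; }
  // Tighter figure: bytes actually occupied by objects.
  virtual intptr_t SizeOfObjects() { return live_object_bytes_; }
 private:
  intptr_t allocated_bytes_;
  intptr_t live_object_bytes_;
};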
@@ -1715,6 +1720,8 @@ class OldSpaceFreeList BASE_EMBEDDED {
// 'wasted_bytes'. The size should be a non-zero multiple of the word size.
MUST_USE_RESULT MaybeObject* Allocate(int size_in_bytes, int* wasted_bytes);
void MarkNodes();
private:
// The size range of blocks, in bytes. (Smaller allocations are allowed, but
// will always result in waste.)
@@ -1813,6 +1820,8 @@ class FixedSizeFreeList BASE_EMBEDDED {
// A failure is returned if no block is available.
MUST_USE_RESULT MaybeObject* Allocate();
void MarkNodes();
private:
// Available bytes on the free list.
intptr_t available_;
@@ -1884,6 +1893,8 @@ class OldSpace : public PagedSpace {
virtual void PutRestOfCurrentPageOnFreeList(Page* current_page);
void MarkFreeListNodes() { free_list_.MarkNodes(); }
#ifdef DEBUG
// Reports statistics for the space
void ReportStatistics();
@@ -1951,6 +1962,9 @@ class FixedSpace : public PagedSpace {
virtual void DeallocateBlock(Address start,
int size_in_bytes,
bool add_to_freelist);
void MarkFreeListNodes() { free_list_.MarkNodes(); }
#ifdef DEBUG
// Reports statistic info of the space
void ReportStatistics();
@@ -2191,6 +2205,10 @@ class LargeObjectSpace : public Space {
return size_;
}
virtual intptr_t SizeOfObjects() {
return objects_size_;
}
int PageCount() {
return page_count_;
}
@@ -2242,7 +2260,7 @@ class LargeObjectSpace : public Space {
LargeObjectChunk* first_chunk_;
intptr_t size_; // allocated bytes
int page_count_; // number of chunks
intptr_t objects_size_; // size of objects
// Shared implementation of AllocateRaw, AllocateRawCode and
// AllocateRawFixedArray.
......
@@ -1095,3 +1095,32 @@ TEST(TestInternalWeakListsTraverseWithGC) {
ctx[i]->Exit();
}
}
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
InitializeVM();
intptr_t size_of_objects_1 = Heap::SizeOfObjects();
HeapIterator iterator(HeapIterator::kPreciseFiltering);
intptr_t size_of_objects_2 = 0;
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
size_of_objects_2 += obj->Size();
}
// Delta must be within 1% of the larger result.
if (size_of_objects_1 > size_of_objects_2) {
intptr_t delta = size_of_objects_1 - size_of_objects_2;
PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
"Iterator: %" V8_PTR_PREFIX "d, "
"delta: %" V8_PTR_PREFIX "d\n",
size_of_objects_1, size_of_objects_2, delta);
CHECK_GT(size_of_objects_1 / 100, delta);
} else {
intptr_t delta = size_of_objects_2 - size_of_objects_1;
PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
"Iterator: %" V8_PTR_PREFIX "d, "
"delta: %" V8_PTR_PREFIX "d\n",
size_of_objects_1, size_of_objects_2, delta);
CHECK_GT(size_of_objects_2 / 100, delta);
}
}
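Note how the test encodes its tolerance: CHECK_GT(larger / 100, delta) requires the difference between the two measurements to be strictly less than 1% of the larger one, a tight bound compared with the ~10% gap this change set out to remove.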