Commit 6832f292 authored by Clemens Hammacher, Committed by Commit Bot

Revert "[heap] Skip ro-space from heap iterators, add CombinedHeapIterator."

This reverts commit 3d1d8eae.

Reason for revert: Speculative revert; seems to break all nosnap bots, e.g. https://ci.chromium.org/p/v8/builders/ci/V8%20Linux%20-%20nosnap/25240

Original change's description:
> [heap] Skip ro-space from heap iterators, add CombinedHeapIterator.
> 
> Read-only space sharing requires an iterator independent of heap. This
> also enables future removal of read-only space from heap.
> 
> Bug: v8:7464
> Change-Id: Ia07a9369494ea2c547d12c01ffa1d7b8b6bbeabc
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1552795
> Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Dan Elphick <delphick@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#60819}

TBR=ulan@chromium.org,hpayer@chromium.org,delphick@chromium.org,goszczycki@google.com

Change-Id: I64b58b1b0c5eb073a6d2cfae81bb4de65f0511bf
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:7464
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1565895
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60825}
parent 168c186c
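The core abstraction being reverted below is plain iterator chaining: drain the mutable-heap iterator, then the read-only one, and keep returning a null value once both are exhausted (the deleted tests check exactly this "null past end" contract). A minimal standalone sketch of the pattern in plain C++, with hypothetical stand-in types rather than V8 code:

#include <cstddef>
#include <cstdio>
#include <vector>

// Stand-in for HeapIterator/ReadOnlyHeapIterator: yields nullptr once
// exhausted, mirroring HeapObject::is_null() in the real code.
class VecIterator {
 public:
  explicit VecIterator(const std::vector<int>* v) : v_(v) {}
  const int* next() { return pos_ < v_->size() ? &(*v_)[pos_++] : nullptr; }

 private:
  const std::vector<int>* v_;
  std::size_t pos_ = 0;
};

// Mirrors the deleted CombinedHeapIterator::next(): drain the first
// iterator, then fall through to the second; stays null once both end.
class CombinedIterator {
 public:
  CombinedIterator(const std::vector<int>* a, const std::vector<int>* b)
      : first_(a), second_(b) {}
  const int* next() {
    if (const int* p = first_.next()) return p;
    return second_.next();
  }

 private:
  VecIterator first_;
  VecIterator second_;
};

int main() {
  std::vector<int> mutable_objects{1, 2}, read_only_objects{3};
  CombinedIterator it(&mutable_objects, &read_only_objects);
  for (const int* p = it.next(); p != nullptr; p = it.next())
    std::printf("%d\n", *p);  // prints 1, 2, 3
}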
@@ -2213,8 +2213,6 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/barrier.h",
"src/heap/code-stats.cc",
"src/heap/code-stats.h",
"src/heap/combined-heap.cc",
"src/heap/combined-heap.h",
"src/heap/concurrent-marking.cc",
"src/heap/concurrent-marking.h",
"src/heap/embedder-tracing.cc",
......
@@ -8,7 +8,6 @@ include_rules = [
"+src/compiler/code-assembler.h",
"+src/compiler/wasm-compiler.h",
"-src/heap",
"+src/heap/combined-heap.h",
"+src/heap/embedder-tracing.h",
"+src/heap/factory.h",
"+src/heap/factory-inl.h",
......
@@ -8374,18 +8374,12 @@ i::Address* Isolate::GetDataFromSnapshotOnce(size_t index) {
void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
i::Heap* heap = isolate->heap();
- i::ReadOnlySpace* ro_space = heap->read_only_space();
- heap_statistics->total_heap_size_ =
-     heap->CommittedMemory() + ro_space->CommittedMemory();
+ heap_statistics->total_heap_size_ = heap->CommittedMemory();
heap_statistics->total_heap_size_executable_ =
heap->CommittedMemoryExecutable();
- heap_statistics->total_physical_size_ =
-     heap->CommittedPhysicalMemory() + ro_space->CommittedPhysicalMemory();
- heap_statistics->total_available_size_ =
-     heap->Available() + ro_space->Available();
- heap_statistics->used_heap_size_ =
-     heap->SizeOfObjects() + ro_space->SizeOfObjects();
+ heap_statistics->total_physical_size_ = heap->CommittedPhysicalMemory();
+ heap_statistics->total_available_size_ = heap->Available();
+ heap_statistics->used_heap_size_ = heap->SizeOfObjects();
heap_statistics->heap_size_limit_ = heap->MaxReserved();
// TODO(7424): There is no public API for the {WasmEngine} yet. Once such an
// API becomes available we should report the malloced memory separately. For
......
- // Copyright 2019 the V8 project authors. All rights reserved.
- // Use of this source code is governed by a BSD-style license that can be
- // found in the LICENSE file.
-
- #include "src/heap/combined-heap.h"
-
- namespace v8 {
- namespace internal {
-
- HeapObject CombinedHeapIterator::next() {
-   HeapObject object = heap_iterator_.next();
-   if (!object.is_null()) {
-     return object;
-   }
-   return ro_heap_iterator_.next();
- }
-
- } // namespace internal
- } // namespace v8
- // Copyright 2019 the V8 project authors. All rights reserved.
- // Use of this source code is governed by a BSD-style license that can be
- // found in the LICENSE file.
-
- #ifndef V8_HEAP_COMBINED_HEAP_H_
- #define V8_HEAP_COMBINED_HEAP_H_
-
- #include "src/heap/heap.h"
- #include "src/heap/read-only-heap.h"
- #include "src/objects.h"
-
- namespace v8 {
- namespace internal {
-
- // This class allows iteration over the entire heap (Heap and ReadOnlyHeap). It
- // uses the HeapIterator to iterate over non-read-only objects and accepts the
- // same filtering option. (Interrupting iteration while filtering unreachable
- // objects is still forbidden)
- class V8_EXPORT_PRIVATE CombinedHeapIterator final {
-  public:
-   CombinedHeapIterator(Heap* heap,
-                        HeapIterator::HeapObjectsFiltering filtering =
-                            HeapIterator::HeapObjectsFiltering::kNoFiltering)
-       : heap_iterator_(heap, filtering),
-         ro_heap_iterator_(heap->read_only_heap()) {}
-   HeapObject next();
-
-  private:
-   HeapIterator heap_iterator_;
-   ReadOnlyHeapIterator ro_heap_iterator_;
- };
-
- } // namespace internal
- } // namespace v8
-
- #endif // V8_HEAP_COMBINED_HEAP_H_
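Callers consumed the deleted class with the same loop shape as HeapIterator; a sketch of a hypothetical caller inside v8::internal, mirroring the reverted profiler call sites further down:

// Hypothetical caller (not from this diff), same shape as the reverted
// FindHeapObjectById/QueryObjects call sites below:
CombinedHeapIterator iterator(heap);
for (HeapObject obj = iterator.next(); !obj.is_null();
     obj = iterator.next()) {
  // ... visit obj; a null HeapObject marks the end of both heaps ...
}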
@@ -227,7 +227,7 @@ size_t Heap::Capacity() {
size_t Heap::OldGenerationCapacity() {
if (!HasBeenSetUp()) return 0;
- PagedSpaces spaces(this);
+ PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
@@ -239,7 +239,7 @@ size_t Heap::OldGenerationCapacity() {
size_t Heap::CommittedOldGenerationMemory() {
if (!HasBeenSetUp()) return 0;
- PagedSpaces spaces(this);
+ PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
@@ -431,16 +431,14 @@ void Heap::PrintShortHeapStatistics() {
code_lo_space_->SizeOfObjects() / KB,
code_lo_space_->Available() / KB,
code_lo_space_->CommittedMemory() / KB);
- ReadOnlySpace* const ro_space = read_only_space_;
PrintIsolate(isolate_,
"All spaces, used: %6" PRIuS
" KB"
", available: %6" PRIuS
" KB"
", committed: %6" PRIuS "KB\n",
- (this->SizeOfObjects() + ro_space->SizeOfObjects()) / KB,
- (this->Available() + ro_space->Available()) / KB,
- (this->CommittedMemory() + ro_space->CommittedMemory()) / KB);
+ this->SizeOfObjects() / KB, this->Available() / KB,
+ this->CommittedMemory() / KB);
PrintIsolate(isolate_,
"Unmapper buffering %zu chunks of committed: %6" PRIuS " KB\n",
memory_allocator()->unmapper()->NumberOfCommittedChunks(),
@@ -4310,7 +4308,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
}
size_t Heap::OldGenerationSizeOfObjects() {
- PagedSpaces spaces(this);
+ PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
@@ -5215,7 +5213,7 @@ PagedSpace* PagedSpaces::next() {
}
SpaceIterator::SpaceIterator(Heap* heap)
- : heap_(heap), current_space_(FIRST_MUTABLE_SPACE - 1) {}
+ : heap_(heap), current_space_(FIRST_SPACE - 1) {}
SpaceIterator::~SpaceIterator() = default;
......
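A side note on the SpaceIterator change just above: the constructor seeds current_space_ with FIRST_SPACE - 1 (previously FIRST_MUTABLE_SPACE - 1) because the iterator advances before reading, so the first call already lands on the first space. A standalone sketch of that idiom; the constants are illustrative, not V8's actual enum values:

#include <cstdio>

// "Start one before the first element": next() pre-increments, so the
// first call yields `first` and no special first-iteration case is needed.
class RangeIterator {
 public:
  RangeIterator(int first, int last) : current_(first - 1), last_(last) {}
  bool has_next() const { return current_ < last_; }
  int next() { return ++current_; }  // advance, then yield

 private:
  int current_;
  int last_;
};

int main() {
  RangeIterator it(0, 3);  // e.g. FIRST_SPACE = 0, LAST_SPACE = 3 (illustrative)
  while (it.has_next()) std::printf("%d ", it.next());  // 0 1 2 3
}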
@@ -2205,7 +2205,13 @@ class VerifySmisVisitor : public RootVisitor {
// space in turn, and null when it is done.
class V8_EXPORT_PRIVATE PagedSpaces {
public:
- explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
+ enum class SpacesSpecifier { kSweepablePagedSpaces, kAllPagedSpaces };
+ explicit PagedSpaces(Heap* heap, SpacesSpecifier specifier =
+                                      SpacesSpecifier::kSweepablePagedSpaces)
+     : heap_(heap),
+       counter_(specifier == SpacesSpecifier::kAllPagedSpaces ? RO_SPACE
+                                                              : OLD_SPACE) {}
PagedSpace* next();
private:
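The restored SpacesSpecifier only chooses the starting counter: kAllPagedSpaces starts at RO_SPACE, so the read-only space is yielded first, while the default starts at OLD_SPACE and skips it. The body of PagedSpaces::next() is truncated in the hunk above; the following is a hedged guess at its shape, where the AllocationSpace ordering and the NEW_SPACE skip (new space is not a paged space) are assumptions:

// Hypothetical reconstruction, not from this diff. Assumes the ordering
// RO_SPACE < NEW_SPACE < OLD_SPACE < CODE_SPACE < MAP_SPACE and the usual
// Heap accessors.
PagedSpace* PagedSpaces::next() {
  switch (counter_++) {
    case RO_SPACE:
      counter_++;  // NEW_SPACE is not paged; skip over it.
      return heap_->read_only_space();
    case OLD_SPACE:
      return heap_->old_space();
    case CODE_SPACE:
      return heap_->code_space();
    case MAP_SPACE:
      return heap_->map_space();
    default:
      return nullptr;  // iteration finished
  }
}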
@@ -2227,21 +2233,19 @@ class SpaceIterator : public Malloced {
int current_space_; // from enum AllocationSpace.
};
- // A HeapIterator provides iteration over the entire non-read-only heap. It
- // aggregates the specific iterators for the different spaces as these can only
- // iterate over one space only.
- //
- // HeapIterator ensures there is no allocation during its lifetime (using an
- // embedded DisallowHeapAllocation instance).
- //
- // HeapIterator can skip free list nodes (that is, de-allocated heap objects
- // that still remain in the heap). As implementation of free nodes filtering
- // uses GC marks, it can't be used during MS/MC GC phases. Also, it is forbidden
- // to interrupt iteration in this mode, as this will leave heap objects marked
- // (and thus, unusable).
- //
- // See ReadOnlyHeapIterator if you need to iterate over read-only space objects,
- // or CombinedHeapIterator if you need to iterate over both heaps.
+ // A HeapIterator provides iteration over the whole heap. It
+ // aggregates the specific iterators for the different spaces as
+ // these can only iterate over one space only.
+ //
+ // HeapIterator ensures there is no allocation during its lifetime
+ // (using an embedded DisallowHeapAllocation instance).
+ //
+ // HeapIterator can skip free list nodes (that is, de-allocated heap
+ // objects that still remain in the heap). As implementation of free
+ // nodes filtering uses GC marks, it can't be used during MS/MC GC
+ // phases. Also, it is forbidden to interrupt iteration in this mode,
+ // as this will leave heap objects marked (and thus, unusable).
class V8_EXPORT_PRIVATE HeapIterator {
public:
enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
......
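One behavioral subtlety worth highlighting from the comment above: with kFilterUnreachable the iterator relies on GC mark bits, so a filtered walk must always run to completion. A sketch of the required caller discipline, where Matches is a hypothetical predicate; compare the verbatim "Can't break -- kFilterUnreachable requires full heap traversal" comment in the profiler hunk further down:

HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
HeapObject found;
for (HeapObject obj = iterator.next(); !obj.is_null();
     obj = iterator.next()) {
  if (Matches(obj)) found = obj;  // record the hit...
  // ...but keep iterating: breaking out would leave objects marked.
}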
@@ -12,7 +12,6 @@
#include "src/compilation-cache.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
@@ -1082,10 +1081,14 @@ class ObjectStatsVisitor {
namespace {
void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
- CombinedHeapIterator iterator(heap);
- for (HeapObject obj = iterator.next(); !obj.is_null();
-      obj = iterator.next()) {
-   visitor->Visit(obj, obj->Size());
- }
+ SpaceIterator space_it(heap);
+ HeapObject obj;
+ while (space_it.has_next()) {
+   std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator());
+   ObjectIterator* obj_it = it.get();
+   for (obj = obj_it->Next(); !obj.is_null(); obj = obj_it->Next()) {
+     visitor->Visit(obj, obj->Size());
+   }
+ }
}
......
@@ -9,8 +9,6 @@
#include "src/base/once.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces.h"
#include "src/objects-inl.h"
#include "src/objects/heap-object-inl.h"
#include "src/snapshot/read-only-deserializer.h"
namespace v8 {
@@ -79,47 +77,5 @@ bool ReadOnlyHeap::Contains(HeapObject object) {
return Page::FromAddress(object.ptr())->owner()->identity() == RO_SPACE;
}
- ReadOnlyHeapIterator::ReadOnlyHeapIterator(ReadOnlyHeap* ro_heap)
-     : ReadOnlyHeapIterator(ro_heap->read_only_space()) {}
-
- ReadOnlyHeapIterator::ReadOnlyHeapIterator(ReadOnlySpace* ro_space)
-     : ro_space_(ro_space),
-       current_page_(ro_space->first_page()),
-       current_addr_(current_page_->area_start()) {}
-
- HeapObject ReadOnlyHeapIterator::next() {
-   if (current_page_ == nullptr) {
-     return HeapObject();
-   }
-
-   for (;;) {
-     DCHECK_LE(current_addr_, current_page_->area_end());
-     if (current_addr_ == current_page_->area_end()) {
-       // Progress to the next page.
-       current_page_ = current_page_->next_page();
-       if (current_page_ == nullptr) {
-         return HeapObject();
-       }
-       current_addr_ = current_page_->area_start();
-     }
-
-     if (current_addr_ == ro_space_->top() &&
-         current_addr_ != ro_space_->limit()) {
-       current_addr_ = ro_space_->limit();
-       continue;
-     }
-
-     HeapObject object = HeapObject::FromAddress(current_addr_);
-     const int object_size = object->Size();
-     current_addr_ += object_size;
-
-     if (object->IsFiller()) {
-       continue;
-     }
-
-     DCHECK_OBJECT_SIZE(object_size);
-     return object;
-   }
- }
} // namespace internal
} // namespace v8
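The deleted next() above contains one non-obvious step: when the scan cursor reaches the space's top() while top() != limit(), it jumps straight to limit(). A sketch of the page layout assumed by that step (illustrative; the exact layout is an assumption based on the code):

// Assumed layout of the page being scanned:
//
//   area_start            top          limit            area_end
//   |== objects ===========|-- unused --|== objects =====|
//
// [top, limit) is the linear-allocation gap and holds no object headers,
// so reading it as a HeapObject would be bogus; the cursor skips from
// top to limit and then resumes the normal object-size bump walk.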
@@ -13,9 +13,8 @@
namespace v8 {
namespace internal {
- class Page;
- class ReadOnlyDeserializer;
class ReadOnlySpace;
+ class ReadOnlyDeserializer;
// This class transparently manages read-only space, roots and cache creation
// and destruction.
@@ -59,20 +58,6 @@ class ReadOnlyHeap final {
DISALLOW_COPY_AND_ASSIGN(ReadOnlyHeap);
};
- // This class enables iterating over all read-only heap objects.
- class V8_EXPORT_PRIVATE ReadOnlyHeapIterator {
-  public:
-   explicit ReadOnlyHeapIterator(ReadOnlyHeap* ro_heap);
-   explicit ReadOnlyHeapIterator(ReadOnlySpace* ro_space);
-
-   HeapObject next();
-
-  private:
-   ReadOnlySpace* const ro_space_;
-   Page* current_page_;
-   Address current_addr_;
- };
} // namespace internal
} // namespace v8
......
@@ -17,7 +17,6 @@
#include "src/heap/heap-controller.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/remembered-set.h"
#include "src/heap/slot-set.h"
#include "src/heap/sweeper.h"
@@ -3367,12 +3366,14 @@ void ReadOnlySpace::ClearStringPaddingIfNeeded() {
if (is_string_padding_cleared_) return;
WritableScope writable_scope(this);
- ReadOnlyHeapIterator iterator(this);
- for (HeapObject o = iterator.next(); !o.is_null(); o = iterator.next()) {
-   if (o->IsSeqOneByteString()) {
-     SeqOneByteString::cast(o)->clear_padding();
-   } else if (o->IsSeqTwoByteString()) {
-     SeqTwoByteString::cast(o)->clear_padding();
-   }
- }
+ for (Page* page : *this) {
+   HeapObjectIterator iterator(page);
+   for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) {
+     if (o->IsSeqOneByteString()) {
+       SeqOneByteString::cast(o)->clear_padding();
+     } else if (o->IsSeqTwoByteString()) {
+       SeqTwoByteString::cast(o)->clear_padding();
+     }
+   }
+ }
is_string_padding_cleared_ = true;
......
@@ -6,7 +6,6 @@
#include "src/api-inl.h"
#include "src/debug/debug.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
@@ -173,7 +172,7 @@ void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
HeapObject object;
- CombinedHeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
+ HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
// Make sure that object with the given id is still reachable.
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
@@ -183,7 +182,6 @@ Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
// Can't break -- kFilterUnreachable requires full heap traversal.
}
}
return !object.is_null() ? Handle<HeapObject>(object, isolate())
: Handle<HeapObject>();
}
@@ -205,7 +203,7 @@ void HeapProfiler::QueryObjects(Handle<Context> context,
// We should return accurate information about live objects, so we need to
// collect all garbage first.
heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
- CombinedHeapIterator heap_iterator(heap());
+ HeapIterator heap_iterator(heap());
for (HeapObject heap_obj = heap_iterator.next(); !heap_obj.is_null();
heap_obj = heap_iterator.next()) {
if (!heap_obj->IsJSObject() || heap_obj->IsExternal(isolate())) continue;
......
@@ -11,7 +11,6 @@
#include "src/conversions.h"
#include "src/debug/debug.h"
#include "src/global-handles.h"
#include "src/heap/combined-heap.h"
#include "src/layout-descriptor.h"
#include "src/objects-body-descriptors.h"
#include "src/objects-inl.h"
@@ -395,7 +394,7 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
}
heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kHeapProfiler);
- CombinedHeapIterator iterator(heap_);
+ HeapIterator iterator(heap_);
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
FindOrAddEntry(obj->address(), obj->Size());
@@ -646,7 +645,7 @@ const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
}
int V8HeapExplorer::EstimateObjectsCount() {
- CombinedHeapIterator it(heap_, HeapIterator::kFilterUnreachable);
+ HeapIterator it(heap_, HeapIterator::kFilterUnreachable);
int objects_count = 0;
while (!it.next().is_null()) ++objects_count;
return objects_count;
@@ -1457,7 +1456,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
bool interrupted = false;
- CombinedHeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
+ HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
// Heap iteration with filtering must be finished in any case.
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next(), progress_->ProgressStep()) {
......
@@ -125,7 +125,6 @@ v8_source_set("cctest_sources") {
"heap/test-heap.cc",
"heap/test-incremental-marking.cc",
"heap/test-invalidated-slots.cc",
"heap/test-iterators.cc",
"heap/test-lab.cc",
"heap/test-mark-compact.cc",
"heap/test-page-promotion.cc",
......
@@ -121,7 +121,6 @@ void CcTest::Run() {
}
i::Heap* CcTest::heap() { return i_isolate()->heap(); }
- i::ReadOnlyHeap* CcTest::read_only_heap() { return heap()->read_only_heap(); }
void CcTest::CollectGarbage(i::AllocationSpace space) {
heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
......
@@ -133,7 +133,6 @@ class CcTest {
}
static i::Heap* heap();
- static i::ReadOnlyHeap* read_only_heap();
static void CollectGarbage(i::AllocationSpace space);
static void CollectAllGarbage(i::Isolate* isolate = nullptr);
......
- // Copyright 2019 the V8 project authors. All rights reserved.
- // Use of this source code is governed by a BSD-style license that can be
- // found in the LICENSE file.
-
- #include "include/v8.h"
- #include "src/api-inl.h"
- #include "src/heap/combined-heap.h"
- #include "src/heap/heap.h"
- #include "src/heap/read-only-heap.h"
- #include "src/isolate.h"
- #include "src/objects.h"
- #include "src/objects/heap-object.h"
- #include "src/roots-inl.h"
- #include "test/cctest/cctest.h"
-
- namespace v8 {
- namespace internal {
- namespace heap {
-
- TEST(HeapIteratorNullPastEnd) {
-   HeapIterator iterator(CcTest::heap());
-   while (!iterator.next().is_null()) {
-   }
-   for (int i = 0; i < 20; i++) {
-     CHECK(iterator.next().is_null());
-   }
- }
-
- TEST(ReadOnlyHeapIteratorNullPastEnd) {
-   ReadOnlyHeapIterator iterator(CcTest::heap()->read_only_heap());
-   while (!iterator.next().is_null()) {
-   }
-   for (int i = 0; i < 20; i++) {
-     CHECK(iterator.next().is_null());
-   }
- }
-
- TEST(CombinedHeapIteratorNullPastEnd) {
-   CombinedHeapIterator iterator(CcTest::heap());
-   while (!iterator.next().is_null()) {
-   }
-   for (int i = 0; i < 20; i++) {
-     CHECK(iterator.next().is_null());
-   }
- }
-
- namespace {
- // An arbitrary object guaranteed to live on the non-read-only heap.
- Object CreateWritableObject() {
-   return *v8::Utils::OpenHandle(*v8::Object::New(CcTest::isolate()));
- }
- }  // namespace
-
- // TODO(v8:7464): Add more CHECKs once Contains doesn't include read-only space.
- TEST(ReadOnlyHeapIterator) {
-   CcTest::InitializeVM();
-   HandleScope handle_scope(CcTest::i_isolate());
-   const Object sample_object = CreateWritableObject();
-   ReadOnlyHeapIterator iterator(CcTest::read_only_heap());
-   for (HeapObject obj = iterator.next(); !obj.is_null();
-        obj = iterator.next()) {
-     CHECK(ReadOnlyHeap::Contains(obj));
-     CHECK_NE(sample_object, obj);
-   }
- }
-
- TEST(HeapIterator) {
-   CcTest::InitializeVM();
-   HandleScope handle_scope(CcTest::i_isolate());
-   const Object sample_object = CreateWritableObject();
-   HeapIterator iterator(CcTest::heap());
-   bool seen_sample_object = false;
-   for (HeapObject obj = iterator.next(); !obj.is_null();
-        obj = iterator.next()) {
-     CHECK(!ReadOnlyHeap::Contains(obj));
-     if (sample_object == obj) seen_sample_object = true;
-   }
-   CHECK(seen_sample_object);
- }
-
- TEST(CombinedHeapIterator) {
-   CcTest::InitializeVM();
-   HandleScope handle_scope(CcTest::i_isolate());
-   const Object sample_object = CreateWritableObject();
-   CombinedHeapIterator iterator(CcTest::heap());
-   bool seen_sample_object = false;
-   for (HeapObject obj = iterator.next(); !obj.is_null();
-        obj = iterator.next()) {
-     CHECK(CcTest::heap()->Contains(obj));
-     if (sample_object == obj) seen_sample_object = true;
-   }
-   CHECK(seen_sample_object);
- }
-
- }  // namespace heap
- }  // namespace internal
- }  // namespace v8
@@ -9,7 +9,6 @@
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/spaces.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
@@ -66,37 +65,6 @@ static void DumpMaps(i::PagedSpace* space) {
#undef MUTABLE_ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
- static void DumpKnownObject(i::Heap* heap, const char* space_name,
-                             i::HeapObject object) {
- #define RO_ROOT_LIST_CASE(type, name, CamelName)     \
-   if (root_name == NULL && object == roots.name()) { \
-     root_name = #CamelName;                          \
-     root_index = i::RootIndex::k##CamelName;         \
-   }
- #define ROOT_LIST_CASE(type, name, CamelName)        \
-   if (root_name == NULL && object == heap->name()) { \
-     root_name = #CamelName;                          \
-     root_index = i::RootIndex::k##CamelName;         \
-   }
-   i::ReadOnlyRoots roots(heap);
-   const char* root_name = nullptr;
-   i::RootIndex root_index = i::RootIndex::kFirstSmiRoot;
-   intptr_t root_ptr = object.ptr() & (i::Page::kPageSize - 1);
-   STRONG_READ_ONLY_ROOT_LIST(RO_ROOT_LIST_CASE)
-   MUTABLE_ROOT_LIST(ROOT_LIST_CASE)
-   if (root_name == nullptr) return;
-   if (!i::RootsTable::IsImmortalImmovable(root_index)) return;
-   i::PrintF("  (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", space_name, root_ptr,
-             root_name);
- #undef ROOT_LIST_CASE
- #undef RO_ROOT_LIST_CASE
- }
static int DumpHeapConstants(const char* argv0) {
// Start up V8.
std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
@@ -110,6 +78,7 @@ static int DumpHeapConstants(const char* argv0) {
{
Isolate::Scope scope(isolate);
i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
+ i::ReadOnlyRoots roots(heap);
i::PrintF("%s", kHeader);
#define DUMP_TYPE(T) i::PrintF(" %d: \"%s\",\n", i::T, #T);
i::PrintF("INSTANCE_TYPES = {\n");
@@ -126,16 +95,18 @@
// Dump the KNOWN_OBJECTS table to the console.
i::PrintF("\n# List of known V8 objects.\n");
+ #define RO_ROOT_LIST_CASE(type, name, CamelName) \
+   if (n == NULL && o == roots.name()) {          \
+     n = #CamelName;                              \
+     i = i::RootIndex::k##CamelName;              \
+   }
+ #define ROOT_LIST_CASE(type, name, CamelName) \
+   if (n == NULL && o == heap->name()) {       \
+     n = #CamelName;                           \
+     i = i::RootIndex::k##CamelName;           \
+   }
+ i::PagedSpaces spit(heap, i::PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
i::PrintF("KNOWN_OBJECTS = {\n");
- i::ReadOnlyHeapIterator ro_iterator(heap->read_only_heap());
- for (i::HeapObject object = ro_iterator.next(); !object.is_null();
-      object = ro_iterator.next()) {
-   // Skip read-only heap maps, they will be reported elsewhere.
-   if (object->IsMap()) continue;
-   DumpKnownObject(heap, "RO_SPACE", object);
- }
- i::PagedSpaces spit(heap);
for (i::PagedSpace* s = spit.next(); s != nullptr; s = spit.next()) {
i::HeapObjectIterator it(s);
// Code objects are generally platform-dependent.
@@ -143,10 +114,21 @@
continue;
const char* sname = s->name();
for (i::HeapObject o = it.Next(); !o.is_null(); o = it.Next()) {
- DumpKnownObject(heap, sname, o);
+ // Skip maps in RO_SPACE since they will be reported elsewhere.
+ if (o->IsMap()) continue;
+ const char* n = nullptr;
+ i::RootIndex i = i::RootIndex::kFirstSmiRoot;
+ intptr_t p = o.ptr() & (i::Page::kPageSize - 1);
+ STRONG_READ_ONLY_ROOT_LIST(RO_ROOT_LIST_CASE)
+ MUTABLE_ROOT_LIST(ROOT_LIST_CASE)
+ if (n == nullptr) continue;
+ if (!i::RootsTable::IsImmortalImmovable(i)) continue;
+ i::PrintF("  (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", sname, p, n);
}
}
i::PrintF("}\n");
+ #undef ROOT_LIST_CASE
+ #undef RO_ROOT_LIST_CASE
// Dump frame markers
i::PrintF("\n# List of known V8 Frame Markers.\n");
......
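A note on the offset arithmetic shared by the removed DumpKnownObject and the restored inline loop above: masking an object pointer with Page::kPageSize - 1 keeps only its offset within the (power-of-two-sized) page, which is what makes the dumped constants stable across runs despite address randomization. A standalone worked example; the 512 KB page size (kPageSize = 1 << 19) is an assumption about the V8 of this era:

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kPageSize = uintptr_t{1} << 19;  // 512 KB, assumed
  const uintptr_t ptr = 0x2ab4f2345678;  // hypothetical object address
  // Only the low 19 bits survive the mask: 0x345678 & 0x7FFFF == 0x45678.
  std::printf("0x%05" PRIxPTR "\n", ptr & (kPageSize - 1));  // prints 0x45678
}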