Commit 9c062093 authored by Maciej Goszczycki, committed by Commit Bot

Reland "[heap] Skip ro-space from heap iterators, add CombinedHeapIterator."

Code relocation info is now always allocated in old-space. Before relocation
info allocated for placeholders and builtins (which get replaced with
trampolines in nosnap builds) would become unreachable. Since read-only space
is not GCed and ReadOnlyHeapIterator doesn't check for reachability,
ValidateSnapshot would fail finding unreachable objects returned by
ReadOnlyHeapIterator.

Because trampoline relocation info gets replaced with canonical one, this only
affects no-embedded-builtins nosnap builds, which don't get much benefit from
read-only relocation info anyway.

A new check has been added to the read-only deserializer to verify that every
read-only object is reachable at mksnapshot-time.

The CombinedHeapIterator iteration order was changed to iterate over
read-only space first, because that's how HeapIterator worked.

This is a reland of 3d1d8eae

Original change's description:
> [heap] Skip ro-space from heap iterators, add CombinedHeapIterator.
>
> Read-only space sharing requires an iterator independent of heap. This
> also enables future removal of read-only space from heap.
>
> Bug: v8:7464
> Change-Id: Ia07a9369494ea2c547d12c01ffa1d7b8b6bbeabc
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1552795
> Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Dan Elphick <delphick@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#60819}

Bug: v8:7464
Change-Id: I49ae070955b77956962334a84f762ab29052d5ff
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1566513
Reviewed-by: Dan Elphick <delphick@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
Cr-Commit-Position: refs/heads/master@{#61185}
parent e09fbbd1
...@@ -2221,6 +2221,8 @@ v8_source_set("v8_base_without_compiler") { ...@@ -2221,6 +2221,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/barrier.h", "src/heap/barrier.h",
"src/heap/code-stats.cc", "src/heap/code-stats.cc",
"src/heap/code-stats.h", "src/heap/code-stats.h",
"src/heap/combined-heap.cc",
"src/heap/combined-heap.h",
"src/heap/concurrent-marking.cc", "src/heap/concurrent-marking.cc",
"src/heap/concurrent-marking.h", "src/heap/concurrent-marking.h",
"src/heap/embedder-tracing.cc", "src/heap/embedder-tracing.cc",
......
...@@ -8,6 +8,7 @@ include_rules = [ ...@@ -8,6 +8,7 @@ include_rules = [
"+src/compiler/code-assembler.h", "+src/compiler/code-assembler.h",
"+src/compiler/wasm-compiler.h", "+src/compiler/wasm-compiler.h",
"-src/heap", "-src/heap",
"+src/heap/combined-heap.h",
"+src/heap/embedder-tracing.h", "+src/heap/embedder-tracing.h",
"+src/heap/factory.h", "+src/heap/factory.h",
"+src/heap/factory-inl.h", "+src/heap/factory-inl.h",
......
...@@ -8342,12 +8342,18 @@ i::Address* Isolate::GetDataFromSnapshotOnce(size_t index) { ...@@ -8342,12 +8342,18 @@ i::Address* Isolate::GetDataFromSnapshotOnce(size_t index) {
void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) { void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
i::Heap* heap = isolate->heap(); i::Heap* heap = isolate->heap();
heap_statistics->total_heap_size_ = heap->CommittedMemory(); i::ReadOnlySpace* ro_space = heap->read_only_space();
heap_statistics->total_heap_size_ =
heap->CommittedMemory() + ro_space->CommittedMemory();
heap_statistics->total_heap_size_executable_ = heap_statistics->total_heap_size_executable_ =
heap->CommittedMemoryExecutable(); heap->CommittedMemoryExecutable();
heap_statistics->total_physical_size_ = heap->CommittedPhysicalMemory(); heap_statistics->total_physical_size_ =
heap_statistics->total_available_size_ = heap->Available(); heap->CommittedPhysicalMemory() + ro_space->CommittedPhysicalMemory();
heap_statistics->used_heap_size_ = heap->SizeOfObjects(); heap_statistics->total_available_size_ =
heap->Available() + ro_space->Available();
heap_statistics->used_heap_size_ =
heap->SizeOfObjects() + ro_space->SizeOfObjects();
heap_statistics->heap_size_limit_ = heap->MaxReserved(); heap_statistics->heap_size_limit_ = heap->MaxReserved();
// TODO(7424): There is no public API for the {WasmEngine} yet. Once such an // TODO(7424): There is no public API for the {WasmEngine} yet. Once such an
// API becomes available we should report the malloced memory separately. For // API becomes available we should report the malloced memory separately. For
......
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/combined-heap.h"
namespace v8 {
namespace internal {
// Returns the next object in the combined heap, or a null HeapObject once
// both underlying iterators are exhausted. Read-only space is drained first;
// only afterwards does iteration proceed to the mutable heap.
HeapObject CombinedHeapIterator::next() {
  const HeapObject ro_object = ro_heap_iterator_.next();
  if (ro_object.is_null()) return heap_iterator_.next();
  return ro_object;
}
} // namespace internal
} // namespace v8
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_COMBINED_HEAP_H_
#define V8_HEAP_COMBINED_HEAP_H_
#include "src/heap/heap.h"
#include "src/heap/read-only-heap.h"
#include "src/objects.h"
namespace v8 {
namespace internal {
// This class allows iteration over the entire heap (Heap and ReadOnlyHeap). It
// uses the HeapIterator to iterate over non-read-only objects and accepts the
// same filtering option. (Interrupting iteration while filtering unreachable
// objects is still forbidden)
class V8_EXPORT_PRIVATE CombinedHeapIterator final {
 public:
  // |filtering| is forwarded to the wrapped HeapIterator and therefore only
  // applies to non-read-only objects; the read-only iterator performs no
  // reachability filtering.
  CombinedHeapIterator(Heap* heap,
                       HeapIterator::HeapObjectsFiltering filtering =
                           HeapIterator::HeapObjectsFiltering::kNoFiltering)
      : heap_iterator_(heap, filtering),
        ro_heap_iterator_(heap->read_only_heap()) {}
  // Returns the next object, or a null HeapObject when iteration over both
  // heaps is complete. Read-only objects are returned first.
  HeapObject next();

 private:
  HeapIterator heap_iterator_;
  ReadOnlyHeapIterator ro_heap_iterator_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_COMBINED_HEAP_H_
...@@ -83,10 +83,8 @@ Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc, ...@@ -83,10 +83,8 @@ Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc,
MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(bool failing_allocation) { MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(bool failing_allocation) {
const auto factory = isolate_->factory(); const auto factory = isolate_->factory();
// Allocate objects needed for code initialization. // Allocate objects needed for code initialization.
Handle<ByteArray> reloc_info = factory->NewByteArray( Handle<ByteArray> reloc_info =
code_desc_.reloc_size, Builtins::IsBuiltinId(builtin_index_) factory->NewByteArray(code_desc_.reloc_size, AllocationType::kOld);
? AllocationType::kReadOnly
: AllocationType::kOld);
Handle<CodeDataContainer> data_container = factory->NewCodeDataContainer(0); Handle<CodeDataContainer> data_container = factory->NewCodeDataContainer(0);
Handle<Code> code; Handle<Code> code;
{ {
......
...@@ -228,7 +228,7 @@ size_t Heap::Capacity() { ...@@ -228,7 +228,7 @@ size_t Heap::Capacity() {
size_t Heap::OldGenerationCapacity() { size_t Heap::OldGenerationCapacity() {
if (!HasBeenSetUp()) return 0; if (!HasBeenSetUp()) return 0;
PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces); PagedSpaces spaces(this);
size_t total = 0; size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr; for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) { space = spaces.next()) {
...@@ -240,7 +240,7 @@ size_t Heap::OldGenerationCapacity() { ...@@ -240,7 +240,7 @@ size_t Heap::OldGenerationCapacity() {
size_t Heap::CommittedOldGenerationMemory() { size_t Heap::CommittedOldGenerationMemory() {
if (!HasBeenSetUp()) return 0; if (!HasBeenSetUp()) return 0;
PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces); PagedSpaces spaces(this);
size_t total = 0; size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr; for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) { space = spaces.next()) {
...@@ -432,14 +432,16 @@ void Heap::PrintShortHeapStatistics() { ...@@ -432,14 +432,16 @@ void Heap::PrintShortHeapStatistics() {
code_lo_space_->SizeOfObjects() / KB, code_lo_space_->SizeOfObjects() / KB,
code_lo_space_->Available() / KB, code_lo_space_->Available() / KB,
code_lo_space_->CommittedMemory() / KB); code_lo_space_->CommittedMemory() / KB);
ReadOnlySpace* const ro_space = read_only_space_;
PrintIsolate(isolate_, PrintIsolate(isolate_,
"All spaces, used: %6" PRIuS "All spaces, used: %6" PRIuS
" KB" " KB"
", available: %6" PRIuS ", available: %6" PRIuS
" KB" " KB"
", committed: %6" PRIuS "KB\n", ", committed: %6" PRIuS "KB\n",
this->SizeOfObjects() / KB, this->Available() / KB, (this->SizeOfObjects() + ro_space->SizeOfObjects()) / KB,
this->CommittedMemory() / KB); (this->Available() + ro_space->Available()) / KB,
(this->CommittedMemory() + ro_space->CommittedMemory()) / KB);
PrintIsolate(isolate_, PrintIsolate(isolate_,
"Unmapper buffering %zu chunks of committed: %6" PRIuS " KB\n", "Unmapper buffering %zu chunks of committed: %6" PRIuS " KB\n",
memory_allocator()->unmapper()->NumberOfCommittedChunks(), memory_allocator()->unmapper()->NumberOfCommittedChunks(),
...@@ -4309,7 +4311,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { ...@@ -4309,7 +4311,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
} }
size_t Heap::OldGenerationSizeOfObjects() { size_t Heap::OldGenerationSizeOfObjects() {
PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces); PagedSpaces spaces(this);
size_t total = 0; size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr; for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) { space = spaces.next()) {
...@@ -5220,7 +5222,7 @@ PagedSpace* PagedSpaces::next() { ...@@ -5220,7 +5222,7 @@ PagedSpace* PagedSpaces::next() {
} }
SpaceIterator::SpaceIterator(Heap* heap) SpaceIterator::SpaceIterator(Heap* heap)
: heap_(heap), current_space_(FIRST_SPACE - 1) {} : heap_(heap), current_space_(FIRST_MUTABLE_SPACE - 1) {}
SpaceIterator::~SpaceIterator() = default; SpaceIterator::~SpaceIterator() = default;
......
...@@ -2207,13 +2207,7 @@ class VerifySmisVisitor : public RootVisitor { ...@@ -2207,13 +2207,7 @@ class VerifySmisVisitor : public RootVisitor {
// space in turn, and null when it is done. // space in turn, and null when it is done.
class V8_EXPORT_PRIVATE PagedSpaces { class V8_EXPORT_PRIVATE PagedSpaces {
public: public:
enum class SpacesSpecifier { kSweepablePagedSpaces, kAllPagedSpaces }; explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
explicit PagedSpaces(Heap* heap, SpacesSpecifier specifier =
SpacesSpecifier::kSweepablePagedSpaces)
: heap_(heap),
counter_(specifier == SpacesSpecifier::kAllPagedSpaces ? RO_SPACE
: OLD_SPACE) {}
PagedSpace* next(); PagedSpace* next();
private: private:
...@@ -2235,19 +2229,21 @@ class SpaceIterator : public Malloced { ...@@ -2235,19 +2229,21 @@ class SpaceIterator : public Malloced {
int current_space_; // from enum AllocationSpace. int current_space_; // from enum AllocationSpace.
}; };
// A HeapIterator provides iteration over the entire non-read-only heap. It
// A HeapIterator provides iteration over the whole heap. It // aggregates the specific iterators for the different spaces as these can only
// aggregates the specific iterators for the different spaces as // iterate over one space only.
// these can only iterate over one space only. //
// HeapIterator ensures there is no allocation during its lifetime (using an
// embedded DisallowHeapAllocation instance).
// //
// HeapIterator ensures there is no allocation during its lifetime // HeapIterator can skip free list nodes (that is, de-allocated heap objects
// (using an embedded DisallowHeapAllocation instance). // that still remain in the heap). As implementation of free nodes filtering
// uses GC marks, it can't be used during MS/MC GC phases. Also, it is forbidden
// to interrupt iteration in this mode, as this will leave heap objects marked
// (and thus, unusable).
// //
// HeapIterator can skip free list nodes (that is, de-allocated heap // See ReadOnlyHeapIterator if you need to iterate over read-only space objects,
// objects that still remain in the heap). As implementation of free // or CombinedHeapIterator if you need to iterate over both heaps.
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class V8_EXPORT_PRIVATE HeapIterator { class V8_EXPORT_PRIVATE HeapIterator {
public: public:
enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable }; enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
......
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
#include "src/compilation-cache.h" #include "src/compilation-cache.h"
#include "src/counters.h" #include "src/counters.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h" #include "src/heap/mark-compact.h"
#include "src/isolate.h" #include "src/isolate.h"
...@@ -1079,14 +1080,10 @@ class ObjectStatsVisitor { ...@@ -1079,14 +1080,10 @@ class ObjectStatsVisitor {
namespace { namespace {
void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) { void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
SpaceIterator space_it(heap); CombinedHeapIterator iterator(heap);
HeapObject obj; for (HeapObject obj = iterator.next(); !obj.is_null();
while (space_it.has_next()) { obj = iterator.next()) {
std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator()); visitor->Visit(obj, obj->Size());
ObjectIterator* obj_it = it.get();
for (obj = obj_it->Next(); !obj.is_null(); obj = obj_it->Next()) {
visitor->Visit(obj, obj->Size());
}
} }
} }
......
...@@ -9,6 +9,8 @@ ...@@ -9,6 +9,8 @@
#include "src/base/once.h" #include "src/base/once.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/heap/spaces.h" #include "src/heap/spaces.h"
#include "src/objects-inl.h"
#include "src/objects/heap-object-inl.h"
#include "src/snapshot/read-only-deserializer.h" #include "src/snapshot/read-only-deserializer.h"
namespace v8 { namespace v8 {
...@@ -77,5 +79,47 @@ bool ReadOnlyHeap::Contains(HeapObject object) { ...@@ -77,5 +79,47 @@ bool ReadOnlyHeap::Contains(HeapObject object) {
return Page::FromAddress(object.ptr())->owner()->identity() == RO_SPACE; return Page::FromAddress(object.ptr())->owner()->identity() == RO_SPACE;
} }
// Convenience overload: iterates the read-only space owned by |ro_heap|.
ReadOnlyHeapIterator::ReadOnlyHeapIterator(ReadOnlyHeap* ro_heap)
    : ReadOnlyHeapIterator(ro_heap->read_only_space()) {}

// Starts iteration at the beginning of the space's first page.
ReadOnlyHeapIterator::ReadOnlyHeapIterator(ReadOnlySpace* ro_space)
    : ro_space_(ro_space),
      current_page_(ro_space->first_page()),
      current_addr_(current_page_->area_start()) {}
// Returns the next non-filler object in read-only space, or a null
// HeapObject when iteration is complete. No reachability filtering is
// performed: every allocated object is returned exactly once.
HeapObject ReadOnlyHeapIterator::next() {
  // current_page_ becomes null once the last page has been exhausted;
  // further calls keep returning a null object.
  if (current_page_ == nullptr) {
    return HeapObject();
  }

  for (;;) {
    DCHECK_LE(current_addr_, current_page_->area_end());
    if (current_addr_ == current_page_->area_end()) {
      // Progress to the next page.
      current_page_ = current_page_->next_page();
      if (current_page_ == nullptr) {
        return HeapObject();
      }
      current_addr_ = current_page_->area_start();
    }

    // Skip the gap between the space's allocation top and limit —
    // presumably the unused tail of the linear allocation area, which
    // contains no objects. NOTE(review): confirm top()/limit() semantics.
    if (current_addr_ == ro_space_->top() &&
        current_addr_ != ro_space_->limit()) {
      current_addr_ = ro_space_->limit();
      continue;
    }
    HeapObject object = HeapObject::FromAddress(current_addr_);
    const int object_size = object->Size();
    current_addr_ += object_size;

    // Free-space fillers are not real objects; keep scanning.
    if (object->IsFiller()) {
      continue;
    }

    DCHECK_OBJECT_SIZE(object_size);
    return object;
  }
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
...@@ -13,8 +13,9 @@ ...@@ -13,8 +13,9 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
class ReadOnlySpace; class Page;
class ReadOnlyDeserializer; class ReadOnlyDeserializer;
class ReadOnlySpace;
// This class transparently manages read-only space, roots and cache creation // This class transparently manages read-only space, roots and cache creation
// and destruction. // and destruction.
...@@ -58,6 +59,20 @@ class ReadOnlyHeap final { ...@@ -58,6 +59,20 @@ class ReadOnlyHeap final {
DISALLOW_COPY_AND_ASSIGN(ReadOnlyHeap); DISALLOW_COPY_AND_ASSIGN(ReadOnlyHeap);
}; };
// This class enables iterating over all read-only heap objects.
class V8_EXPORT_PRIVATE ReadOnlyHeapIterator {
 public:
  // Iterates over the read-only space owned by |ro_heap|.
  explicit ReadOnlyHeapIterator(ReadOnlyHeap* ro_heap);
  // Iterates directly over |ro_space|.
  explicit ReadOnlyHeapIterator(ReadOnlySpace* ro_space);
  // Returns the next object, or a null HeapObject when done. Fillers are
  // skipped; no reachability filtering is performed.
  HeapObject next();

 private:
  ReadOnlySpace* const ro_space_;
  // Current position of the page walk; current_page_ is null after the last
  // page has been exhausted.
  Page* current_page_;
  Address current_addr_;
};
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
#include "src/heap/heap-controller.h" #include "src/heap/heap-controller.h"
#include "src/heap/incremental-marking-inl.h" #include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact.h" #include "src/heap/mark-compact.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/remembered-set.h" #include "src/heap/remembered-set.h"
#include "src/heap/slot-set.h" #include "src/heap/slot-set.h"
#include "src/heap/sweeper.h" #include "src/heap/sweeper.h"
...@@ -3397,14 +3398,12 @@ void ReadOnlySpace::ClearStringPaddingIfNeeded() { ...@@ -3397,14 +3398,12 @@ void ReadOnlySpace::ClearStringPaddingIfNeeded() {
if (is_string_padding_cleared_) return; if (is_string_padding_cleared_) return;
WritableScope writable_scope(this); WritableScope writable_scope(this);
for (Page* page : *this) { ReadOnlyHeapIterator iterator(this);
HeapObjectIterator iterator(page); for (HeapObject o = iterator.next(); !o.is_null(); o = iterator.next()) {
for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) { if (o->IsSeqOneByteString()) {
if (o->IsSeqOneByteString()) { SeqOneByteString::cast(o)->clear_padding();
SeqOneByteString::cast(o)->clear_padding(); } else if (o->IsSeqTwoByteString()) {
} else if (o->IsSeqTwoByteString()) { SeqTwoByteString::cast(o)->clear_padding();
SeqTwoByteString::cast(o)->clear_padding();
}
} }
} }
is_string_padding_cleared_ = true; is_string_padding_cleared_ = true;
......
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
#include "src/api-inl.h" #include "src/api-inl.h"
#include "src/debug/debug.h" #include "src/debug/debug.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/profiler/allocation-tracker.h" #include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-snapshot-generator-inl.h" #include "src/profiler/heap-snapshot-generator-inl.h"
...@@ -172,7 +173,7 @@ void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) { ...@@ -172,7 +173,7 @@ void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) { Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
HeapObject object; HeapObject object;
HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable); CombinedHeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
// Make sure that object with the given id is still reachable. // Make sure that object with the given id is still reachable.
for (HeapObject obj = iterator.next(); !obj.is_null(); for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) { obj = iterator.next()) {
...@@ -182,6 +183,7 @@ Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) { ...@@ -182,6 +183,7 @@ Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
// Can't break -- kFilterUnreachable requires full heap traversal. // Can't break -- kFilterUnreachable requires full heap traversal.
} }
} }
return !object.is_null() ? Handle<HeapObject>(object, isolate()) return !object.is_null() ? Handle<HeapObject>(object, isolate())
: Handle<HeapObject>(); : Handle<HeapObject>();
} }
...@@ -203,7 +205,7 @@ void HeapProfiler::QueryObjects(Handle<Context> context, ...@@ -203,7 +205,7 @@ void HeapProfiler::QueryObjects(Handle<Context> context,
// We should return accurate information about live objects, so we need to // We should return accurate information about live objects, so we need to
// collect all garbage first. // collect all garbage first.
heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler); heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
HeapIterator heap_iterator(heap()); CombinedHeapIterator heap_iterator(heap());
for (HeapObject heap_obj = heap_iterator.next(); !heap_obj.is_null(); for (HeapObject heap_obj = heap_iterator.next(); !heap_obj.is_null();
heap_obj = heap_iterator.next()) { heap_obj = heap_iterator.next()) {
if (!heap_obj->IsJSObject() || heap_obj->IsExternal(isolate())) continue; if (!heap_obj->IsJSObject() || heap_obj->IsExternal(isolate())) continue;
......
...@@ -11,6 +11,7 @@ ...@@ -11,6 +11,7 @@
#include "src/conversions.h" #include "src/conversions.h"
#include "src/debug/debug.h" #include "src/debug/debug.h"
#include "src/global-handles.h" #include "src/global-handles.h"
#include "src/heap/combined-heap.h"
#include "src/layout-descriptor.h" #include "src/layout-descriptor.h"
#include "src/objects-body-descriptors.h" #include "src/objects-body-descriptors.h"
#include "src/objects-inl.h" #include "src/objects-inl.h"
...@@ -394,7 +395,7 @@ void HeapObjectsMap::UpdateHeapObjectsMap() { ...@@ -394,7 +395,7 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
} }
heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags, heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kHeapProfiler); GarbageCollectionReason::kHeapProfiler);
HeapIterator iterator(heap_); CombinedHeapIterator iterator(heap_);
for (HeapObject obj = iterator.next(); !obj.is_null(); for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) { obj = iterator.next()) {
FindOrAddEntry(obj->address(), obj->Size()); FindOrAddEntry(obj->address(), obj->Size());
...@@ -645,7 +646,7 @@ const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) { ...@@ -645,7 +646,7 @@ const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
} }
int V8HeapExplorer::EstimateObjectsCount() { int V8HeapExplorer::EstimateObjectsCount() {
HeapIterator it(heap_, HeapIterator::kFilterUnreachable); CombinedHeapIterator it(heap_, HeapIterator::kFilterUnreachable);
int objects_count = 0; int objects_count = 0;
while (!it.next().is_null()) ++objects_count; while (!it.next().is_null()) ++objects_count;
return objects_count; return objects_count;
...@@ -1456,7 +1457,7 @@ bool V8HeapExplorer::IterateAndExtractReferences( ...@@ -1456,7 +1457,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
bool interrupted = false; bool interrupted = false;
HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); CombinedHeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
// Heap iteration with filtering must be finished in any case. // Heap iteration with filtering must be finished in any case.
for (HeapObject obj = iterator.next(); !obj.is_null(); for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next(), progress_->ProgressStep()) { obj = iterator.next(), progress_->ProgressStep()) {
......
...@@ -40,6 +40,9 @@ void ReadOnlySerializer::SerializeObject(HeapObject obj) { ...@@ -40,6 +40,9 @@ void ReadOnlySerializer::SerializeObject(HeapObject obj) {
// Object has not yet been serialized. Serialize it here. // Object has not yet been serialized. Serialize it here.
ObjectSerializer object_serializer(this, obj, &sink_); ObjectSerializer object_serializer(this, obj, &sink_);
object_serializer.Serialize(); object_serializer.Serialize();
#ifdef DEBUG
serialized_objects_.insert(obj);
#endif
} }
void ReadOnlySerializer::SerializeReadOnlyRoots() { void ReadOnlySerializer::SerializeReadOnlyRoots() {
...@@ -60,6 +63,16 @@ void ReadOnlySerializer::FinalizeSerialization() { ...@@ -60,6 +63,16 @@ void ReadOnlySerializer::FinalizeSerialization() {
FullObjectSlot(&undefined)); FullObjectSlot(&undefined));
SerializeDeferredObjects(); SerializeDeferredObjects();
Pad(); Pad();
#ifdef DEBUG
// Check that every object on read-only heap is reachable (and was
// serialized).
ReadOnlyHeapIterator iterator(isolate()->heap()->read_only_heap());
for (HeapObject object = iterator.next(); !object.is_null();
object = iterator.next()) {
CHECK(serialized_objects_.count(object));
}
#endif
} }
bool ReadOnlySerializer::MustBeDeferred(HeapObject object) { bool ReadOnlySerializer::MustBeDeferred(HeapObject object) {
......
...@@ -5,6 +5,8 @@ ...@@ -5,6 +5,8 @@
#ifndef V8_SNAPSHOT_READ_ONLY_SERIALIZER_H_ #ifndef V8_SNAPSHOT_READ_ONLY_SERIALIZER_H_
#define V8_SNAPSHOT_READ_ONLY_SERIALIZER_H_ #define V8_SNAPSHOT_READ_ONLY_SERIALIZER_H_
#include <unordered_set>
#include "src/snapshot/roots-serializer.h" #include "src/snapshot/roots-serializer.h"
namespace v8 { namespace v8 {
...@@ -35,6 +37,9 @@ class V8_EXPORT_PRIVATE ReadOnlySerializer : public RootsSerializer { ...@@ -35,6 +37,9 @@ class V8_EXPORT_PRIVATE ReadOnlySerializer : public RootsSerializer {
void SerializeObject(HeapObject o) override; void SerializeObject(HeapObject o) override;
bool MustBeDeferred(HeapObject object) override; bool MustBeDeferred(HeapObject object) override;
#ifdef DEBUG
std::unordered_set<HeapObject, Object::Hasher> serialized_objects_;
#endif
DISALLOW_COPY_AND_ASSIGN(ReadOnlySerializer); DISALLOW_COPY_AND_ASSIGN(ReadOnlySerializer);
}; };
......
...@@ -125,6 +125,7 @@ v8_source_set("cctest_sources") { ...@@ -125,6 +125,7 @@ v8_source_set("cctest_sources") {
"heap/test-heap.cc", "heap/test-heap.cc",
"heap/test-incremental-marking.cc", "heap/test-incremental-marking.cc",
"heap/test-invalidated-slots.cc", "heap/test-invalidated-slots.cc",
"heap/test-iterators.cc",
"heap/test-lab.cc", "heap/test-lab.cc",
"heap/test-mark-compact.cc", "heap/test-mark-compact.cc",
"heap/test-page-promotion.cc", "heap/test-page-promotion.cc",
......
...@@ -121,6 +121,7 @@ void CcTest::Run() { ...@@ -121,6 +121,7 @@ void CcTest::Run() {
} }
i::Heap* CcTest::heap() { return i_isolate()->heap(); } i::Heap* CcTest::heap() { return i_isolate()->heap(); }
i::ReadOnlyHeap* CcTest::read_only_heap() { return heap()->read_only_heap(); }
void CcTest::CollectGarbage(i::AllocationSpace space) { void CcTest::CollectGarbage(i::AllocationSpace space) {
heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting); heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
......
...@@ -133,6 +133,7 @@ class CcTest { ...@@ -133,6 +133,7 @@ class CcTest {
} }
static i::Heap* heap(); static i::Heap* heap();
static i::ReadOnlyHeap* read_only_heap();
static void CollectGarbage(i::AllocationSpace space); static void CollectGarbage(i::AllocationSpace space);
static void CollectAllGarbage(i::Isolate* isolate = nullptr); static void CollectAllGarbage(i::Isolate* isolate = nullptr);
......
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "include/v8.h"
#include "src/api-inl.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap.h"
#include "src/heap/read-only-heap.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects/heap-object.h"
#include "src/roots-inl.h"
#include "test/cctest/cctest.h"
namespace v8 {
namespace internal {
namespace heap {
// Once a HeapIterator is exhausted, next() must keep returning a null object
// no matter how many more times it is called.
TEST(HeapIteratorNullPastEnd) {
  HeapIterator iterator(CcTest::heap());
  // Drain the iterator completely.
  while (!iterator.next().is_null()) {
  }
  int repeated_calls = 0;
  while (repeated_calls < 20) {
    CHECK(iterator.next().is_null());
    ++repeated_calls;
  }
}
// Once a ReadOnlyHeapIterator is exhausted, next() must keep returning a null
// object no matter how many more times it is called.
TEST(ReadOnlyHeapIteratorNullPastEnd) {
  // Use the CcTest::read_only_heap() accessor for consistency with
  // TEST(ReadOnlyHeapIterator) below, instead of reaching through
  // CcTest::heap().
  ReadOnlyHeapIterator iterator(CcTest::read_only_heap());
  // Drain the iterator completely.
  while (!iterator.next().is_null()) {
  }
  for (int i = 0; i < 20; i++) {
    CHECK(iterator.next().is_null());
  }
}
// Once a CombinedHeapIterator is exhausted, next() must keep returning a null
// object no matter how many more times it is called.
TEST(CombinedHeapIteratorNullPastEnd) {
  CombinedHeapIterator iterator(CcTest::heap());
  // Drain the iterator completely.
  while (!iterator.next().is_null()) {
  }
  int repeated_calls = 0;
  while (repeated_calls < 20) {
    CHECK(iterator.next().is_null());
    ++repeated_calls;
  }
}
namespace {
// An arbitrary object guaranteed to live on the non-read-only heap.
// Allocates an arbitrary JS object, which is guaranteed to live on the
// non-read-only (mutable) heap.
Object CreateWritableObject() {
  v8::Local<v8::Object> js_object = v8::Object::New(CcTest::isolate());
  return *v8::Utils::OpenHandle(*js_object);
}
} // namespace
// TODO(v8:7464): Add more CHECKs once Contains doesn't include read-only space.
// Every object yielded by ReadOnlyHeapIterator must live in read-only space;
// in particular it must never yield a freshly allocated writable object.
TEST(ReadOnlyHeapIterator) {
  CcTest::InitializeVM();
  HandleScope handle_scope(CcTest::i_isolate());
  const Object sample_object = CreateWritableObject();
  ReadOnlyHeapIterator iterator(CcTest::read_only_heap());
  HeapObject current;
  while (!(current = iterator.next()).is_null()) {
    CHECK(ReadOnlyHeap::Contains(current));
    CHECK_NE(sample_object, current);
  }
}
// HeapIterator must never yield a read-only object, and must eventually
// yield a freshly allocated writable object.
TEST(HeapIterator) {
  CcTest::InitializeVM();
  HandleScope handle_scope(CcTest::i_isolate());
  const Object sample_object = CreateWritableObject();
  HeapIterator iterator(CcTest::heap());
  bool found_sample = false;
  for (HeapObject current = iterator.next(); !current.is_null();
       current = iterator.next()) {
    CHECK(!ReadOnlyHeap::Contains(current));
    found_sample = found_sample || (sample_object == current);
  }
  CHECK(found_sample);
}
// CombinedHeapIterator must yield only objects the heap contains, including
// a freshly allocated writable object.
TEST(CombinedHeapIterator) {
  CcTest::InitializeVM();
  HandleScope handle_scope(CcTest::i_isolate());
  const Object sample_object = CreateWritableObject();
  CombinedHeapIterator iterator(CcTest::heap());
  bool found_sample = false;
  for (HeapObject current = iterator.next(); !current.is_null();
       current = iterator.next()) {
    CHECK(CcTest::heap()->Contains(current));
    found_sample = found_sample || (sample_object == current);
  }
  CHECK(found_sample);
}
} // namespace heap
} // namespace internal
} // namespace v8
...@@ -9,6 +9,7 @@ ...@@ -9,6 +9,7 @@
#include "src/frames.h" #include "src/frames.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/spaces.h" #include "src/heap/spaces.h"
#include "src/isolate.h" #include "src/isolate.h"
#include "src/objects-inl.h" #include "src/objects-inl.h"
...@@ -65,6 +66,37 @@ static void DumpMaps(i::PagedSpace* space) { ...@@ -65,6 +66,37 @@ static void DumpMaps(i::PagedSpace* space) {
#undef MUTABLE_ROOT_LIST_CASE #undef MUTABLE_ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE #undef RO_ROOT_LIST_CASE
// If |object| is a known immortal-immovable root, prints a Python dict entry
// "(space, page-relative offset): root name" to stdout; otherwise does
// nothing. The offset is page-relative because immortal-immovable roots keep
// a stable position within their page across runs.
static void DumpKnownObject(i::Heap* heap, const char* space_name,
                            i::HeapObject object) {
// Use nullptr (not NULL) in the match macros, consistent with the function
// body below.
#define RO_ROOT_LIST_CASE(type, name, CamelName)     \
  if (root_name == nullptr && object == roots.name()) { \
    root_name = #CamelName;                          \
    root_index = i::RootIndex::k##CamelName;         \
  }
#define ROOT_LIST_CASE(type, name, CamelName)        \
  if (root_name == nullptr && object == heap->name()) { \
    root_name = #CamelName;                          \
    root_index = i::RootIndex::k##CamelName;         \
  }

  i::ReadOnlyRoots roots(heap);
  const char* root_name = nullptr;
  i::RootIndex root_index = i::RootIndex::kFirstSmiRoot;
  intptr_t root_ptr = object.ptr() & (i::Page::kPageSize - 1);

  // First match wins; each expansion only assigns if root_name is still null.
  STRONG_READ_ONLY_ROOT_LIST(RO_ROOT_LIST_CASE)
  MUTABLE_ROOT_LIST(ROOT_LIST_CASE)

  if (root_name == nullptr) return;
  // Only immortal-immovable roots have stable, dump-worthy addresses.
  if (!i::RootsTable::IsImmortalImmovable(root_index)) return;

  i::PrintF("  (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", space_name, root_ptr,
            root_name);

#undef ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
}
static int DumpHeapConstants(const char* argv0) { static int DumpHeapConstants(const char* argv0) {
// Start up V8. // Start up V8.
std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform(); std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
...@@ -78,7 +110,6 @@ static int DumpHeapConstants(const char* argv0) { ...@@ -78,7 +110,6 @@ static int DumpHeapConstants(const char* argv0) {
{ {
Isolate::Scope scope(isolate); Isolate::Scope scope(isolate);
i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap(); i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
i::ReadOnlyRoots roots(heap);
i::PrintF("%s", kHeader); i::PrintF("%s", kHeader);
#define DUMP_TYPE(T) i::PrintF(" %d: \"%s\",\n", i::T, #T); #define DUMP_TYPE(T) i::PrintF(" %d: \"%s\",\n", i::T, #T);
i::PrintF("INSTANCE_TYPES = {\n"); i::PrintF("INSTANCE_TYPES = {\n");
...@@ -95,18 +126,16 @@ static int DumpHeapConstants(const char* argv0) { ...@@ -95,18 +126,16 @@ static int DumpHeapConstants(const char* argv0) {
// Dump the KNOWN_OBJECTS table to the console. // Dump the KNOWN_OBJECTS table to the console.
i::PrintF("\n# List of known V8 objects.\n"); i::PrintF("\n# List of known V8 objects.\n");
#define RO_ROOT_LIST_CASE(type, name, CamelName) \
if (n == NULL && o == roots.name()) { \
n = #CamelName; \
i = i::RootIndex::k##CamelName; \
}
#define ROOT_LIST_CASE(type, name, CamelName) \
if (n == NULL && o == heap->name()) { \
n = #CamelName; \
i = i::RootIndex::k##CamelName; \
}
i::PagedSpaces spit(heap, i::PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
i::PrintF("KNOWN_OBJECTS = {\n"); i::PrintF("KNOWN_OBJECTS = {\n");
i::ReadOnlyHeapIterator ro_iterator(heap->read_only_heap());
for (i::HeapObject object = ro_iterator.next(); !object.is_null();
object = ro_iterator.next()) {
// Skip read-only heap maps, they will be reported elsewhere.
if (object->IsMap()) continue;
DumpKnownObject(heap, i::Heap::GetSpaceName(i::RO_SPACE), object);
}
i::PagedSpaces spit(heap);
for (i::PagedSpace* s = spit.next(); s != nullptr; s = spit.next()) { for (i::PagedSpace* s = spit.next(); s != nullptr; s = spit.next()) {
i::HeapObjectIterator it(s); i::HeapObjectIterator it(s);
// Code objects are generally platform-dependent. // Code objects are generally platform-dependent.
...@@ -114,21 +143,10 @@ static int DumpHeapConstants(const char* argv0) { ...@@ -114,21 +143,10 @@ static int DumpHeapConstants(const char* argv0) {
continue; continue;
const char* sname = s->name(); const char* sname = s->name();
for (i::HeapObject o = it.Next(); !o.is_null(); o = it.Next()) { for (i::HeapObject o = it.Next(); !o.is_null(); o = it.Next()) {
// Skip maps in RO_SPACE since they will be reported elsewhere. DumpKnownObject(heap, sname, o);
if (o->IsMap()) continue;
const char* n = nullptr;
i::RootIndex i = i::RootIndex::kFirstSmiRoot;
intptr_t p = o.ptr() & (i::Page::kPageSize - 1);
STRONG_READ_ONLY_ROOT_LIST(RO_ROOT_LIST_CASE)
MUTABLE_ROOT_LIST(ROOT_LIST_CASE)
if (n == nullptr) continue;
if (!i::RootsTable::IsImmortalImmovable(i)) continue;
i::PrintF(" (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", sname, p, n);
} }
} }
i::PrintF("}\n"); i::PrintF("}\n");
#undef ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
// Dump frame markers // Dump frame markers
i::PrintF("\n# List of known V8 Frame Markers.\n"); i::PrintF("\n# List of known V8 Frame Markers.\n");
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment