Commit fea98b6f authored by Dan Elphick, committed by Commit Bot

[heap] Enable Verify for RO_SPACE when it is shared

This re-enables verification for RO_SPACE when V8_SHARED_RO_HEAP is
defined. This required refactoring PagedSpaceObjectIterator and methods
in its calling chain to explicitly pass the PagedSpace and Heap pointers
through as they cannot be obtained from the Page in this mode.
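
The shape of that refactoring, as a minimal standalone sketch (the types below are simplified, hypothetical stand-ins, not the real V8 classes): with a shared read-only heap a Page has no owning space, so page->owner() can no longer supply the PagedSpace or Heap, and callers pass both in explicitly.

// Minimal sketch with hypothetical stand-in types (not the real V8 classes):
// with V8_SHARED_RO_HEAP a read-only Page is shared between isolates and has
// no owner, so an iterator can no longer recover the space/heap from the page
// and callers must pass them in.
#include <cassert>
#include <cstdio>

struct Heap {};

struct PagedSpace {
  Heap* heap;
};

struct Page {
  PagedSpace* owner = nullptr;  // stays null for a shared read-only page
};

// Old shape: derived both pointers from the page; breaks when owner is null.
struct OldStyleIterator {
  explicit OldStyleIterator(Page* page)
      : space_(page->owner),
        heap_(page->owner ? page->owner->heap : nullptr) {}
  PagedSpace* space_;
  Heap* heap_;
};

// New shape: the caller, which already knows the heap and space, passes them.
struct NewStyleIterator {
  NewStyleIterator(Heap* heap, PagedSpace* space, Page* page)
      : heap_(heap), space_(space), page_(page) {}
  Heap* heap_;
  PagedSpace* space_;
  Page* page_;
};

int main() {
  Heap heap;
  PagedSpace read_only_space{&heap};
  Page shared_ro_page;  // owner stays null, as for shared RO_SPACE pages

  OldStyleIterator old_it(&shared_ro_page);
  assert(old_it.heap_ == nullptr);  // the old scheme cannot find the heap here

  NewStyleIterator it(&heap, &read_only_space, &shared_ro_page);
  assert(it.heap_ == &heap);  // heap is known even though the page has no owner
  std::puts("iterator constructed without consulting page->owner");
  return 0;
}

The diff below applies exactly this kind of signature change to PagedSpaceObjectIterator and to the GetObjectIterator() overrides in its calling chain.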

Also removes the unused Page::Print() method.

Bug: v8:7464
Change-Id: Ibc2c147a7bde6723f43bbaf93cf1db93e76c611e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1893350
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64722}
parent 05eb0e41
@@ -61,7 +61,7 @@ void CodeStatistics::ResetCodeAndMetadataStatistics(Isolate* isolate) {
// - by code comment (only in debug mode)
void CodeStatistics::CollectCodeStatistics(PagedSpace* space,
Isolate* isolate) {
PagedSpaceObjectIterator obj_it(space);
PagedSpaceObjectIterator obj_it(isolate->heap(), space);
for (HeapObject obj = obj_it.Next(); !obj.is_null(); obj = obj_it.Next()) {
RecordCodeAndMetadataStatistics(obj, isolate);
}
@@ -1392,7 +1392,7 @@ void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
PagedSpaceIterator spaces(this);
for (PagedSpace* space = spaces.Next(); space != nullptr;
space = spaces.Next()) {
PagedSpaceObjectIterator it(space);
PagedSpaceObjectIterator it(this, space);
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
objects_by_size[obj.Size()].push_back(obj);
}
@@ -3985,14 +3985,8 @@ void Heap::Verify() {
void Heap::VerifyReadOnlyHeap() {
CHECK(!read_only_space_->writable());
// TODO(v8:7464): Always verify read-only space once PagedSpace::Verify
// supports verifying shared read-only space. Currently
// PagedSpaceObjectIterator is explicitly disabled for read-only space when
// sharing is enabled, because it relies on PagedSpace::heap_ being non-null.
#ifndef V8_SHARED_RO_HEAP
VerifyReadOnlyPointersVisitor read_only_visitor(this);
read_only_space_->Verify(isolate(), &read_only_visitor);
#endif
}
class SlotVerifyingVisitor : public ObjectVisitor {
@@ -4155,7 +4149,7 @@ void Heap::VerifyCountersAfterSweeping() {
PagedSpaceIterator spaces(this);
for (PagedSpace* space = spaces.Next(); space != nullptr;
space = spaces.Next()) {
space->VerifyCountersAfterSweeping();
space->VerifyCountersAfterSweeping(this);
}
}
@@ -5730,7 +5724,7 @@ HeapObjectIterator::HeapObjectIterator(
default:
break;
}
object_iterator_ = space_iterator_->Next()->GetObjectIterator();
object_iterator_ = space_iterator_->Next()->GetObjectIterator(heap_);
}
HeapObjectIterator::~HeapObjectIterator() {
@@ -5764,7 +5758,7 @@ HeapObject HeapObjectIterator::NextObject() {
} else {
// Go though the spaces looking for one that has objects.
while (space_iterator_->HasNext()) {
object_iterator_ = space_iterator_->Next()->GetObjectIterator();
object_iterator_ = space_iterator_->Next()->GetObjectIterator(heap_);
obj = object_iterator_.get()->Next();
if (!obj.is_null()) {
return obj;
@@ -85,7 +85,7 @@ HeapObject PagedSpaceObjectIterator::FromCurrentPage() {
DCHECK_LE(cur_addr_, cur_end_);
if (!obj.IsFreeSpaceOrFiller()) {
if (obj.IsCode()) {
DCHECK_EQ(space_, space_->heap()->code_space());
DCHECK_EQ(space_, heap_->code_space());
DCHECK_CODEOBJECT_SIZE(obj_size, space_);
} else {
DCHECK_OBJECT_SIZE(obj_size);
@@ -48,29 +48,24 @@ STATIC_ASSERT(kClearedWeakHeapObjectLower32 < LargePage::kHeaderSize);
// ----------------------------------------------------------------------------
// PagedSpaceObjectIterator
PagedSpaceObjectIterator::PagedSpaceObjectIterator(PagedSpace* space)
PagedSpaceObjectIterator::PagedSpaceObjectIterator(Heap* heap,
PagedSpace* space)
: cur_addr_(kNullAddress),
cur_end_(kNullAddress),
heap_(heap),
space_(space),
page_range_(space->first_page(), nullptr),
current_page_(page_range_.begin()) {
#ifdef V8_SHARED_RO_HEAP
DCHECK_NE(space->identity(), RO_SPACE);
#endif
}
current_page_(page_range_.begin()) {}
PagedSpaceObjectIterator::PagedSpaceObjectIterator(Page* page)
PagedSpaceObjectIterator::PagedSpaceObjectIterator(Heap* heap,
PagedSpace* space,
Page* page)
: cur_addr_(kNullAddress),
cur_end_(kNullAddress),
space_(reinterpret_cast<PagedSpace*>(page->owner())),
heap_(heap),
space_(space),
page_range_(page),
current_page_(page_range_.begin()) {
#ifdef V8_SHARED_RO_HEAP
// TODO(v8:7464): Always enforce this once PagedSpace::Verify is no longer
// used to verify read-only space for non-shared builds.
DCHECK(!page->InReadOnlySpace());
#endif // V8_SHARED_RO_HEAP
#ifdef DEBUG
AllocationSpace owner = page->owner_identity();
DCHECK(owner == RO_SPACE || owner == OLD_SPACE || owner == MAP_SPACE ||
@@ -86,10 +81,9 @@ bool PagedSpaceObjectIterator::AdvanceToNextPage() {
Page* cur_page = *(current_page_++);
#ifdef ENABLE_MINOR_MC
Heap* heap = space_->heap();
heap->mark_compact_collector()->sweeper()->EnsurePageIsIterable(cur_page);
heap_->mark_compact_collector()->sweeper()->EnsurePageIsIterable(cur_page);
if (cur_page->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
heap->minor_mark_compact_collector()->MakeIterable(
heap_->minor_mark_compact_collector()->MakeIterable(
cur_page, MarkingTreatmentMode::CLEAR,
FreeSpaceTreatmentMode::IGNORE_FREE_SPACE);
}
@@ -2026,8 +2020,9 @@ void PagedSpace::SetReadAndWritable() {
}
}
std::unique_ptr<ObjectIterator> PagedSpace::GetObjectIterator() {
return std::unique_ptr<ObjectIterator>(new PagedSpaceObjectIterator(this));
std::unique_ptr<ObjectIterator> PagedSpace::GetObjectIterator(Heap* heap) {
return std::unique_ptr<ObjectIterator>(
new PagedSpaceObjectIterator(heap, this));
}
bool PagedSpace::RefillLinearAllocationAreaFromFreeList(
@@ -2101,7 +2096,15 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
}
for (Page* page : *this) {
CHECK(page->owner() == this);
#ifdef V8_SHARED_RO_HEAP
if (identity() == RO_SPACE) {
CHECK_NULL(page->owner());
} else {
CHECK_EQ(page->owner(), this);
}
#else
CHECK_EQ(page->owner(), this);
#endif
for (int i = 0; i < kNumTypes; i++) {
external_page_bytes[static_cast<ExternalBackingStoreType>(i)] = 0;
@@ -2111,7 +2114,7 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
allocation_pointer_found_in_space = true;
}
CHECK(page->SweepingDone());
PagedSpaceObjectIterator it(page);
PagedSpaceObjectIterator it(isolate->heap(), this, page);
Address end_of_previous_object = page->area_start();
Address top = page->area_end();
@@ -2165,16 +2168,17 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
}
CHECK(allocation_pointer_found_in_space);
#ifdef DEBUG
VerifyCountersAfterSweeping();
VerifyCountersAfterSweeping(isolate->heap());
#endif
}
void PagedSpace::VerifyLiveBytes() {
DCHECK_NE(identity(), RO_SPACE);
IncrementalMarking::MarkingState* marking_state =
heap()->incremental_marking()->marking_state();
for (Page* page : *this) {
CHECK(page->SweepingDone());
PagedSpaceObjectIterator it(page);
PagedSpaceObjectIterator it(heap(), this, page);
int black_size = 0;
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
// All the interior pointers should be contained in the heap.
@@ -2188,13 +2192,13 @@ void PagedSpace::VerifyLiveBytes() {
#endif // VERIFY_HEAP
#ifdef DEBUG
void PagedSpace::VerifyCountersAfterSweeping() {
void PagedSpace::VerifyCountersAfterSweeping(Heap* heap) {
size_t total_capacity = 0;
size_t total_allocated = 0;
for (Page* page : *this) {
DCHECK(page->SweepingDone());
total_capacity += page->area_size();
PagedSpaceObjectIterator it(page);
PagedSpaceObjectIterator it(heap, this, page);
size_t real_allocated = 0;
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
if (!object.IsFreeSpaceOrFiller()) {
@@ -2583,7 +2587,7 @@ void SpaceWithLinearArea::InlineAllocationStep(Address top,
}
}
std::unique_ptr<ObjectIterator> NewSpace::GetObjectIterator() {
std::unique_ptr<ObjectIterator> NewSpace::GetObjectIterator(Heap* heap) {
return std::unique_ptr<ObjectIterator>(new SemiSpaceObjectIterator(this));
}
@@ -2876,7 +2880,7 @@ void SemiSpace::set_age_mark(Address mark) {
}
}
std::unique_ptr<ObjectIterator> SemiSpace::GetObjectIterator() {
std::unique_ptr<ObjectIterator> SemiSpace::GetObjectIterator(Heap* heap) {
// Use the NewSpace::NewObjectIterator to iterate the ToSpace.
UNREACHABLE();
}
@@ -4202,7 +4206,8 @@ bool LargeObjectSpace::ContainsSlow(Address addr) {
return false;
}
std::unique_ptr<ObjectIterator> LargeObjectSpace::GetObjectIterator() {
std::unique_ptr<ObjectIterator> LargeObjectSpace::GetObjectIterator(
Heap* heap) {
return std::unique_ptr<ObjectIterator>(
new LargeObjectSpaceObjectIterator(this));
}
@@ -4298,30 +4303,6 @@ void LargeObjectSpace::Print() {
obj.Print(os);
}
}
void Page::Print() {
// Make a best-effort to print the objects in the page.
PrintF("Page@%p in %s\n", reinterpret_cast<void*>(this->address()),
Heap::GetSpaceName(this->owner_identity()));
PrintF(" --------------------------------------\n");
PagedSpaceObjectIterator objects(this);
unsigned mark_size = 0;
for (HeapObject object = objects.Next(); !object.is_null();
object = objects.Next()) {
bool is_marked =
heap()->incremental_marking()->marking_state()->IsBlackOrGrey(object);
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
if (is_marked) {
mark_size += object.Size();
}
object.ShortPrint();
PrintF("\n");
}
PrintF(" --------------------------------------\n");
PrintF(" Marked: %x, LiveCount: %" V8PRIdPTR "\n", mark_size,
heap()->incremental_marking()->marking_state()->live_bytes(this));
}
#endif // DEBUG
OldLargeObjectSpace::OldLargeObjectSpace(Heap* heap)
@@ -455,7 +455,7 @@ class V8_EXPORT_PRIVATE Space : public Malloced {
}
}
virtual std::unique_ptr<ObjectIterator> GetObjectIterator() = 0;
virtual std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) = 0;
void AccountCommitted(size_t bytes) {
DCHECK_GE(committed_ + bytes, committed_);
@@ -1101,10 +1101,6 @@ class Page : public MemoryChunk {
void MoveOldToNewRememberedSetForSweeping();
void MergeOldToNewRememberedSets();
#ifdef DEBUG
void Print();
#endif // DEBUG
private:
friend class MemoryAllocator;
};
@@ -1619,8 +1615,8 @@ class PageRange {
class V8_EXPORT_PRIVATE PagedSpaceObjectIterator : public ObjectIterator {
public:
// Creates a new object iterator in a given space.
explicit PagedSpaceObjectIterator(PagedSpace* space);
explicit PagedSpaceObjectIterator(Page* page);
PagedSpaceObjectIterator(Heap* heap, PagedSpace* space);
PagedSpaceObjectIterator(Heap* heap, PagedSpace* space, Page* page);
// Advance to the next object, skipping free spaces and other fillers and
// skipping the special garbage section of which there is one per space.
@@ -1637,6 +1633,7 @@ class V8_EXPORT_PRIVATE PagedSpaceObjectIterator : public ObjectIterator {
Address cur_addr_; // Current iteration point.
Address cur_end_; // End iteration point.
Heap* heap_;
PagedSpace* space_;
PageRange page_range_;
PageRange::iterator current_page_;
@@ -2442,7 +2439,7 @@ class V8_EXPORT_PRIVATE PagedSpace
#endif
#ifdef DEBUG
void VerifyCountersAfterSweeping();
void VerifyCountersAfterSweeping(Heap* heap);
void VerifyCountersBeforeConcurrentSweeping();
// Print meta info and objects in this space.
void Print() override;
@@ -2493,7 +2490,7 @@ class V8_EXPORT_PRIVATE PagedSpace
size_t ShrinkPageToHighWaterMark(Page* page);
std::unique_ptr<ObjectIterator> GetObjectIterator() override;
std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) override;
void SetLinearAllocationArea(Address top, Address limit);
@@ -2701,7 +2698,7 @@ class SemiSpace : public Space {
iterator begin() { return iterator(first_page()); }
iterator end() { return iterator(nullptr); }
std::unique_ptr<ObjectIterator> GetObjectIterator() override;
std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) override;
#ifdef DEBUG
V8_EXPORT_PRIVATE void Print() override;
@@ -2996,7 +2993,7 @@ class V8_EXPORT_PRIVATE NewSpace
iterator begin() { return to_space_.begin(); }
iterator end() { return to_space_.end(); }
std::unique_ptr<ObjectIterator> GetObjectIterator() override;
std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) override;
SemiSpace& from_space() { return from_space_; }
SemiSpace& to_space() { return to_space_; }
@@ -3268,7 +3265,7 @@ class LargeObjectSpace : public Space {
iterator begin() { return iterator(first_page()); }
iterator end() { return iterator(nullptr); }
std::unique_ptr<ObjectIterator> GetObjectIterator() override;
std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) override;
#ifdef VERIFY_HEAP
virtual void Verify(Isolate* isolate);
@@ -135,7 +135,7 @@ static int DumpHeapConstants(FILE* out, const char* argv0) {
if (!object.IsMap()) continue;
DumpKnownMap(out, heap, i::Heap::GetSpaceName(i::RO_SPACE), object);
}
i::PagedSpaceObjectIterator iterator(heap->map_space());
i::PagedSpaceObjectIterator iterator(heap, heap->map_space());
for (i::HeapObject object = iterator.Next(); !object.is_null();
object = iterator.Next()) {
if (!object.IsMap()) continue;
@@ -158,7 +158,7 @@ static int DumpHeapConstants(FILE* out, const char* argv0) {
i::PagedSpaceIterator spit(heap);
for (i::PagedSpace* s = spit.Next(); s != nullptr; s = spit.Next()) {
i::PagedSpaceObjectIterator it(s);
i::PagedSpaceObjectIterator it(heap, s);
// Code objects are generally platform-dependent.
if (s->identity() == i::CODE_SPACE || s->identity() == i::MAP_SPACE)
continue;