Commit 4244b989 authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Modernize all *Page iterators to be proper C++ iterators

As part of the page type unification also unify page iterators. Iterating
over a space works the same for all spaces now (new, old, lo).

Iterating over pages of a space now follows the regular C++ iterator pattern:
- for (auto it = space->begin(); it != space->end(); ++it) {}
- for (Page* p : *space) {}

GC only: Loop supporting unlinking/freeing of a Page on the fly:
  for (auto it = space->begin(); it != space->end();) {
    Page* p = *(it++);
    p->Unlink();
  }

For iteration of a range of new space pages use NewSpacePageRange which
also verifies that the range is actually a proper new space page range.

BUG=chromium:581412
LOG=N

Review-Url: https://codereview.chromium.org/2088223002
Cr-Commit-Position: refs/heads/master@{#37210}
parent 2658eb2a
......@@ -75,10 +75,8 @@ void LocalArrayBufferTracker::Process(Callback callback) {
void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
NewSpacePageIterator from_it(heap->new_space()->FromSpaceStart(),
heap->new_space()->FromSpaceEnd());
while (from_it.has_next()) {
Page* page = from_it.next();
for (Page* page : NewSpacePageRange(heap->new_space()->FromSpaceStart(),
heap->new_space()->FromSpaceEnd())) {
bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
CHECK(empty);
}
......
......@@ -4659,10 +4659,8 @@ void Heap::Verify() {
void Heap::ZapFromSpace() {
if (!new_space_.IsFromSpaceCommitted()) return;
NewSpacePageIterator it(new_space_.FromSpaceStart(),
new_space_.FromSpaceEnd());
while (it.has_next()) {
Page* page = it.next();
for (Page* page : NewSpacePageRange(new_space_.FromSpaceStart(),
new_space_.FromSpaceEnd())) {
for (Address cursor = page->area_start(), limit = page->area_end();
cursor < limit; cursor += kPointerSize) {
Memory::Address_at(cursor) = kFromSpaceZapValue;
......@@ -5379,8 +5377,9 @@ void Heap::NotifyDeserializationComplete() {
// All pages right after bootstrapping must be marked as never-evacuate.
PagedSpaces spaces(this);
for (PagedSpace* s = spaces.next(); s != NULL; s = spaces.next()) {
PageIterator it(s);
while (it.has_next()) CHECK(it.next()->NeverEvacuate());
for (Page* p : *s) {
CHECK(p->NeverEvacuate());
}
}
#endif // DEBUG
}
......
......@@ -345,9 +345,7 @@ void IncrementalMarking::SetNewSpacePageFlags(MemoryChunk* chunk,
void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
SetOldSpacePageFlags(p, false, false);
}
}
......@@ -355,9 +353,7 @@ void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
NewSpace* space) {
NewSpacePageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
SetNewSpacePageFlags(p, false);
}
}
......@@ -369,27 +365,21 @@ void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
LargePage* lop = heap_->lo_space()->first_page();
while (LargePage::IsValid(lop)) {
for (LargePage* lop : *heap_->lo_space()) {
SetOldSpacePageFlags(lop, false, false);
lop = lop->next_page();
}
}
void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
SetOldSpacePageFlags(p, true, is_compacting_);
}
}
void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
SetNewSpacePageFlags(p, true);
}
}
......@@ -401,10 +391,8 @@ void IncrementalMarking::ActivateIncrementalWriteBarrier() {
ActivateIncrementalWriteBarrier(heap_->code_space());
ActivateIncrementalWriteBarrier(heap_->new_space());
LargePage* lop = heap_->lo_space()->first_page();
while (LargePage::IsValid(lop)) {
for (LargePage* lop : *heap_->lo_space()) {
SetOldSpacePageFlags(lop, true, is_compacting_);
lop = lop->next_page();
}
}
......
......@@ -131,13 +131,14 @@ static void VerifyMarkingBlackPage(Heap* heap, Page* page) {
static void VerifyMarking(NewSpace* space) {
Address end = space->top();
NewSpacePageIterator it(space->bottom(), end);
// The bottom position is at the start of its page. Allows us to use
// page->area_start() as start of range on all pages.
CHECK_EQ(space->bottom(), Page::FromAddress(space->bottom())->area_start());
while (it.has_next()) {
Page* page = it.next();
Address limit = it.has_next() ? page->area_end() : end;
NewSpacePageRange range(space->bottom(), end);
for (auto it = range.begin(); it != range.end();) {
Page* page = *(it++);
Address limit = it != range.end() ? page->area_end() : end;
CHECK(limit == end || !page->Contains(end));
VerifyMarking(space->heap(), page->area_start(), limit);
}
......@@ -145,10 +146,7 @@ static void VerifyMarking(NewSpace* space) {
static void VerifyMarking(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
if (p->IsFlagSet(Page::BLACK_PAGE)) {
VerifyMarkingBlackPage(space->heap(), p);
} else {
......@@ -204,13 +202,12 @@ static void VerifyEvacuation(Page* page) {
static void VerifyEvacuation(NewSpace* space) {
NewSpacePageIterator it(space->bottom(), space->top());
VerifyEvacuationVisitor visitor;
while (it.has_next()) {
Page* page = it.next();
NewSpacePageRange range(space->bottom(), space->top());
for (auto it = range.begin(); it != range.end();) {
Page* page = *(it++);
Address current = page->area_start();
Address limit = it.has_next() ? page->area_end() : space->top();
Address limit = it != range.end() ? page->area_end() : space->top();
CHECK(limit == space->top() || !page->Contains(space->top()));
while (current < limit) {
HeapObject* object = HeapObject::FromAddress(current);
......@@ -225,10 +222,7 @@ static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
if (FLAG_use_allocation_folding && (space == heap->old_space())) {
return;
}
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
if (p->IsEvacuationCandidate()) continue;
VerifyEvacuation(p);
}
......@@ -360,10 +354,7 @@ void MarkCompactCollector::CollectGarbage() {
#ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
CHECK(p->markbits()->IsClean());
CHECK_EQ(0, p->LiveBytes());
}
......@@ -371,10 +362,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
NewSpacePageIterator it(space->bottom(), space->top());
while (it.has_next()) {
Page* p = it.next();
for (Page* p : NewSpacePageRange(space->bottom(), space->top())) {
CHECK(p->markbits()->IsClean());
CHECK_EQ(0, p->LiveBytes());
}
......@@ -419,10 +407,7 @@ void MarkCompactCollector::VerifyOmittedMapChecks() {
static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
Bitmap::Clear(p);
if (p->IsFlagSet(Page::BLACK_PAGE)) {
p->ClearFlag(Page::BLACK_PAGE);
......@@ -432,10 +417,8 @@ static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
static void ClearMarkbitsInNewSpace(NewSpace* space) {
NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
while (it.has_next()) {
Bitmap::Clear(it.next());
for (Page* page : *space) {
Bitmap::Clear(page);
}
}
......@@ -572,10 +555,8 @@ void MarkCompactCollector::Sweeper::EnsureCompleted() {
void MarkCompactCollector::Sweeper::EnsureNewSpaceCompleted() {
if (!sweeping_in_progress_) return;
if (!FLAG_concurrent_sweeping || !IsSweepingCompleted()) {
NewSpacePageIterator pit(heap_->new_space());
while (pit.has_next()) {
Page* page = pit.next();
SweepOrWaitUntilSweepingCompleted(page);
for (Page* p : *heap_->new_space()) {
SweepOrWaitUntilSweepingCompleted(p);
}
}
}
......@@ -715,9 +696,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
std::vector<LiveBytesPagePair> pages;
pages.reserve(number_of_pages);
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
if (p->NeverEvacuate()) continue;
if (p->IsFlagSet(Page::BLACK_PAGE)) continue;
// Invariant: Evacuation candidates are just created when marking is
......@@ -1967,9 +1946,7 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
};
void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *space) {
if (!p->IsFlagSet(Page::BLACK_PAGE)) {
DiscoverGreyObjectsOnPage(p);
}
......@@ -1980,9 +1957,7 @@ void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() {
NewSpace* space = heap()->new_space();
NewSpacePageIterator it(space->bottom(), space->top());
while (it.has_next()) {
Page* page = it.next();
for (Page* page : NewSpacePageRange(space->bottom(), space->top())) {
DiscoverGreyObjectsOnPage(page);
if (marking_deque()->IsFull()) return;
}
......@@ -3060,10 +3035,9 @@ HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
void MarkCompactCollector::EvacuateNewSpacePrologue() {
NewSpace* new_space = heap()->new_space();
NewSpacePageIterator it(new_space->bottom(), new_space->top());
// Append the list of new space pages to be processed.
while (it.has_next()) {
newspace_evacuation_candidates_.Add(it.next());
for (Page* p : NewSpacePageRange(new_space->bottom(), new_space->top())) {
newspace_evacuation_candidates_.Add(p);
}
new_space->Flip();
new_space->ResetAllocationInfo();
......@@ -3795,9 +3769,7 @@ void UpdateToSpacePointersInParallel(Heap* heap, base::Semaphore* semaphore) {
heap, heap->isolate()->cancelable_task_manager(), semaphore);
Address space_start = heap->new_space()->bottom();
Address space_end = heap->new_space()->top();
NewSpacePageIterator it(space_start, space_end);
while (it.has_next()) {
Page* page = it.next();
for (Page* page : NewSpacePageRange(space_start, space_end)) {
Address start =
page->Contains(space_start) ? space_start : page->area_start();
Address end = page->Contains(space_end) ? space_end : page->area_end();
......@@ -3952,13 +3924,12 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
Address space_top = space->top();
space->ClearStats();
PageIterator it(space);
int will_be_swept = 0;
bool unused_page_present = false;
while (it.has_next()) {
Page* p = it.next();
// Loop needs to support deletion if live bytes == 0 for a page.
for (auto it = space->begin(); it != space->end();) {
Page* p = *(it++);
DCHECK(p->SweepingDone());
if (p->IsEvacuationCandidate()) {
......
......@@ -16,10 +16,7 @@ namespace internal {
template <PointerDirection direction>
void RememberedSet<direction>::ClearInvalidSlots(Heap* heap) {
STATIC_ASSERT(direction == OLD_TO_NEW);
PageIterator it(heap->old_space());
MemoryChunk* chunk;
while (it.has_next()) {
chunk = it.next();
for (MemoryChunk* chunk : *heap->old_space()) {
SlotSet* slots = GetSlotSet(chunk);
if (slots != nullptr) {
slots->Iterate([heap, chunk](Address addr) {
......
......@@ -15,6 +15,23 @@
namespace v8 {
namespace internal {
// Pre-increment: advance the iterator to the successor page in the
// space's linked page list and return the updated iterator.
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  PAGE_TYPE* const successor = p_->next_page();
  p_ = successor;
  return *this;
}
// Post-increment: remember the current position, advance via the
// pre-increment operator, and return the remembered position.
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> old_position(*this);
  ++(*this);
  return old_position;
}
// Constructs a range covering the new-space pages from the page containing
// |start| to the page containing |limit|.
// NOTE(review): SemiSpace::AssertValidRange presumably checks that both
// addresses belong to the same semi-space and are ordered — confirm against
// the SemiSpace implementation.
NewSpacePageRange::NewSpacePageRange(Address start, Address limit)
    : start_(start), limit_(limit) {
  SemiSpace::AssertValidRange(start, limit);
}
// -----------------------------------------------------------------------------
// Bitmap
......@@ -31,25 +48,6 @@ void Bitmap::SetAllBits(MemoryChunk* chunk) {
bitmap->cells()[i] = 0xffffffff;
}
// -----------------------------------------------------------------------------
// PageIterator
PageIterator::PageIterator(PagedSpace* space)
: space_(space),
prev_page_(&space->anchor_),
next_page_(prev_page_->next_page()) {}
bool PageIterator::has_next() { return next_page_ != &space_->anchor_; }
Page* PageIterator::next() {
DCHECK(has_next());
prev_page_ = next_page_;
next_page_ = next_page_->next_page();
return prev_page_;
}
// -----------------------------------------------------------------------------
// SemiSpaceIterator
......@@ -76,37 +74,6 @@ HeapObject* SemiSpaceIterator::Next() {
HeapObject* SemiSpaceIterator::next_object() { return Next(); }
// -----------------------------------------------------------------------------
// NewSpacePageIterator
NewSpacePageIterator::NewSpacePageIterator(NewSpace* space)
: prev_page_(Page::FromAddress(space->ToSpaceStart())->prev_page()),
next_page_(Page::FromAddress(space->ToSpaceStart())),
last_page_(Page::FromAllocationAreaAddress(space->ToSpaceEnd())) {}
NewSpacePageIterator::NewSpacePageIterator(SemiSpace* space)
: prev_page_(space->anchor()),
next_page_(prev_page_->next_page()),
last_page_(prev_page_->prev_page()) {}
NewSpacePageIterator::NewSpacePageIterator(Address start, Address limit)
: prev_page_(Page::FromAddress(start)->prev_page()),
next_page_(Page::FromAddress(start)),
last_page_(Page::FromAllocationAreaAddress(limit)) {
SemiSpace::AssertValidRange(start, limit);
}
bool NewSpacePageIterator::has_next() { return prev_page_ != last_page_; }
Page* NewSpacePageIterator::next() {
DCHECK(has_next());
prev_page_ = next_page_;
next_page_ = next_page_->next_page();
return prev_page_;
}
// -----------------------------------------------------------------------------
// HeapObjectIterator
......@@ -151,20 +118,6 @@ HeapObject* HeapObjectIterator::FromCurrentPage() {
return NULL;
}
// -----------------------------------------------------------------------------
// LargePageIterator
LargePageIterator::LargePageIterator(LargeObjectSpace* space)
: next_page_(space->first_page()) {}
LargePage* LargePageIterator::next() {
LargePage* result = next_page_;
if (next_page_ != nullptr) {
next_page_ = next_page_->next_page();
}
return result;
}
// -----------------------------------------------------------------------------
// MemoryAllocator
......@@ -209,9 +162,8 @@ bool SemiSpace::Contains(Object* o) {
}
bool SemiSpace::ContainsSlow(Address a) {
NewSpacePageIterator it(this);
while (it.has_next()) {
if (it.next() == MemoryChunk::FromAddress(a)) return true;
for (Page* p : *this) {
if (p == MemoryChunk::FromAddress(a)) return true;
}
return false;
}
......@@ -406,40 +358,33 @@ void Page::ClearEvacuationCandidate() {
}
MemoryChunkIterator::MemoryChunkIterator(Heap* heap)
: state_(kOldSpaceState),
old_iterator_(heap->old_space()),
code_iterator_(heap->code_space()),
map_iterator_(heap->map_space()),
lo_iterator_(heap->lo_space()) {}
: heap_(heap),
state_(kOldSpaceState),
old_iterator_(heap->old_space()->begin()),
code_iterator_(heap->code_space()->begin()),
map_iterator_(heap->map_space()->begin()),
lo_iterator_(heap->lo_space()->begin()) {}
MemoryChunk* MemoryChunkIterator::next() {
switch (state_) {
case kOldSpaceState: {
if (old_iterator_.has_next()) {
return old_iterator_.next();
}
if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
state_ = kMapState;
// Fall through.
}
case kMapState: {
if (map_iterator_.has_next()) {
return map_iterator_.next();
}
if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
state_ = kCodeState;
// Fall through.
}
case kCodeState: {
if (code_iterator_.has_next()) {
return code_iterator_.next();
}
if (code_iterator_ != heap_->code_space()->end())
return *(code_iterator_++);
state_ = kLargeObjectState;
// Fall through.
}
case kLargeObjectState: {
MemoryChunk* answer = lo_iterator_.next();
if (answer != nullptr) {
return answer;
}
if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
state_ = kFinishedState;
// Fall through;
}
......
......@@ -1088,9 +1088,8 @@ bool PagedSpace::HasBeenSetUp() { return true; }
void PagedSpace::TearDown() {
PageIterator iterator(this);
while (iterator.has_next()) {
Page* page = iterator.next();
for (auto it = begin(); it != end();) {
Page* page = *(it++); // Will be erased.
ArrayBufferTracker::FreeAll(page);
heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page);
}
......@@ -1148,10 +1147,8 @@ void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
AccountCommitted(other->CommittedMemory());
// Move over pages.
PageIterator it(other);
Page* p = nullptr;
while (it.has_next()) {
p = it.next();
for (auto it = other->begin(); it != other->end();) {
Page* p = *(it++);
// Relinking requires the category to be unlinked.
other->UnlinkFreeListCategories(p);
......@@ -1168,18 +1165,16 @@ size_t PagedSpace::CommittedPhysicalMemory() {
if (!base::VirtualMemory::HasLazyCommits()) return CommittedMemory();
MemoryChunk::UpdateHighWaterMark(allocation_info_.top());
size_t size = 0;
PageIterator it(this);
while (it.has_next()) {
size += it.next()->CommittedPhysicalMemory();
for (Page* page : *this) {
size += page->CommittedPhysicalMemory();
}
return size;
}
bool PagedSpace::ContainsSlow(Address addr) {
Page* p = Page::FromAddress(addr);
PageIterator iterator(this);
while (iterator.has_next()) {
if (iterator.next() == p) return true;
for (Page* page : *this) {
if (page == p) return true;
}
return false;
}
......@@ -1203,7 +1198,6 @@ Object* PagedSpace::FindObject(Address addr) {
return Smi::FromInt(0);
}
bool PagedSpace::Expand() {
int size = AreaSize();
if (snapshotable() && !HasPages()) {
......@@ -1241,20 +1235,17 @@ bool PagedSpace::Expand() {
int PagedSpace::CountTotalPages() {
PageIterator it(this);
int count = 0;
while (it.has_next()) {
it.next();
for (Page* page : *this) {
count++;
USE(page);
}
return count;
}
void PagedSpace::ResetFreeListStatistics() {
PageIterator page_iterator(this);
while (page_iterator.has_next()) {
Page* page = page_iterator.next();
for (Page* page : *this) {
page->ResetFreeListStatistics();
}
}
......@@ -1297,9 +1288,7 @@ void PagedSpace::Print() {}
void PagedSpace::Verify(ObjectVisitor* visitor) {
bool allocation_pointer_found_in_space =
(allocation_info_.top() == allocation_info_.limit());
PageIterator page_iterator(this);
while (page_iterator.has_next()) {
Page* page = page_iterator.next();
for (Page* page : *this) {
CHECK(page->owner() == this);
if (page == Page::FromAllocationAreaAddress(allocation_info_.top())) {
allocation_pointer_found_in_space = true;
......@@ -1530,9 +1519,8 @@ void NewSpace::ResetAllocationInfo() {
to_space_.Reset();
UpdateAllocationInfo();
// Clear all mark-bits in the to-space.
NewSpacePageIterator it(&to_space_);
while (it.has_next()) {
Bitmap::Clear(it.next());
for (Page* p : to_space_) {
Bitmap::Clear(p);
}
InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0);
}
......@@ -1750,10 +1738,8 @@ void SemiSpace::SetUp(int initial_capacity, int maximum_capacity) {
void SemiSpace::TearDown() {
// Properly uncommit memory to keep the allocator counters in sync.
if (is_committed()) {
NewSpacePageIterator it(this);
while (it.has_next()) {
Page* page = it.next();
ArrayBufferTracker::FreeAll(page);
for (Page* p : *this) {
ArrayBufferTracker::FreeAll(p);
}
Uncommit();
}
......@@ -1788,10 +1774,9 @@ bool SemiSpace::Commit() {
bool SemiSpace::Uncommit() {
DCHECK(is_committed());
NewSpacePageIterator it(this);
while (it.has_next()) {
heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>(
it.next());
for (auto it = begin(); it != end();) {
Page* p = *(it++);
heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>(p);
}
anchor()->set_next_page(anchor());
anchor()->set_prev_page(anchor());
......@@ -1805,9 +1790,8 @@ bool SemiSpace::Uncommit() {
size_t SemiSpace::CommittedPhysicalMemory() {
if (!is_committed()) return 0;
size_t size = 0;
NewSpacePageIterator it(this);
while (it.has_next()) {
size += it.next()->CommittedPhysicalMemory();
for (Page* p : *this) {
size += p->CommittedPhysicalMemory();
}
return size;
}
......@@ -1888,9 +1872,7 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) {
anchor_.prev_page()->set_next_page(&anchor_);
anchor_.next_page()->set_prev_page(&anchor_);
NewSpacePageIterator it(this);
while (it.has_next()) {
Page* page = it.next();
for (Page* page : *this) {
page->set_owner(this);
page->SetFlags(flags, mask);
if (id_ == kToSpace) {
......@@ -1953,9 +1935,8 @@ void SemiSpace::set_age_mark(Address mark) {
DCHECK_EQ(Page::FromAllocationAreaAddress(mark)->owner(), this);
age_mark_ = mark;
// Mark all pages up to the one containing mark.
NewSpacePageIterator it(space_start(), mark);
while (it.has_next()) {
it.next()->SetFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
for (Page* p : NewSpacePageRange(space_start(), mark)) {
p->SetFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
}
}
......@@ -2661,9 +2642,7 @@ void PagedSpace::RepairFreeListsAfterDeserialization() {
free_list_.RepairLists(heap());
// Each page may have a small free space that is not tracked by a free list.
// Update the maps for those free space objects.
PageIterator iterator(this);
while (iterator.has_next()) {
Page* page = iterator.next();
for (Page* page : *this) {
int size = static_cast<int>(page->wasted_memory());
if (size == 0) continue;
Address address = page->OffsetToAddress(Page::kPageSize - size);
......
......@@ -1619,25 +1619,6 @@ class HeapObjectIterator : public ObjectIterator {
};
// -----------------------------------------------------------------------------
// A PageIterator iterates the pages in a paged space.
class PageIterator BASE_EMBEDDED {
public:
explicit inline PageIterator(PagedSpace* space);
inline bool has_next();
inline Page* next();
private:
PagedSpace* space_;
Page* prev_page_; // Previous page returned.
// Next page that will be returned. Cached here so that we can use this
// iterator for operations that deallocate pages.
Page* next_page_;
};
// -----------------------------------------------------------------------------
// A space has a circular list of pages. The next page can be accessed via
// Page::next_page() call.
......@@ -2086,8 +2067,49 @@ class LocalAllocationBuffer {
AllocationInfo allocation_info_;
};
// STL-style forward iterator over the pages of a space. PAGE_TYPE is the
// page class being iterated (e.g. Page or LargePage); for incrementing it
// only needs to provide next_page(). The wrapped page pointer itself acts
// as the iterator position.
template <class PAGE_TYPE>
class PageIteratorImpl
    : public std::iterator<std::forward_iterator_tag, PAGE_TYPE> {
 public:
  explicit PageIteratorImpl(PAGE_TYPE* p) : p_(p) {}
  PageIteratorImpl(const PageIteratorImpl<PAGE_TYPE>& other) : p_(other.p_) {}
  // Dereferencing yields the raw page pointer.
  PAGE_TYPE* operator*() const { return p_; }
  // Iterators compare equal iff they reference the same page.
  // Comparison and dereference are const: they do not modify the iterator.
  bool operator==(const PageIteratorImpl<PAGE_TYPE>& rhs) const {
    return rhs.p_ == p_;
  }
  bool operator!=(const PageIteratorImpl<PAGE_TYPE>& rhs) const {
    return rhs.p_ != p_;
  }
  inline PageIteratorImpl<PAGE_TYPE>& operator++();
  inline PageIteratorImpl<PAGE_TYPE> operator++(int);

 private:
  PAGE_TYPE* p_;
};
typedef PageIteratorImpl<Page> PageIterator;
typedef PageIteratorImpl<LargePage> LargePageIterator;
// A range of new-space pages identified by the addresses [start, limit],
// iterable with the standard begin()/end() pattern. The constructor
// (defined inline elsewhere) asserts that the range is a valid semi-space
// range.
class NewSpacePageRange {
 public:
  typedef PageIterator iterator;
  inline NewSpacePageRange(Address start, Address limit);

  // First page of the range: the page containing |start_|.
  iterator begin() { return iterator(Page::FromAddress(start_)); }
  // One-past-the-end: the successor of the page containing |limit_|.
  // NOTE(review): limit_ is mapped via FromAllocationAreaAddress rather
  // than FromAddress — presumably so a limit at a page boundary resolves
  // to the preceding page; confirm against Page's implementation.
  iterator end() {
    return iterator(Page::FromAllocationAreaAddress(limit_)->next_page());
  }

 private:
  Address start_;
  Address limit_;
};
class PagedSpace : public Space {
public:
typedef PageIterator iterator;
static const intptr_t kCompactionMemoryWanted = 500 * KB;
// Creates a space with an id.
......@@ -2296,6 +2318,9 @@ class PagedSpace : public Space {
inline void UnlinkFreeListCategories(Page* page);
inline intptr_t RelinkFreeListCategories(Page* page);
iterator begin() { return iterator(anchor_.next_page()); }
iterator end() { return iterator(&anchor_); }
protected:
// PagedSpaces that should be included in snapshots have different, i.e.,
// smaller, initial pages.
......@@ -2350,7 +2375,6 @@ class PagedSpace : public Space {
friend class IncrementalMarking;
friend class MarkCompactCollector;
friend class PageIterator;
// Used in cctest.
friend class HeapTester;
......@@ -2401,6 +2425,8 @@ enum SemiSpaceId { kFromSpace = 0, kToSpace = 1 };
// space as a marking stack when tracing live objects.
class SemiSpace : public Space {
public:
typedef PageIterator iterator;
static void Swap(SemiSpace* from, SemiSpace* to);
SemiSpace(Heap* heap, SemiSpaceId semispace)
......@@ -2525,6 +2551,9 @@ class SemiSpace : public Space {
virtual void Verify();
#endif
iterator begin() { return iterator(anchor_.next_page()); }
iterator end() { return iterator(anchor()); }
private:
void RewindPages(Page* start, int num_pages);
......@@ -2555,7 +2584,6 @@ class SemiSpace : public Space {
int pages_used_;
friend class NewSpace;
friend class NewSpacePageIterator;
friend class SemiSpaceIterator;
};
......@@ -2584,35 +2612,6 @@ class SemiSpaceIterator : public ObjectIterator {
Address limit_;
};
// -----------------------------------------------------------------------------
// A PageIterator iterates the pages in a semi-space.
class NewSpacePageIterator BASE_EMBEDDED {
public:
// Make an iterator that runs over all pages in to-space.
explicit inline NewSpacePageIterator(NewSpace* space);
// Make an iterator that runs over all pages in the given semispace,
// even those not used in allocation.
explicit inline NewSpacePageIterator(SemiSpace* space);
// Make iterator that iterates from the page containing start
// to the page that contains limit in the same semispace.
inline NewSpacePageIterator(Address start, Address limit);
inline bool has_next();
inline Page* next();
private:
Page* prev_page_; // Previous page returned.
// Next page that will be returned. Cached here so that we can use this
// iterator for operations that deallocate pages.
Page* next_page_;
// Last page returned.
Page* last_page_;
};
// -----------------------------------------------------------------------------
// The young generation space.
//
......@@ -2621,6 +2620,8 @@ class NewSpacePageIterator BASE_EMBEDDED {
class NewSpace : public Space {
public:
typedef PageIterator iterator;
explicit NewSpace(Heap* heap)
: Space(heap, NEW_SPACE, NOT_EXECUTABLE),
to_space_(heap, kToSpace),
......@@ -2884,6 +2885,9 @@ class NewSpace : public Space {
void PauseAllocationObservers() override;
void ResumeAllocationObservers() override;
iterator begin() { return to_space_.begin(); }
iterator end() { return to_space_.end(); }
private:
// Update allocation info to match the current to-space page.
void UpdateAllocationInfo();
......@@ -3027,6 +3031,8 @@ class MapSpace : public PagedSpace {
class LargeObjectSpace : public Space {
public:
typedef LargePageIterator iterator;
LargeObjectSpace(Heap* heap, AllocationSpace id);
virtual ~LargeObjectSpace();
......@@ -3088,6 +3094,9 @@ class LargeObjectSpace : public Space {
// Collect code statistics.
void CollectCodeStatistics();
iterator begin() { return iterator(first_page_); }
iterator end() { return iterator(nullptr); }
#ifdef VERIFY_HEAP
virtual void Verify();
#endif
......@@ -3123,16 +3132,6 @@ class LargeObjectIterator : public ObjectIterator {
LargePage* current_;
};
class LargePageIterator BASE_EMBEDDED {
public:
explicit inline LargePageIterator(LargeObjectSpace* space);
inline LargePage* next();
private:
LargePage* next_page_;
};
// Iterates over the chunks (pages and large object pages) that can contain
// pointers to new space or to evacuation candidates.
class MemoryChunkIterator BASE_EMBEDDED {
......@@ -3150,6 +3149,7 @@ class MemoryChunkIterator BASE_EMBEDDED {
kLargeObjectState,
kFinishedState
};
Heap* heap_;
State state_;
PageIterator old_iterator_;
PageIterator code_iterator_;
......
......@@ -31,9 +31,7 @@ void Deserializer::DecodeReservation(
void Deserializer::FlushICacheForNewIsolate() {
DCHECK(!deserializing_user_code_);
// The entire isolate is newly deserialized. Simply flush all code pages.
PageIterator it(isolate_->heap()->code_space());
while (it.has_next()) {
Page* p = it.next();
for (Page* p : *isolate_->heap()->code_space()) {
Assembler::FlushICache(isolate_, p->area_start(),
p->area_end() - p->area_start());
}
......
......@@ -18,10 +18,8 @@ void SealCurrentObjects(Heap* heap) {
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
PageIterator it(heap->old_space());
heap->old_space()->EmptyAllocationInfo();
while (it.has_next()) {
Page* page = it.next();
for (Page* page : *heap->old_space()) {
page->MarkNeverAllocateForTesting();
}
}
......@@ -143,9 +141,8 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
space->EmptyAllocationInfo();
PageIterator pit(space);
while (pit.has_next()) {
pit.next()->MarkNeverAllocateForTesting();
for (Page* page : *space) {
page->MarkNeverAllocateForTesting();
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment