Commit dfc6b4dd authored by Ulan Degenbaev, committed by Commit Bot

[heap] New live byte tracking.

This patch changes how space size and capacity are updated in GC:
- space capacity changes only when a page is added to or removed from
  the space.
- space size is reset to zero before sweeping and incremented by
  page->live_bytes_count_ for each to-be-swept page.
- space size is refined after sweeping using the accurate
  page->allocated_bytes counter produced by the sweeper (see the toy
  model sketched below).
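
To make the flow concrete, here is a minimal toy model of the accounting
scheme above. PageModel, SpaceModel, and all member names are simplified
stand-ins invented for illustration, not V8's actual classes:

#include <cstddef>
#include <vector>

// Simplified stand-in for Page: one counter per accounting role.
struct PageModel {
  size_t size = 0;             // page.size; contributes to space capacity
  size_t area_size = 0;        // usable object area of the page
  size_t live_bytes = 0;       // page->live_bytes_count_, set by the marker
  size_t allocated_bytes = 0;  // precise counter produced by the sweeper
};

// Simplified stand-in for PagedSpace.
struct SpaceModel {
  size_t capacity = 0;
  size_t size = 0;
  std::vector<PageModel*> pages;

  // Capacity changes only when a page is added (or, symmetrically, removed).
  void AddPage(PageModel* p) {
    pages.push_back(p);
    capacity += p->size;
  }

  // Before sweeping: reset the size to zero, then re-add each to-be-swept
  // page's live bytes as counted by the marker.
  void PrepareForSweeping() {
    size = 0;
    for (PageModel* p : pages) size += p->live_bytes;
  }

  // After a page is swept: swap the marker's estimate for the sweeper's
  // precise allocated_bytes counter.
  void RefineAfterSweeping(PageModel* p) {
    size -= p->live_bytes;
    size += p->allocated_bytes;
  }
};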

Invariants:
1. space.capacity = sum [page.size | for page in space].
2. After marking, before sweeping:
   a) space.size = sum [page.live_bytes_count | for page in space].
3. After sweeping, before marking ends:
   a) space.size = sum [page.allocated_bytes | for page in space].
   b) page.allocated_bytes >= (sum [object.size | for object in page] +
         page.linear_allocation_area).
   c) page.area_size = page.allocated_bytes + page.wasted_memory +
         sum [free_list_entry.size | for free_list_entry in page].

3.b becomes an equality if the mutator performs no array trimming and no
object slack tracking during sweeping.
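
Continuing the toy model above, invariants 1 and 3.a amount to a check of
the following shape. This is only a sketch: the real
PagedSpace::VerifyCountersAfterSweeping added by this patch is not shown
here (its file's diff is collapsed below), so this body is an assumption.

#include <cassert>

void VerifyCountersAfterSweeping(const SpaceModel& space) {
  size_t capacity = 0;
  size_t allocated = 0;
  for (const PageModel* p : space.pages) {
    capacity += p->size;              // invariant 1
    allocated += p->allocated_bytes;  // invariant 3.a
  }
  assert(capacity == space.capacity);
  assert(allocated == space.size);
}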

Bug: chromium:694255
Change-Id: Ic8d16a8171187a113fee2df8bf3c2a4c5e77bc08
Reviewed-on: https://chromium-review.googlesource.com/618889
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47409}
parent 5c741cbd
@@ -5257,6 +5257,23 @@ void Heap::VerifyRememberedSetFor(HeapObject* object) {
 }
 #endif
 
+#ifdef DEBUG
+void Heap::VerifyCountersAfterSweeping() {
+  PagedSpaces spaces(this);
+  for (PagedSpace* space = spaces.next(); space != nullptr;
+       space = spaces.next()) {
+    space->VerifyCountersAfterSweeping();
+  }
+}
+
+void Heap::VerifyCountersBeforeConcurrentSweeping() {
+  PagedSpaces spaces(this);
+  for (PagedSpace* space = spaces.next(); space != nullptr;
+       space = spaces.next()) {
+    space->VerifyCountersBeforeConcurrentSweeping();
+  }
+}
+#endif
+
 void Heap::ZapFromSpace() {
   if (!new_space_->IsFromSpaceCommitted()) return;
...
@@ -1496,6 +1496,9 @@ class Heap {
 #endif
 
 #ifdef DEBUG
+  void VerifyCountersAfterSweeping();
+  void VerifyCountersBeforeConcurrentSweeping();
+
   void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
 
   void Print();
...
@@ -1044,6 +1044,9 @@ void IncrementalMarking::FinalizeSweeping() {
     heap_->mark_compact_collector()->EnsureSweepingCompleted();
   }
   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
+#ifdef DEBUG
+    heap_->VerifyCountersAfterSweeping();
+#endif
    StartMarking();
   }
 }
...
@@ -863,7 +863,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
     CHECK_NULL(p->typed_slot_set<OLD_TO_OLD>());
     CHECK(p->SweepingDone());
     DCHECK(p->area_size() == area_size);
-    pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
+    pages.push_back(std::make_pair(p->allocated_bytes(), p));
   }
 
   int candidate_count = 0;
@@ -1044,6 +1044,10 @@ void MarkCompactCollector::Prepare() {
 
 void MarkCompactCollector::Finish() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
 
+#ifdef DEBUG
+  heap()->VerifyCountersBeforeConcurrentSweeping();
+#endif
+
   if (!heap()->delay_sweeper_tasks_for_testing_) {
     sweeper().StartSweeperTasks();
   }
@@ -3703,10 +3707,15 @@ int MarkCompactCollector::Sweeper::RawSweep(
     skip_list->Clear();
   }
 
+  intptr_t live_bytes = 0;
   intptr_t freed_bytes = 0;
   intptr_t max_freed_bytes = 0;
   int curr_region = -1;
 
+  // Set the allocated_bytes counter to area_size. The free operations below
+  // will decrease the counter to the actual live bytes.
+  p->ResetAllocatedBytes();
+
   for (auto object_and_size :
        LiveObjectRange<kBlackObjects>(p, marking_state_->bitmap(p))) {
     HeapObject* const object = object_and_size.first;
@@ -3738,6 +3747,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
     }
     Map* map = object->synchronized_map();
     int size = object->SizeFromMap(map);
+    live_bytes += size;
     if (rebuild_skip_list) {
       int new_region_start = SkipList::RegionNumber(free_end);
       int new_region_end =
@@ -3788,9 +3798,18 @@ int MarkCompactCollector::Sweeper::RawSweep(
     }
   }
 
-  // Clear the mark bits of that page and reset live bytes count.
-  marking_state_->ClearLiveness(p);
-
+  marking_state_->bitmap(p)->Clear();
+  if (free_list_mode == IGNORE_FREE_LIST) {
+    marking_state_->SetLiveBytes(p, 0);
+    // We did not free memory, so we have to adjust allocated bytes here.
+    intptr_t freed_bytes = p->area_size() - live_bytes;
+    p->DecreaseAllocatedBytes(freed_bytes);
+  } else {
+    // Keep the old live bytes counter of the page until RefillFreeList,
+    // where the space size is refined.
+    // The allocated_bytes() counter is precisely the total size of objects.
+    DCHECK_EQ(live_bytes, p->allocated_bytes());
+  }
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   if (free_list_mode == IGNORE_FREE_LIST) return 0;
   return static_cast<int>(FreeList::GuaranteedAllocatable(max_freed_bytes));
@@ -4539,9 +4558,10 @@ void MarkCompactCollector::Sweeper::PrepareToBeSweptPage(AllocationSpace space,
   page->concurrent_sweeping_state().SetValue(Page::kSweepingPending);
   DCHECK_GE(page->area_size(),
             static_cast<size_t>(marking_state_->live_bytes(page)));
-  size_t to_sweep = page->area_size() - marking_state_->live_bytes(page);
-  if (space != NEW_SPACE)
-    heap_->paged_space(space)->accounting_stats_.ShrinkSpace(to_sweep);
+  if (space != NEW_SPACE) {
+    heap_->paged_space(space)->IncreaseAllocatedBytes(
+        marking_state_->live_bytes(page), page);
+  }
 }
 
 Page* MarkCompactCollector::Sweeper::GetSweepingPageSafe(
@@ -4582,6 +4602,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
           Heap::ShouldZapGarbage()
              ? FreeSpaceTreatmentMode::ZAP_FREE_SPACE
              : FreeSpaceTreatmentMode::IGNORE_FREE_SPACE);
+      space->IncreaseAllocatedBytes(p->allocated_bytes(), p);
       continue;
     }
...
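The two allocated-bytes paths at the end of the RawSweep hunk above can be
restated in the toy model from the commit message. FreeListMode and
FinishSweptPage are hypothetical names for illustration only:

#include <cassert>

enum class FreeListMode { kRebuildFreeList, kIgnoreFreeList };

// After ResetAllocatedBytes(), the page's counter starts at area_size.
void FinishSweptPage(PageModel* p, size_t live_bytes, FreeListMode mode) {
  if (mode == FreeListMode::kIgnoreFreeList) {
    // No Free() calls ran, so allocated_bytes still equals area_size and
    // has to be lowered to the live bytes by hand.
    p->allocated_bytes -= p->area_size - live_bytes;
  } else {
    // Every dead range went through Free(), which already decremented the
    // counter, so it now equals the total size of live objects exactly.
    assert(p->allocated_bytes == live_bytes);
  }
}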
@@ -168,7 +168,8 @@ intptr_t PagedSpace::RelinkFreeListCategories(Page* page) {
     added += category->available();
     category->Relink();
   });
-  DCHECK_EQ(page->AvailableInFreeList(), page->available_in_free_list());
+  DCHECK_EQ(page->AvailableInFreeList(),
+            page->AvailableInFreeListFromAllocatedBytes());
   return added;
 }
...
This diff is collapsed.
@@ -184,7 +184,7 @@ class FreeListCategory {
   // category is currently unlinked.
   void Relink();
 
-  bool Free(FreeSpace* node, size_t size_in_bytes, FreeMode mode);
+  void Free(FreeSpace* node, size_t size_in_bytes, FreeMode mode);
 
   // Picks a node from the list and stores its size in |node_size|. Returns
   // nullptr if the category is empty.
@@ -677,8 +677,10 @@ class MemoryChunk {
   base::AtomicValue<ConcurrentSweepingState> concurrent_sweeping_;
 
-  // PagedSpace free-list statistics.
-  base::AtomicNumber<intptr_t> available_in_free_list_;
+  // Bytes allocated on the page, which includes all objects on the page
+  // and the linear allocation area.
+  base::AtomicNumber<intptr_t> allocated_bytes_;
+  // Freed memory that was not added to the free list.
   base::AtomicNumber<intptr_t> wasted_memory_;
 
   // next_chunk_ holds a pointer of type MemoryChunk
@@ -704,6 +706,7 @@ class MemoryChunk {
   friend class MemoryChunkValidator;
   friend class MinorMarkingState;
   friend class MinorNonAtomicMarkingState;
+  friend class PagedSpace;
 };
 
 static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
@@ -801,9 +804,9 @@ class Page : public MemoryChunk {
   size_t AvailableInFreeList();
 
-  size_t LiveBytesFromFreeList() {
-    DCHECK_GE(area_size(), wasted_memory() + available_in_free_list());
-    return area_size() - wasted_memory() - available_in_free_list();
+  size_t AvailableInFreeListFromAllocatedBytes() {
+    DCHECK_GE(area_size(), wasted_memory() + allocated_bytes());
+    return area_size() - wasted_memory() - allocated_bytes();
   }
 
   FreeListCategory* free_list_category(FreeListCategoryType type) {
@@ -814,17 +817,19 @@ class Page : public MemoryChunk {
   size_t wasted_memory() { return wasted_memory_.Value(); }
   void add_wasted_memory(size_t waste) { wasted_memory_.Increment(waste); }
-  size_t available_in_free_list() { return available_in_free_list_.Value(); }
-  void add_available_in_free_list(size_t available) {
-    DCHECK_LE(available, area_size());
-    available_in_free_list_.Increment(available);
+  size_t allocated_bytes() { return allocated_bytes_.Value(); }
+  void IncreaseAllocatedBytes(size_t bytes) {
+    DCHECK_LE(bytes, area_size());
+    allocated_bytes_.Increment(bytes);
   }
-  void remove_available_in_free_list(size_t available) {
-    DCHECK_LE(available, area_size());
-    DCHECK_GE(available_in_free_list(), available);
-    available_in_free_list_.Decrement(available);
+  void DecreaseAllocatedBytes(size_t bytes) {
+    DCHECK_LE(bytes, area_size());
+    DCHECK_GE(allocated_bytes(), bytes);
+    allocated_bytes_.Decrement(bytes);
   }
+  void ResetAllocatedBytes();
 
   size_t ShrinkToHighWaterMark();
 
   V8_EXPORT_PRIVATE void CreateBlackArea(Address start, Address end);
@@ -1611,47 +1616,39 @@ class AllocationStats BASE_EMBEDDED {
   void Clear() {
     capacity_ = 0;
     max_capacity_ = 0;
-    size_ = 0;
+    ClearSize();
   }
 
-  void ClearSize() { size_ = capacity_; }
+  void ClearSize() {
+    size_ = 0;
+#ifdef DEBUG
+    allocated_on_page_.clear();
+#endif
+  }
 
   // Accessors for the allocation statistics.
   size_t Capacity() { return capacity_; }
   size_t MaxCapacity() { return max_capacity_; }
   size_t Size() { return size_; }
+#ifdef DEBUG
+  size_t AllocatedOnPage(Page* page) { return allocated_on_page_[page]; }
+#endif
 
-  // Grow the space by adding available bytes. They are initially marked as
-  // being in use (part of the size), but will normally be immediately freed,
-  // putting them on the free list and removing them from size_.
-  void ExpandSpace(size_t bytes) {
-    DCHECK_GE(size_ + bytes, size_);
-    DCHECK_GE(capacity_ + bytes, capacity_);
-    capacity_ += bytes;
-    size_ += bytes;
-    if (capacity_ > max_capacity_) {
-      max_capacity_ = capacity_;
-    }
-  }
-
-  // Shrink the space by removing available bytes. Since shrinking is done
-  // during sweeping, bytes have been marked as being in use (part of the size)
-  // and are hereby freed.
-  void ShrinkSpace(size_t bytes) {
-    DCHECK_GE(capacity_, bytes);
-    DCHECK_GE(size_, bytes);
-    capacity_ -= bytes;
-    size_ -= bytes;
-  }
-
-  void AllocateBytes(size_t bytes) {
+  void IncreaseAllocatedBytes(size_t bytes, Page* page) {
     DCHECK_GE(size_ + bytes, size_);
     size_ += bytes;
+#ifdef DEBUG
+    allocated_on_page_[page] += bytes;
+#endif
   }
 
-  void DeallocateBytes(size_t bytes) {
+  void DecreaseAllocatedBytes(size_t bytes, Page* page) {
    DCHECK_GE(size_, bytes);
    size_ -= bytes;
+#ifdef DEBUG
+    DCHECK_GE(allocated_on_page_[page], bytes);
+    allocated_on_page_[page] -= bytes;
+#endif
   }
 
   void DecreaseCapacity(size_t bytes) {
@@ -1663,16 +1660,8 @@ class AllocationStats BASE_EMBEDDED {
   void IncreaseCapacity(size_t bytes) {
     DCHECK_GE(capacity_ + bytes, capacity_);
     capacity_ += bytes;
-  }
-
-  // Merge |other| into |this|.
-  void Merge(const AllocationStats& other) {
-    DCHECK_GE(capacity_ + other.capacity_, capacity_);
-    DCHECK_GE(size_ + other.size_, size_);
-    capacity_ += other.capacity_;
-    size_ += other.size_;
-    if (other.max_capacity_ > max_capacity_) {
-      max_capacity_ = other.max_capacity_;
+    if (capacity_ > max_capacity_) {
+      max_capacity_ = capacity_;
     }
   }
@@ -1686,6 +1675,10 @@ class AllocationStats BASE_EMBEDDED {
   // |size_|: The number of allocated bytes.
   size_t size_;
 
+#ifdef DEBUG
+  std::unordered_map<Page*, size_t, Page::Hasher> allocated_on_page_;
+#endif
 };
 
 // A free list maintaining free blocks of memory. The free list is organized in
@@ -2062,7 +2055,8 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
   // no attempt to add area to free list is made.
   size_t Free(Address start, size_t size_in_bytes) {
     size_t wasted = free_list_.Free(start, size_in_bytes, kLinkCategory);
-    accounting_stats_.DeallocateBytes(size_in_bytes);
+    Page* page = Page::FromAddress(start);
+    accounting_stats_.DecreaseAllocatedBytes(size_in_bytes, page);
     DCHECK_GE(size_in_bytes, wasted);
     return size_in_bytes - wasted;
   }
@@ -2093,15 +2087,26 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
   void MarkAllocationInfoBlack();
   void UnmarkAllocationInfo();
 
-  void AccountAllocatedBytes(size_t bytes) {
-    accounting_stats_.AllocateBytes(bytes);
+  void DecreaseAllocatedBytes(size_t bytes, Page* page) {
+    accounting_stats_.DecreaseAllocatedBytes(bytes, page);
+  }
+  void IncreaseAllocatedBytes(size_t bytes, Page* page) {
+    accounting_stats_.IncreaseAllocatedBytes(bytes, page);
+  }
+  void DecreaseCapacity(size_t bytes) {
+    accounting_stats_.DecreaseCapacity(bytes);
+  }
+  void IncreaseCapacity(size_t bytes) {
+    accounting_stats_.IncreaseCapacity(bytes);
   }
 
-  void IncreaseCapacity(size_t bytes);
-
   // Releases an unused page and shrinks the space.
   void ReleasePage(Page* page);
 
+  void AccountAddedPage(Page* page);
+  void AccountRemovedPage(Page* page);
+  void RefineAllocatedBytesAfterSweeping(Page* page);
+
   // The dummy page that anchors the linked list of pages.
   Page* anchor() { return &anchor_; }
@@ -2116,6 +2121,8 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
 #endif
 
 #ifdef DEBUG
+  void VerifyCountersAfterSweeping();
+  void VerifyCountersBeforeConcurrentSweeping();
+
   // Print meta info and objects in this space.
   void Print() override;
@@ -2162,6 +2169,8 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
   // using the high water mark.
   void ShrinkImmortalImmovablePages();
 
+  size_t ShrinkPageToHighWaterMark(Page* page);
+
   std::unique_ptr<ObjectIterator> GetObjectIterator() override;
 
   // Remove a page if it has at least |size_in_bytes| bytes available that can
...
@@ -176,7 +176,6 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
   }
   space->EmptyAllocationInfo();
   space->ResetFreeList();
-  space->ClearStats();
 }
 
 void AbandonCurrentlyFreeMemory(PagedSpace* space) {
...
@@ -609,13 +609,14 @@ TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
   Page* page = Page::FromAddress(array->address());
 
   // Reset space so high water mark is consistent.
-  CcTest::heap()->old_space()->ResetFreeList();
-  CcTest::heap()->old_space()->EmptyAllocationInfo();
+  PagedSpace* old_space = CcTest::heap()->old_space();
+  old_space->ResetFreeList();
+  old_space->EmptyAllocationInfo();
 
   HeapObject* filler =
       HeapObject::FromAddress(array->address() + array->Size());
   CHECK(filler->IsFreeSpace());
 
-  size_t shrunk = page->ShrinkToHighWaterMark();
+  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
   size_t should_have_shrunk =
       RoundDown(static_cast<size_t>(Page::kAllocatableMemory - array->Size()),
                 base::OS::CommitPageSize());
@@ -636,10 +637,11 @@ TEST(ShrinkPageToHighWaterMarkNoFiller) {
   CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);
 
   // Reset space so high water mark and fillers are consistent.
-  CcTest::heap()->old_space()->ResetFreeList();
-  CcTest::heap()->old_space()->EmptyAllocationInfo();
+  PagedSpace* old_space = CcTest::heap()->old_space();
+  old_space->ResetFreeList();
+  old_space->EmptyAllocationInfo();
 
-  const size_t shrunk = page->ShrinkToHighWaterMark();
+  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
   CHECK_EQ(0u, shrunk);
 }
@@ -658,14 +660,15 @@ TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
   CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);
 
   // Reset space so high water mark and fillers are consistent.
-  CcTest::heap()->old_space()->ResetFreeList();
-  CcTest::heap()->old_space()->EmptyAllocationInfo();
+  PagedSpace* old_space = CcTest::heap()->old_space();
+  old_space->ResetFreeList();
+  old_space->EmptyAllocationInfo();
 
   HeapObject* filler =
       HeapObject::FromAddress(array->address() + array->Size());
   CHECK_EQ(filler->map(), CcTest::heap()->one_pointer_filler_map());
 
-  const size_t shrunk = page->ShrinkToHighWaterMark();
+  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
   CHECK_EQ(0u, shrunk);
 }
@@ -684,14 +687,15 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
   CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);
 
   // Reset space so high water mark and fillers are consistent.
-  CcTest::heap()->old_space()->ResetFreeList();
-  CcTest::heap()->old_space()->EmptyAllocationInfo();
+  PagedSpace* old_space = CcTest::heap()->old_space();
+  old_space->ResetFreeList();
+  old_space->EmptyAllocationInfo();
 
   HeapObject* filler =
       HeapObject::FromAddress(array->address() + array->Size());
   CHECK_EQ(filler->map(), CcTest::heap()->two_pointer_filler_map());
 
-  const size_t shrunk = page->ShrinkToHighWaterMark();
+  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
   CHECK_EQ(0u, shrunk);
 }
...
@@ -53,6 +53,7 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardownAfterAlreadyFreeingPooled) {
       static_cast<PagedSpace*>(heap()->old_space()),
       Executability::NOT_EXECUTABLE);
   heap()->old_space()->UnlinkFreeListCategories(page);
+  heap()->old_space()->AccountRemovedPage(page);
   EXPECT_NE(nullptr, page);
   const int page_size = getpagesize();
   void* start_address = static_cast<void*>(page->address());
@@ -72,6 +73,7 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardown) {
       static_cast<PagedSpace*>(heap()->old_space()),
       Executability::NOT_EXECUTABLE);
   heap()->old_space()->UnlinkFreeListCategories(page);
+  heap()->old_space()->AccountRemovedPage(page);
   EXPECT_NE(nullptr, page);
   const int page_size = getpagesize();
   void* start_address = static_cast<void*>(page->address());
...