Commit 4bf80431 authored by Hannes Payer, committed by Commit Bot

[heap] Refactor and clean-up runtime allocation.

Bug: chromium:796896
Change-Id: I7f46f82d079502b8ec04c5e3be5f803ec9e62ffa
Reviewed-on: https://chromium-review.googlesource.com/854797
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50424}
parent 8fbc6a05
@@ -1845,7 +1845,7 @@ void Heap::EvacuateYoungGeneration() {
   if (!new_space()->Rebalance()) {
     FatalProcessOutOfMemory("NewSpace::Rebalance");
   }
-  new_space()->ResetAllocationInfo();
+  new_space()->ResetLinearAllocationArea();
   new_space()->set_age_mark(new_space()->top());
   // Fix up special trackers.
@@ -1956,7 +1956,7 @@ void Heap::Scavenge() {
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_->Flip();
-  new_space_->ResetAllocationInfo();
+  new_space_->ResetLinearAllocationArea();

   ItemParallelJob job(isolate()->cancelable_task_manager(),
                       &parallel_scavenge_semaphore_);
@@ -5497,7 +5497,7 @@ void Heap::DisableInlineAllocation() {
   CodeSpaceMemoryModificationScope modification_scope(this);
   for (PagedSpace* space = spaces.next(); space != nullptr;
        space = spaces.next()) {
-    space->EmptyAllocationInfo();
+    space->FreeLinearAllocationArea();
   }
 }
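Aside: per the PagedSpace header comment later in this diff ("Empty space linear allocation area, returning unused area to free list"), FreeLinearAllocationArea hands the unused [top, limit) span back to the free list. A minimal standalone sketch of that idea, not V8 code; the stub types are illustrative stand-ins:

#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

using Address = uint8_t*;

struct FreeListStub {
  // Records freed spans; the real FreeList buckets them by size category.
  std::vector<std::pair<Address, size_t>> spans;
  void Free(Address start, size_t size_in_bytes) {
    if (size_in_bytes > 0) spans.emplace_back(start, size_in_bytes);
  }
};

struct PagedSpaceStub {
  Address top = nullptr;
  Address limit = nullptr;
  FreeListStub free_list;

  // Sketch of FreeLinearAllocationArea: give the unallocated tail
  // [top, limit) back to the free list and invalidate the area.
  void FreeLinearAllocationArea() {
    if (top != nullptr) free_list.Free(top, static_cast<size_t>(limit - top));
    top = limit = nullptr;
  }
};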
@@ -435,9 +435,9 @@ void IncrementalMarking::StartBlackAllocation() {
   DCHECK(!black_allocation_);
   DCHECK(IsMarking());
   black_allocation_ = true;
-  heap()->old_space()->MarkAllocationInfoBlack();
-  heap()->map_space()->MarkAllocationInfoBlack();
-  heap()->code_space()->MarkAllocationInfoBlack();
+  heap()->old_space()->MarkLinearAllocationAreaBlack();
+  heap()->map_space()->MarkLinearAllocationAreaBlack();
+  heap()->code_space()->MarkLinearAllocationAreaBlack();
   if (FLAG_trace_incremental_marking) {
     heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] Black allocation started\n");
@@ -447,9 +447,9 @@ void IncrementalMarking::StartBlackAllocation() {
 void IncrementalMarking::PauseBlackAllocation() {
   DCHECK(FLAG_black_allocation);
   DCHECK(IsMarking());
-  heap()->old_space()->UnmarkAllocationInfo();
-  heap()->map_space()->UnmarkAllocationInfo();
-  heap()->code_space()->UnmarkAllocationInfo();
+  heap()->old_space()->UnmarkLinearAllocationArea();
+  heap()->map_space()->UnmarkLinearAllocationArea();
+  heap()->code_space()->UnmarkLinearAllocationArea();
   if (FLAG_trace_incremental_marking) {
     heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] Black allocation paused\n");
@@ -33,7 +33,7 @@ class LocalAllocator {
                             compaction_spaces_.Get(CODE_SPACE));
     // Give back remaining LAB space if this LocalAllocator's new space LAB
     // sits right next to new space allocation top.
-    const AllocationInfo info = new_space_lab_.Close();
+    const LinearAllocationArea info = new_space_lab_.Close();
     const Address top = new_space_->top();
     if (info.limit() != nullptr && info.limit() == top) {
       DCHECK_NOT_NULL(info.top());
@@ -2286,7 +2286,7 @@ void MinorMarkCompactCollector::EvacuatePrologue() {
     new_space_evacuation_pages_.push_back(p);
   }
   new_space->Flip();
-  new_space->ResetAllocationInfo();
+  new_space->ResetLinearAllocationArea();
 }

 void MinorMarkCompactCollector::EvacuateEpilogue() {
@@ -2932,7 +2932,7 @@ void MarkCompactCollector::EvacuatePrologue() {
     new_space_evacuation_pages_.push_back(p);
   }
   new_space->Flip();
-  new_space->ResetAllocationInfo();
+  new_space->ResetLinearAllocationArea();

   // Old space.
   DCHECK(old_space_evacuation_pages_.empty());
@@ -301,8 +301,7 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
 bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
   if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit())
     return true;
-  if (free_list_.Allocate(size_in_bytes)) return true;
-  return SlowAllocateRaw(size_in_bytes);
+  return SlowRefillLinearAllocationArea(size_in_bytes);
 }

 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
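The hunk above folds the free-list probe into the slow path: the bump-pointer fast check stays inline at the call site, and every miss now goes through SlowRefillLinearAllocationArea. A compilable sketch of that control flow, with stub types standing in for the real PagedSpace members (the stub bodies are assumptions, not the commit's code):

#include <cstdint>

using Address = uint8_t*;

struct PagedSpaceStub {
  Address top = nullptr;    // current linear allocation area top
  Address limit = nullptr;  // current linear allocation area limit

  // Stub: in the commit this tries the free list (via
  // RefillLinearAllocationAreaFromFreeList), sweeping, or a new page.
  bool SlowRefillLinearAllocationArea(int /*size_in_bytes*/) { return false; }

  bool EnsureLinearAllocationArea(int size_in_bytes) {
    // Fast path: the current area still has room for the request.
    if (top != nullptr && top + size_in_bytes <= limit) return true;
    // Slow path: refill the area; false tells the caller to retry after GC.
    return SlowRefillLinearAllocationArea(size_in_bytes);
  }
};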
@@ -490,7 +489,7 @@ size_t LargeObjectSpace::Available() {

 LocalAllocationBuffer LocalAllocationBuffer::InvalidBuffer() {
-  return LocalAllocationBuffer(nullptr, AllocationInfo(nullptr, nullptr));
+  return LocalAllocationBuffer(nullptr, LinearAllocationArea(nullptr, nullptr));
 }
@@ -503,7 +502,7 @@ LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
   USE(ok);
   DCHECK(ok);
   Address top = HeapObject::cast(obj)->address();
-  return LocalAllocationBuffer(heap, AllocationInfo(top, top + size));
+  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
 }
This diff is collapsed.
@@ -34,12 +34,12 @@ class HeapTester;
 class TestCodeRangeScope;
 }  // namespace heap

-class AllocationInfo;
 class AllocationObserver;
 class CompactionSpace;
 class CompactionSpaceCollection;
 class FreeList;
 class Isolate;
+class LinearAllocationArea;
 class LocalArrayBufferTracker;
 class MemoryAllocator;
 class MemoryChunk;
@@ -1562,10 +1562,10 @@ class V8_EXPORT_PRIVATE HeapObjectIterator : public ObjectIterator {

 // An abstraction of allocation and relocation pointers in a page-structured
 // space.
-class AllocationInfo {
+class LinearAllocationArea {
  public:
-  AllocationInfo() : top_(nullptr), limit_(nullptr) {}
-  AllocationInfo(Address top, Address limit) : top_(top), limit_(limit) {}
+  LinearAllocationArea() : top_(nullptr), limit_(nullptr) {}
+  LinearAllocationArea(Address top, Address limit) : top_(top), limit_(limit) {}

   void Reset(Address top, Address limit) {
     set_top(top);
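The class rename here is mechanical; top_ and limit_ keep their meaning as the bounds of a bump-pointer region. A usage sketch of the renamed class follows; the AllocateOrNull helper is illustrative only and not part of this commit:

#include <cstddef>
#include <cstdint>

using Address = uint8_t*;

class LinearAllocationArea {
 public:
  LinearAllocationArea() : top_(nullptr), limit_(nullptr) {}
  LinearAllocationArea(Address top, Address limit) : top_(top), limit_(limit) {}

  Address top() const { return top_; }
  Address limit() const { return limit_; }

  // Illustrative helper: bump-pointer allocation within [top_, limit_);
  // nullptr means the caller must refill the area via the slow path.
  Address AllocateOrNull(size_t size_in_bytes) {
    if (top_ == nullptr || static_cast<size_t>(limit_ - top_) < size_in_bytes)
      return nullptr;
    Address result = top_;
    top_ += size_in_bytes;
    return result;
  }

 private:
  Address top_;
  Address limit_;
};

// Example: carving two 16-byte objects out of a 64-byte buffer.
//   uint8_t buffer[64];
//   LinearAllocationArea area(buffer, buffer + sizeof(buffer));
//   Address a = area.AllocateOrNull(16);  // == buffer
//   Address b = area.AllocateOrNull(16);  // == buffer + 16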
@@ -1763,10 +1763,11 @@ class V8_EXPORT_PRIVATE FreeList {
   // and the size should be a non-zero multiple of the word size.
   size_t Free(Address start, size_t size_in_bytes, FreeMode mode);

-  // Finds a node of size at least size_in_bytes and sets up a linear allocation
-  // area using this node. Returns false if there is no such node and the caller
-  // has to retry allocation after collecting garbage.
-  MUST_USE_RESULT bool Allocate(size_t size_in_bytes);
+  // Allocates a free space node from the free list of at least size_in_bytes
+  // bytes. Returns the actual node size in node_size which can be bigger than
+  // size_in_bytes. This method returns null if the allocation request cannot be
+  // handled by the free list.
+  MUST_USE_RESULT FreeSpace* Allocate(size_t size_in_bytes, size_t* node_size);

   // Clear the free list.
   void Reset();
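Note the new Allocate contract: it returns the FreeSpace node and its actual size instead of setting up the linear allocation area itself; callers such as RefillLinearAllocationAreaFromFreeList (declared later in this diff) now do the area setup. A hedged sketch of such a caller, with stubs in place of the real FreeSpace/FreeList types:

#include <cstddef>
#include <cstdint>

using Address = uint8_t*;

struct FreeSpaceStub {
  Address start = nullptr;
  Address address() const { return start; }
};

struct FreeListStub {
  // Stub with the new shape: a node of at least size_in_bytes, its actual
  // size in *node_size, or nullptr if the free list cannot serve it.
  FreeSpaceStub* Allocate(size_t /*size_in_bytes*/, size_t* node_size) {
    *node_size = 0;
    return nullptr;  // always empty in this sketch
  }
};

// Assumed caller shape, modeled on RefillLinearAllocationAreaFromFreeList:
bool RefillFromFreeList(FreeListStub* free_list, size_t size_in_bytes,
                        Address* top, Address* limit) {
  size_t node_size = 0;
  FreeSpaceStub* node = free_list->Allocate(size_in_bytes, &node_size);
  if (node == nullptr) return false;  // fall back to sweeping or GC
  // node_size may exceed size_in_bytes; the whole node backs the new area.
  *top = node->address();
  *limit = node->address() + node_size;
  return true;
}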
@@ -1865,8 +1866,6 @@ class V8_EXPORT_PRIVATE FreeList {
   static const size_t kMediumAllocationMax = kSmallListMax;
   static const size_t kLargeAllocationMax = kMediumListMax;

-  FreeSpace* FindNodeFor(size_t size_in_bytes, size_t* node_size);
-
   // Walks all available categories for a given |type| and tries to retrieve
   // a node. Returns nullptr if the category is empty.
   FreeSpace* FindNodeIn(FreeListCategoryType type, size_t* node_size);
@@ -1953,13 +1952,13 @@ class LocalAllocationBuffer {
   inline bool TryFreeLast(HeapObject* object, int object_size);

   // Close a LAB, effectively invalidating it. Returns the unused area.
-  AllocationInfo Close();
+  LinearAllocationArea Close();

  private:
-  LocalAllocationBuffer(Heap* heap, AllocationInfo allocation_info);
+  LocalAllocationBuffer(Heap* heap, LinearAllocationArea allocation_info);

   Heap* heap_;
-  AllocationInfo allocation_info_;
+  LinearAllocationArea allocation_info_;
 };

 class SpaceWithLinearArea : public Space {
@@ -2014,7 +2013,7 @@ class SpaceWithLinearArea : public Space {
   V8_EXPORT_PRIVATE void StartNextInlineAllocationStep() override;

   // TODO(ofrobots): make these private after refactoring is complete.
-  AllocationInfo allocation_info_;
+  LinearAllocationArea allocation_info_;
   Address top_on_previous_step_;
 };
@@ -2131,11 +2130,11 @@ class V8_EXPORT_PRIVATE PagedSpace
   void ResetFreeList();

-  // Empty space allocation info, returning unused area to free list.
-  void EmptyAllocationInfo();
+  // Empty space linear allocation area, returning unused area to free list.
+  void FreeLinearAllocationArea();

-  void MarkAllocationInfoBlack();
-  void UnmarkAllocationInfo();
+  void MarkLinearAllocationAreaBlack();
+  void UnmarkLinearAllocationArea();

   void DecreaseAllocatedBytes(size_t bytes, Page* page) {
     accounting_stats_.DecreaseAllocatedBytes(bytes, page);
@@ -2229,10 +2228,10 @@ class V8_EXPORT_PRIVATE PagedSpace
   std::unique_ptr<ObjectIterator> GetObjectIterator() override;

-  void SetAllocationInfo(Address top, Address limit);
+  void SetLinearAllocationArea(Address top, Address limit);

  private:
-  // Set space allocation info.
+  // Set space linear allocation area.
   void SetTopAndLimit(Address top, Address limit) {
     DCHECK(top == limit ||
            Page::FromAddress(top) == Page::FromAddress(limit - 1));
@@ -2274,6 +2273,10 @@ class V8_EXPORT_PRIVATE PagedSpace
   // (object size + alignment filler size) to the size_in_bytes.
   inline HeapObject* TryAllocateLinearlyAligned(int* size_in_bytes,
                                                 AllocationAlignment alignment);

+  MUST_USE_RESULT bool RefillLinearAllocationAreaFromFreeList(
+      size_t size_in_bytes);
+
   // If sweeping is still in progress try to sweep unswept pages. If that is
   // not successful, wait for the sweeper threads and retry free-list
   // allocation. Returns false if there is not enough space and the caller
@@ -2283,11 +2286,12 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Slow path of AllocateRaw. This function is space-dependent. Returns false
   // if there is not enough space and the caller has to retry after
   // collecting garbage.
-  MUST_USE_RESULT virtual bool SlowAllocateRaw(int size_in_bytes);
+  MUST_USE_RESULT virtual bool SlowRefillLinearAllocationArea(
+      int size_in_bytes);

   // Implementation of SlowAllocateRaw. Returns false if there is not enough
   // space and the caller has to retry after collecting garbage.
-  MUST_USE_RESULT bool RawSlowAllocateRaw(int size_in_bytes);
+  MUST_USE_RESULT bool RawSlowRefillLinearAllocationArea(int size_in_bytes);

   size_t area_size_;
@@ -2671,7 +2675,7 @@ class NewSpace : public SpaceWithLinearArea {
       int size_in_bytes, AllocationAlignment alignment);

   // Reset the allocation pointer to the beginning of the active semispace.
-  void ResetAllocationInfo();
+  void ResetLinearAllocationArea();

   // When inline allocation stepping is active, either because of incremental
   // marking, idle scavenge, or allocation statistics gathering, we 'interrupt'
@@ -2738,12 +2742,12 @@ class NewSpace : public SpaceWithLinearArea {
   SemiSpace& to_space() { return to_space_; }

  private:
-  // Update allocation info to match the current to-space page.
-  void UpdateAllocationInfo();
+  // Update linear allocation area to match the current to-space page.
+  void UpdateLinearAllocationArea();

   base::Mutex mutex_;

-  // The top and the limit at the time of setting the allocation info.
+  // The top and the limit at the time of setting the linear allocation area.
   // These values can be accessed by background tasks.
   base::AtomicValue<Address> original_top_;
   base::AtomicValue<Address> original_limit_;
@@ -2785,7 +2789,8 @@ class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
   MUST_USE_RESULT bool SweepAndRetryAllocation(int size_in_bytes) override;

-  MUST_USE_RESULT bool SlowAllocateRaw(int size_in_bytes) override;
+  MUST_USE_RESULT bool SlowRefillLinearAllocationArea(
+      int size_in_bytes) override;
 };
@@ -20,7 +20,7 @@ void SealCurrentObjects(Heap* heap) {
   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                           GarbageCollectionReason::kTesting);
   heap->mark_compact_collector()->EnsureSweepingCompleted();
-  heap->old_space()->EmptyAllocationInfo();
+  heap->old_space()->FreeLinearAllocationArea();
   for (Page* page : *heap->old_space()) {
     page->MarkNeverAllocateForTesting();
   }
@@ -68,7 +68,7 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
   int length;
   int free_memory = padding_size;
   if (tenure == i::TENURED) {
-    heap->old_space()->EmptyAllocationInfo();
+    heap->old_space()->FreeLinearAllocationArea();
     int overall_free_memory = static_cast<int>(heap->old_space()->Available());
     CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
   } else {
@@ -175,12 +175,12 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
   if (collector->sweeping_in_progress()) {
     collector->EnsureSweepingCompleted();
   }
-  space->EmptyAllocationInfo();
+  space->FreeLinearAllocationArea();
   space->ResetFreeList();
 }

 void AbandonCurrentlyFreeMemory(PagedSpace* space) {
-  space->EmptyAllocationInfo();
+  space->FreeLinearAllocationArea();
   for (Page* page : *space) {
     page->MarkNeverAllocateForTesting();
   }
@@ -204,7 +204,7 @@ void ForceEvacuationCandidate(Page* page) {
     int remaining = static_cast<int>(limit - top);
     space->heap()->CreateFillerObjectAt(top, remaining,
                                         ClearRecordedSlots::kNo);
-    space->EmptyAllocationInfo();
+    space->FreeLinearAllocationArea();
   }
 }
@@ -1704,7 +1704,7 @@ static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
   }
   Address top = *top_addr;
   // Now force the remaining allocation onto the free list.
-  CcTest::heap()->old_space()->EmptyAllocationInfo();
+  CcTest::heap()->old_space()->FreeLinearAllocationArea();
   return top;
 }
@@ -331,7 +331,7 @@ TEST(Regress5829) {
   array->set_length(9);
   heap->CreateFillerObjectAt(old_end - kPointerSize, kPointerSize,
                              ClearRecordedSlots::kNo);
-  heap->old_space()->EmptyAllocationInfo();
+  heap->old_space()->FreeLinearAllocationArea();
   Page* page = Page::FromAddress(array->address());
   IncrementalMarking::MarkingState* marking_state = marking->marking_state();
   for (auto object_and_size :
@@ -676,7 +676,7 @@ TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
   // Reset space so high water mark is consistent.
   PagedSpace* old_space = CcTest::heap()->old_space();
-  old_space->EmptyAllocationInfo();
+  old_space->FreeLinearAllocationArea();
   old_space->ResetFreeList();

   HeapObject* filler =
@@ -705,7 +705,7 @@ TEST(ShrinkPageToHighWaterMarkNoFiller) {
   // Reset space so high water mark and fillers are consistent.
   PagedSpace* old_space = CcTest::heap()->old_space();
   old_space->ResetFreeList();
-  old_space->EmptyAllocationInfo();
+  old_space->FreeLinearAllocationArea();

   size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
   CHECK_EQ(0u, shrunk);
@@ -727,7 +727,7 @@ TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
   // Reset space so high water mark and fillers are consistent.
   PagedSpace* old_space = CcTest::heap()->old_space();
-  old_space->EmptyAllocationInfo();
+  old_space->FreeLinearAllocationArea();
   old_space->ResetFreeList();

   HeapObject* filler =
@@ -754,7 +754,7 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
   // Reset space so high water mark and fillers are consistent.
   PagedSpace* old_space = CcTest::heap()->old_space();
-  old_space->EmptyAllocationInfo();
+  old_space->FreeLinearAllocationArea();
   old_space->ResetFreeList();

   HeapObject* filler =