Commit 4bf80431 authored by Hannes Payer, committed by Commit Bot

[heap] Refactor and clean-up runtime allocation.

Bug: chromium:796896
Change-Id: I7f46f82d079502b8ec04c5e3be5f803ec9e62ffa
Reviewed-on: https://chromium-review.googlesource.com/854797
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50424}
parent 8fbc6a05
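
The core of this CL is a rename of V8's bump-pointer abstraction from AllocationInfo to LinearAllocationArea, together with the methods that manage it (ResetAllocationInfo → ResetLinearAllocationArea, EmptyAllocationInfo → FreeLinearAllocationArea, MarkAllocationInfoBlack → MarkLinearAllocationAreaBlack, and so on). A minimal self-contained sketch of the concept behind the renamed class, with simplified types (the Address alias and the Allocate helper are illustrative, not V8's actual code):

```cpp
#include <cstddef>
#include <cstdint>

using Address = uint8_t*;  // Simplified stand-in for v8::internal::Address.

// A linear allocation area is a [top, limit) range inside a page in which
// allocation is a pointer bump; when top would cross limit, a slow path
// has to refill the area (from the free list, by sweeping, or a new page).
class LinearAllocationArea {
 public:
  LinearAllocationArea() : top_(nullptr), limit_(nullptr) {}
  LinearAllocationArea(Address top, Address limit)
      : top_(top), limit_(limit) {}

  // Bump-pointer allocation; returns nullptr when the area is exhausted.
  Address Allocate(size_t size_in_bytes) {
    if (top_ + size_in_bytes > limit_) return nullptr;
    Address result = top_;
    top_ += size_in_bytes;
    return result;
  }

  Address top() const { return top_; }
  Address limit() const { return limit_; }

 private:
  Address top_;
  Address limit_;
};
```
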
@@ -1845,7 +1845,7 @@ void Heap::EvacuateYoungGeneration() {
if (!new_space()->Rebalance()) {
FatalProcessOutOfMemory("NewSpace::Rebalance");
}
-new_space()->ResetAllocationInfo();
+new_space()->ResetLinearAllocationArea();
new_space()->set_age_mark(new_space()->top());
// Fix up special trackers.
@@ -1956,7 +1956,7 @@ void Heap::Scavenge() {
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
new_space_->Flip();
-new_space_->ResetAllocationInfo();
+new_space_->ResetLinearAllocationArea();
ItemParallelJob job(isolate()->cancelable_task_manager(),
&parallel_scavenge_semaphore_);
@@ -5497,7 +5497,7 @@ void Heap::DisableInlineAllocation() {
CodeSpaceMemoryModificationScope modification_scope(this);
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
-space->EmptyAllocationInfo();
+space->FreeLinearAllocationArea();
}
}
@@ -435,9 +435,9 @@ void IncrementalMarking::StartBlackAllocation() {
DCHECK(!black_allocation_);
DCHECK(IsMarking());
black_allocation_ = true;
-heap()->old_space()->MarkAllocationInfoBlack();
-heap()->map_space()->MarkAllocationInfoBlack();
-heap()->code_space()->MarkAllocationInfoBlack();
+heap()->old_space()->MarkLinearAllocationAreaBlack();
+heap()->map_space()->MarkLinearAllocationAreaBlack();
+heap()->code_space()->MarkLinearAllocationAreaBlack();
if (FLAG_trace_incremental_marking) {
heap()->isolate()->PrintWithTimestamp(
"[IncrementalMarking] Black allocation started\n");
@@ -447,9 +447,9 @@ void IncrementalMarking::StartBlackAllocation() {
void IncrementalMarking::PauseBlackAllocation() {
DCHECK(FLAG_black_allocation);
DCHECK(IsMarking());
-heap()->old_space()->UnmarkAllocationInfo();
-heap()->map_space()->UnmarkAllocationInfo();
-heap()->code_space()->UnmarkAllocationInfo();
+heap()->old_space()->UnmarkLinearAllocationArea();
+heap()->map_space()->UnmarkLinearAllocationArea();
+heap()->code_space()->UnmarkLinearAllocationArea();
if (FLAG_trace_incremental_marking) {
heap()->isolate()->PrintWithTimestamp(
"[IncrementalMarking] Black allocation paused\n");
@@ -33,7 +33,7 @@ class LocalAllocator {
compaction_spaces_.Get(CODE_SPACE));
// Give back remaining LAB space if this LocalAllocator's new space LAB
// sits right next to new space allocation top.
-const AllocationInfo info = new_space_lab_.Close();
+const LinearAllocationArea info = new_space_lab_.Close();
const Address top = new_space_->top();
if (info.limit() != nullptr && info.limit() == top) {
DCHECK_NOT_NULL(info.top());
@@ -2286,7 +2286,7 @@ void MinorMarkCompactCollector::EvacuatePrologue() {
new_space_evacuation_pages_.push_back(p);
}
new_space->Flip();
-new_space->ResetAllocationInfo();
+new_space->ResetLinearAllocationArea();
}
void MinorMarkCompactCollector::EvacuateEpilogue() {
@@ -2932,7 +2932,7 @@ void MarkCompactCollector::EvacuatePrologue() {
new_space_evacuation_pages_.push_back(p);
}
new_space->Flip();
-new_space->ResetAllocationInfo();
+new_space->ResetLinearAllocationArea();
// Old space.
DCHECK(old_space_evacuation_pages_.empty());
@@ -301,8 +301,7 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit())
return true;
-if (free_list_.Allocate(size_in_bytes)) return true;
-return SlowAllocateRaw(size_in_bytes);
+return SlowRefillLinearAllocationArea(size_in_bytes);
}
HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
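
A hypothetical caller-side view of the restructured fast path: after this change the inline check in EnsureLinearAllocationArea only tests the current area, and every refill strategy (free list, sweeping, fresh pages) sits behind SlowRefillLinearAllocationArea. The AllocateRawUnaligned body below is illustrative, not quoted from the CL:

```cpp
// Illustrative fast path: guarantee space, then bump-allocate linearly.
HeapObject* PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return nullptr;  // Not enough space; caller triggers a GC and retries.
  }
  HeapObject* object = AllocateLinearly(size_in_bytes);
  DCHECK_NOT_NULL(object);
  return object;
}
```
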
@@ -490,7 +489,7 @@ size_t LargeObjectSpace::Available() {
LocalAllocationBuffer LocalAllocationBuffer::InvalidBuffer() {
-return LocalAllocationBuffer(nullptr, AllocationInfo(nullptr, nullptr));
+return LocalAllocationBuffer(nullptr, LinearAllocationArea(nullptr, nullptr));
}
@@ -503,7 +502,7 @@ LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
USE(ok);
DCHECK(ok);
Address top = HeapObject::cast(obj)->address();
-return LocalAllocationBuffer(heap, AllocationInfo(top, top + size));
+return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}
This diff is collapsed.
@@ -34,12 +34,12 @@ class HeapTester;
class TestCodeRangeScope;
} // namespace heap
-class AllocationInfo;
class AllocationObserver;
class CompactionSpace;
class CompactionSpaceCollection;
class FreeList;
class Isolate;
+class LinearAllocationArea;
class LocalArrayBufferTracker;
class MemoryAllocator;
class MemoryChunk;
@@ -1562,10 +1562,10 @@ class V8_EXPORT_PRIVATE HeapObjectIterator : public ObjectIterator {
// An abstraction of allocation and relocation pointers in a page-structured
// space.
-class AllocationInfo {
+class LinearAllocationArea {
public:
-AllocationInfo() : top_(nullptr), limit_(nullptr) {}
-AllocationInfo(Address top, Address limit) : top_(top), limit_(limit) {}
+LinearAllocationArea() : top_(nullptr), limit_(nullptr) {}
+LinearAllocationArea(Address top, Address limit) : top_(top), limit_(limit) {}
void Reset(Address top, Address limit) {
set_top(top);
@@ -1763,10 +1763,11 @@ class V8_EXPORT_PRIVATE FreeList {
// and the size should be a non-zero multiple of the word size.
size_t Free(Address start, size_t size_in_bytes, FreeMode mode);
-// Finds a node of size at least size_in_bytes and sets up a linear allocation
-// area using this node. Returns false if there is no such node and the caller
-// has to retry allocation after collecting garbage.
-MUST_USE_RESULT bool Allocate(size_t size_in_bytes);
+// Allocates a free space node from the free list of at least size_in_bytes
+// bytes. Returns the actual node size in node_size which can be bigger than
+// size_in_bytes. This method returns null if the allocation request cannot be
+// handled by the free list.
+MUST_USE_RESULT FreeSpace* Allocate(size_t size_in_bytes, size_t* node_size);
// Clear the free list.
void Reset();
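
With the new signature, FreeList::Allocate hands back the node and its actual size instead of installing the linear allocation area itself; installing the area becomes the space's job. A hedged sketch of how the RefillLinearAllocationAreaFromFreeList method declared elsewhere in this CL might use it; the body is an assumption built from the declarations in this diff, not the committed implementation:

```cpp
bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
  size_t node_size = 0;
  FreeSpace* node = free_list_.Allocate(size_in_bytes, &node_size);
  if (node == nullptr) return false;  // Caller must sweep or GC, then retry.
  // The node can be bigger than requested; the surplus becomes the tail of
  // the new linear allocation area and is handed back to the free list by a
  // later FreeLinearAllocationArea().
  Address start = node->address();
  SetLinearAllocationArea(start, start + node_size);
  return true;
}
```
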
@@ -1865,8 +1866,6 @@ class V8_EXPORT_PRIVATE FreeList {
static const size_t kMediumAllocationMax = kSmallListMax;
static const size_t kLargeAllocationMax = kMediumListMax;
-FreeSpace* FindNodeFor(size_t size_in_bytes, size_t* node_size);
// Walks all available categories for a given |type| and tries to retrieve
// a node. Returns nullptr if the category is empty.
FreeSpace* FindNodeIn(FreeListCategoryType type, size_t* node_size);
@@ -1953,13 +1952,13 @@ class LocalAllocationBuffer {
inline bool TryFreeLast(HeapObject* object, int object_size);
// Close a LAB, effectively invalidating it. Returns the unused area.
-AllocationInfo Close();
+LinearAllocationArea Close();
private:
-LocalAllocationBuffer(Heap* heap, AllocationInfo allocation_info);
+LocalAllocationBuffer(Heap* heap, LinearAllocationArea allocation_info);
Heap* heap_;
-AllocationInfo allocation_info_;
+LinearAllocationArea allocation_info_;
};
class SpaceWithLinearArea : public Space {
@@ -2014,7 +2013,7 @@ class SpaceWithLinearArea : public Space {
V8_EXPORT_PRIVATE void StartNextInlineAllocationStep() override;
// TODO(ofrobots): make these private after refactoring is complete.
-AllocationInfo allocation_info_;
+LinearAllocationArea allocation_info_;
Address top_on_previous_step_;
};
@@ -2131,11 +2130,11 @@ class V8_EXPORT_PRIVATE PagedSpace
void ResetFreeList();
-// Empty space allocation info, returning unused area to free list.
-void EmptyAllocationInfo();
+// Empty space linear allocation area, returning unused area to free list.
+void FreeLinearAllocationArea();
-void MarkAllocationInfoBlack();
-void UnmarkAllocationInfo();
+void MarkLinearAllocationAreaBlack();
+void UnmarkLinearAllocationArea();
void DecreaseAllocatedBytes(size_t bytes, Page* page) {
accounting_stats_.DecreaseAllocatedBytes(bytes, page);
@@ -2229,10 +2228,10 @@ class V8_EXPORT_PRIVATE PagedSpace
std::unique_ptr<ObjectIterator> GetObjectIterator() override;
-void SetAllocationInfo(Address top, Address limit);
+void SetLinearAllocationArea(Address top, Address limit);
private:
-// Set space allocation info.
+// Set space linear allocation area.
void SetTopAndLimit(Address top, Address limit) {
DCHECK(top == limit ||
Page::FromAddress(top) == Page::FromAddress(limit - 1));
@@ -2274,6 +2273,10 @@ class V8_EXPORT_PRIVATE PagedSpace
// (object size + alignment filler size) to the size_in_bytes.
inline HeapObject* TryAllocateLinearlyAligned(int* size_in_bytes,
AllocationAlignment alignment);
+MUST_USE_RESULT bool RefillLinearAllocationAreaFromFreeList(
+    size_t size_in_bytes);
// If sweeping is still in progress try to sweep unswept pages. If that is
// not successful, wait for the sweeper threads and retry free-list
// allocation. Returns false if there is not enough space and the caller
@@ -2283,11 +2286,12 @@ class V8_EXPORT_PRIVATE PagedSpace
// Slow path of AllocateRaw. This function is space-dependent. Returns false
// if there is not enough space and the caller has to retry after
// collecting garbage.
-MUST_USE_RESULT virtual bool SlowAllocateRaw(int size_in_bytes);
+MUST_USE_RESULT virtual bool SlowRefillLinearAllocationArea(
+    int size_in_bytes);
// Implementation of SlowAllocateRaw. Returns false if there is not enough
// space and the caller has to retry after collecting garbage.
-MUST_USE_RESULT bool RawSlowAllocateRaw(int size_in_bytes);
+MUST_USE_RESULT bool RawSlowRefillLinearAllocationArea(int size_in_bytes);
size_t area_size_;
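
The split between the virtual SlowRefillLinearAllocationArea and the non-virtual RawSlowRefillLinearAllocationArea lets CompactionSpace (overridden further down) choose its own policy while sharing the common slow path. A minimal sketch of that layering; both bodies are assumed for illustration:

```cpp
// Regular paged spaces delegate straight to the shared raw slow path.
bool PagedSpace::SlowRefillLinearAllocationArea(int size_in_bytes) {
  return RawSlowRefillLinearAllocationArea(size_in_bytes);
}

// A compaction space runs during evacuation; the sketch has it retry via
// sweeping before giving up (illustrative policy, not the CL's code).
bool CompactionSpace::SlowRefillLinearAllocationArea(int size_in_bytes) {
  if (RawSlowRefillLinearAllocationArea(size_in_bytes)) return true;
  return SweepAndRetryAllocation(size_in_bytes);
}
```
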
@@ -2671,7 +2675,7 @@ class NewSpace : public SpaceWithLinearArea {
int size_in_bytes, AllocationAlignment alignment);
// Reset the allocation pointer to the beginning of the active semispace.
-void ResetAllocationInfo();
+void ResetLinearAllocationArea();
// When inline allocation stepping is active, either because of incremental
// marking, idle scavenge, or allocation statistics gathering, we 'interrupt'
@@ -2738,12 +2742,12 @@ class NewSpace : public SpaceWithLinearArea {
SemiSpace& to_space() { return to_space_; }
private:
-// Update allocation info to match the current to-space page.
-void UpdateAllocationInfo();
+// Update linear allocation area to match the current to-space page.
+void UpdateLinearAllocationArea();
base::Mutex mutex_;
-// The top and the limit at the time of setting the allocation info.
+// The top and the limit at the time of setting the linear allocation area.
// These values can be accessed by background tasks.
base::AtomicValue<Address> original_top_;
base::AtomicValue<Address> original_limit_;
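
A hedged sketch of how the renamed NewSpace helpers might fit together, and why original_top_ and original_limit_ are atomics: the bodies below are assumptions (only the names appear in this diff), but they show the intended flow of resetting the area to the first to-space page and publishing the bounds for concurrent readers:

```cpp
void NewSpace::ResetLinearAllocationArea() {
  to_space_.Reset();             // Rewind to the first to-space page.
  UpdateLinearAllocationArea();  // Point top/limit at that page.
}

void NewSpace::UpdateLinearAllocationArea() {
  Address new_top = to_space_.page_low();
  allocation_info_.Reset(new_top, to_space_.page_high());
  // Publish the fresh bounds; background tasks read these concurrently.
  original_top_.SetValue(allocation_info_.top());
  original_limit_.SetValue(allocation_info_.limit());
}
```
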
@@ -2785,7 +2789,8 @@ class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
MUST_USE_RESULT bool SweepAndRetryAllocation(int size_in_bytes) override;
-MUST_USE_RESULT bool SlowAllocateRaw(int size_in_bytes) override;
+MUST_USE_RESULT bool SlowRefillLinearAllocationArea(
+    int size_in_bytes) override;
};
@@ -20,7 +20,7 @@ void SealCurrentObjects(Heap* heap) {
heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
GarbageCollectionReason::kTesting);
heap->mark_compact_collector()->EnsureSweepingCompleted();
-heap->old_space()->EmptyAllocationInfo();
+heap->old_space()->FreeLinearAllocationArea();
for (Page* page : *heap->old_space()) {
page->MarkNeverAllocateForTesting();
}
@@ -68,7 +68,7 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
int length;
int free_memory = padding_size;
if (tenure == i::TENURED) {
-heap->old_space()->EmptyAllocationInfo();
+heap->old_space()->FreeLinearAllocationArea();
int overall_free_memory = static_cast<int>(heap->old_space()->Available());
CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
} else {
@@ -175,12 +175,12 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
-space->EmptyAllocationInfo();
+space->FreeLinearAllocationArea();
space->ResetFreeList();
}
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
-space->EmptyAllocationInfo();
+space->FreeLinearAllocationArea();
for (Page* page : *space) {
page->MarkNeverAllocateForTesting();
}
@@ -204,7 +204,7 @@ void ForceEvacuationCandidate(Page* page) {
int remaining = static_cast<int>(limit - top);
space->heap()->CreateFillerObjectAt(top, remaining,
ClearRecordedSlots::kNo);
-space->EmptyAllocationInfo();
+space->FreeLinearAllocationArea();
}
}
@@ -1704,7 +1704,7 @@ static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
}
Address top = *top_addr;
// Now force the remaining allocation onto the free list.
-CcTest::heap()->old_space()->EmptyAllocationInfo();
+CcTest::heap()->old_space()->FreeLinearAllocationArea();
return top;
}
@@ -331,7 +331,7 @@ TEST(Regress5829) {
array->set_length(9);
heap->CreateFillerObjectAt(old_end - kPointerSize, kPointerSize,
ClearRecordedSlots::kNo);
-heap->old_space()->EmptyAllocationInfo();
+heap->old_space()->FreeLinearAllocationArea();
Page* page = Page::FromAddress(array->address());
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
for (auto object_and_size :
@@ -676,7 +676,7 @@ TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
// Reset space so high water mark is consistent.
PagedSpace* old_space = CcTest::heap()->old_space();
-old_space->EmptyAllocationInfo();
+old_space->FreeLinearAllocationArea();
old_space->ResetFreeList();
HeapObject* filler =
@@ -705,7 +705,7 @@ TEST(ShrinkPageToHighWaterMarkNoFiller) {
// Reset space so high water mark and fillers are consistent.
PagedSpace* old_space = CcTest::heap()->old_space();
old_space->ResetFreeList();
-old_space->EmptyAllocationInfo();
+old_space->FreeLinearAllocationArea();
size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
CHECK_EQ(0u, shrunk);
@@ -727,7 +727,7 @@ TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
// Reset space so high water mark and fillers are consistent.
PagedSpace* old_space = CcTest::heap()->old_space();
-old_space->EmptyAllocationInfo();
+old_space->FreeLinearAllocationArea();
old_space->ResetFreeList();
HeapObject* filler =
@@ -754,7 +754,7 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
// Reset space so high water mark and fillers are consistent.
PagedSpace* old_space = CcTest::heap()->old_space();
-old_space->EmptyAllocationInfo();
+old_space->FreeLinearAllocationArea();
old_space->ResetFreeList();
HeapObject* filler =