Commit 1f4e8c7c authored by Dominik Inführ, committed by Commit Bot

[heap] Background allocation supports sweeping

Before actually failing to allocate, let the background thread help to
sweep all pages of that space.

As a drive-by, also rename the allocation functions to make background
and main-thread allocation more similar.

Bug: v8:10315
Change-Id: I26d4b622de949d4943e35071cee1df8b3d2889c2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2297383
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68843}
parent 3d3fd945
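
For orientation before the diff: the change turns the background allocation slow path into the same allocate, sweep, retry ladder the main thread already uses, now ending with a full sweep of the space instead of giving up early. The sketch below is a minimal, self-contained C++ model of that ladder; Space, Sweeper, and all member names are simplified stand-ins for the V8 internals in the diff, not the real API, and paths not visible in the diff (e.g. growing the space) are omitted.

#include <cstddef>
#include <optional>
#include <utility>

// Stand-in for the sweeper: pages swept so far feed the space's free list.
struct Sweeper {
  bool sweeping_in_progress = true;
  // Sweep up to max_pages pages of the space (bounded contribution).
  void ContributeToSweeping(int max_pages) {}
  // Sweep every remaining page of the space (the step this CL adds).
  void DrainSweepingWorklist() { sweeping_in_progress = false; }
};

struct Space {
  Sweeper sweeper;

  // Try to carve a block of [min, max] bytes out of the free list.
  std::optional<std::pair<char*, size_t>> TryFreeList(size_t min, size_t max) {
    return std::nullopt;  // Stand-in: nothing suitable is available.
  }
  // Move memory freed by sweeping onto the free list.
  void RefillFreeList() {}

  // Mirrors the order of attempts in RawRefillLabBackground after this CL.
  std::optional<std::pair<char*, size_t>> RefillLabBackground(size_t min,
                                                              size_t max) {
    if (auto r = TryFreeList(min, max)) return r;  // 1. Fast path.
    if (sweeper.sweeping_in_progress) {
      sweeper.ContributeToSweeping(/*max_pages=*/1);  // 2. Help sweep a bit.
      RefillFreeList();
      if (auto r = TryFreeList(min, max)) return r;
      sweeper.DrainSweepingWorklist();  // 3. New: finish sweeping the space.
      RefillFreeList();
      return TryFreeList(min, max);  // 4. Last try before failing.
    }
    return std::nullopt;  // Caller will retry after a GC.
  }
};

Only the drain step and the final free-list try are new in this CL; the bounded contribution to sweeping already existed (note the removed TODO in the diff below).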
@@ -111,7 +111,7 @@ AllocationResult ConcurrentAllocator::AllocateInLabSlow(
 }
 
 bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) {
-  auto result = space_->SlowGetLinearAllocationAreaBackground(
+  auto result = space_->RawRefillLabBackground(
       local_heap_, kLabSize, kMaxLabSize, kWordAligned, origin);
   if (!result) return false;
@@ -135,8 +135,8 @@ bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) {
 AllocationResult ConcurrentAllocator::AllocateOutsideLab(
     int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
-  auto result = space_->SlowGetLinearAllocationAreaBackground(
-      local_heap_, object_size, object_size, alignment, origin);
+  auto result = space_->RawRefillLabBackground(local_heap_, object_size,
+                                               object_size, alignment, origin);
   if (!result) return AllocationResult::Retry(OLD_SPACE);
   HeapObject object = HeapObject::FromAddress(result->first);
...
@@ -512,7 +512,7 @@ std::unique_ptr<ObjectIterator> PagedSpace::GetObjectIterator(Heap* heap) {
       new PagedSpaceObjectIterator(heap, this));
 }
 
-bool PagedSpace::RefillLabFromFreeListMain(size_t size_in_bytes,
-                                           AllocationOrigin origin) {
+bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
+                                               AllocationOrigin origin) {
   DCHECK(IsAligned(size_in_bytes, kTaggedSize));
   DCHECK_LE(top(), limit());
@@ -561,12 +561,9 @@ bool PagedSpace::RefillLabFromFreeListMain(size_t size_in_bytes,
   return true;
 }
 
-base::Optional<std::pair<Address, size_t>>
-PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
-                                                  size_t min_size_in_bytes,
-                                                  size_t max_size_in_bytes,
-                                                  AllocationAlignment alignment,
-                                                  AllocationOrigin origin) {
+base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
+    LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
+    AllocationAlignment alignment, AllocationOrigin origin) {
   DCHECK(!is_local_space() && identity() == OLD_SPACE);
   DCHECK_EQ(origin, AllocationOrigin::kRuntime);
@@ -589,6 +586,8 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
         local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
     if (result) return result;
 
+    // Now contribute to sweeping from background thread and then try to
+    // reallocate.
     Sweeper::FreeSpaceMayContainInvalidatedSlots
         invalidated_slots_in_free_space =
             Sweeper::FreeSpaceMayContainInvalidatedSlots::kNo;
@@ -620,7 +619,19 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
       if (result) return result;
     }
 
-    // TODO(dinfuehr): Complete sweeping here and try allocation again.
+    if (collector->sweeping_in_progress()) {
+      // Complete sweeping for this space.
+      collector->DrainSweepingWorklistForSpace(identity());
+
+      {
+        ParkedMutexGuard lock(local_heap, &allocation_mutex_);
+        RefillFreeList();
+      }
+
+      // Last try to acquire memory from free list.
+      return TryAllocationFromFreeListBackground(
+          local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
+    }
   }
 
   return {};
 }
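
One detail in the new block worth calling out: the free-list refill happens under a ParkedMutexGuard. The assumed behavior (the model below is a simplification with stand-in types, not V8's real implementation) is that the thread parks its LocalHeap while blocking on allocation_mutex_, so a main-thread safepoint never has to wait for a background thread that is itself stuck on a lock.

#include <mutex>

// Simplified stand-in for v8::internal::LocalHeap.
struct LocalHeap {
  void Park() {}    // GC safepoints may proceed without this thread.
  void Unpark() {}  // Thread re-enters the active state.
};

// Model of ParkedMutexGuard: parked only for the (possibly long) lock wait.
class ParkedMutexGuardModel {
 public:
  ParkedMutexGuardModel(LocalHeap* heap, std::mutex* mutex) : mutex_(mutex) {
    heap->Park();
    mutex_->lock();  // May block; the GC is not held up meanwhile.
    heap->Unpark();
  }
  ~ParkedMutexGuardModel() { mutex_->unlock(); }

 private:
  std::mutex* mutex_;
};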
@@ -873,12 +884,12 @@ bool CompactionSpace::RefillLabMain(int size_in_bytes,
 }
 
 bool OffThreadSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
-  if (RefillLabFromFreeListMain(size_in_bytes, origin)) return true;
+  if (TryAllocationFromFreeListMain(size_in_bytes, origin)) return true;
 
   if (heap()->CanExpandOldGenerationBackground(size_in_bytes) && Expand()) {
     DCHECK((CountTotalPages() > 1) ||
            (static_cast<size_t>(size_in_bytes) <= free_list_->Available()));
-    return RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes),
-                                     origin);
+    return TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                         origin);
   }
@@ -893,7 +904,7 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
   DCHECK_GE(size_in_bytes, 0);
   const int kMaxPagesToSweep = 1;
 
-  if (RefillLabFromFreeListMain(size_in_bytes, origin)) return true;
+  if (TryAllocationFromFreeListMain(size_in_bytes, origin)) return true;
 
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   // Sweeping is still in progress.
@@ -908,7 +919,8 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
     RefillFreeList();
 
     // Retry the free list allocation.
-    if (RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes), origin))
+    if (TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                      origin))
       return true;
 
     if (ContributeToSweepingMain(size_in_bytes, kMaxPagesToSweep, size_in_bytes,
@@ -923,7 +935,8 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
       Page* page = main_space->RemovePageSafe(size_in_bytes);
       if (page != nullptr) {
         AddPage(page);
-        if (RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes), origin))
+        if (TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                          origin))
           return true;
       }
     }
@@ -937,7 +950,7 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
     }
     DCHECK((CountTotalPages() > 1) ||
            (static_cast<size_t>(size_in_bytes) <= free_list_->Available()));
-    return RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes),
-                                     origin);
+    return TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                         origin);
   }
 }
@@ -953,7 +966,7 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
     RefillFreeList();
 
     // Last try to acquire memory from free list.
-    return RefillLabFromFreeListMain(size_in_bytes, origin);
+    return TryAllocationFromFreeListMain(size_in_bytes, origin);
   }
 
   return false;
 }
@@ -975,7 +988,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
                                       invalidated_slots_in_free_space);
     RefillFreeList();
     if (max_freed >= size_in_bytes)
-      return RefillLabFromFreeListMain(size_in_bytes, origin);
+      return TryAllocationFromFreeListMain(size_in_bytes, origin);
   }
   return false;
 }
...
@@ -148,8 +148,7 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Allocate the requested number of bytes in the space from a background
   // thread.
   V8_WARN_UNUSED_RESULT base::Optional<std::pair<Address, size_t>>
-  SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
-                                        size_t min_size_in_bytes,
-                                        size_t max_size_in_bytes,
-                                        AllocationAlignment alignment,
-                                        AllocationOrigin origin);
+  RawRefillLabBackground(LocalHeap* local_heap, size_t min_size_in_bytes,
+                         size_t max_size_in_bytes,
+                         AllocationAlignment alignment,
+                         AllocationOrigin origin);
@@ -364,8 +363,8 @@ class V8_EXPORT_PRIVATE PagedSpace
   inline AllocationResult TryAllocateLinearlyAligned(
       int* size_in_bytes, AllocationAlignment alignment);
 
-  V8_WARN_UNUSED_RESULT bool RefillLabFromFreeListMain(size_t size_in_bytes,
-                                                       AllocationOrigin origin);
+  V8_WARN_UNUSED_RESULT bool TryAllocationFromFreeListMain(
+      size_t size_in_bytes, AllocationOrigin origin);
 
   V8_WARN_UNUSED_RESULT bool ContributeToSweepingMain(int required_freed_bytes,
                                                       int max_pages,
...