Commit 3e0ced33 authored by Dominik Inführ, committed by Commit Bot

[heap] Add ParkedMutexGuard

Introduce ParkedMutexGuard, which is similar to base::MutexGuard but also
parks the LocalHeap while the thread is blocked on the mutex (a standalone
sketch of the pattern follows the commit metadata below).

Bug: v8:10315
Change-Id: I149673511df013881ed2fbb42cf22d3f9b17b92d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2230518
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68173}
parent dbf89f8a
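For readers unfamiliar with the parking model: a parked thread promises not to touch the V8 heap, so a concurrent GC does not have to wait for it to reach a safepoint. ParkedMutexGuard applies this while blocking on a lock: park, acquire the mutex, unpark, and release the mutex when the guard goes out of scope. What follows is a minimal standalone sketch of that pattern, not part of this change and not V8 code; it uses std::mutex and a hypothetical ThreadState stand-in in place of V8's base::Mutex and LocalHeap.

#include <atomic>
#include <mutex>

// Hypothetical stand-in for V8's LocalHeap: records whether the owning
// thread is currently "parked" (guaranteed not to access the heap).
struct ThreadState {
  std::atomic<bool> parked{false};
  void Park() { parked.store(true, std::memory_order_release); }
  void Unpark() { parked.store(false, std::memory_order_release); }
};

// RAII helper in the spirit of ParkedScope: parked for the scope's lifetime.
class ParkedScopeSketch {
 public:
  explicit ParkedScopeSketch(ThreadState* state) : state_(state) { state_->Park(); }
  ~ParkedScopeSketch() { state_->Unpark(); }

 private:
  ThreadState* state_;
};

// Sketch of the ParkedMutexGuard idea: the thread is parked only while it
// may block waiting for the lock, and is unparked once the lock is held.
class ParkedMutexGuardSketch {
 public:
  ParkedMutexGuardSketch(ThreadState* state, std::mutex* mutex) : mutex_(mutex) {
    ParkedScopeSketch parked(state);  // parked across the potentially blocking call
    mutex_->lock();
  }  // leaving the constructor unparks the thread again
  ~ParkedMutexGuardSketch() { mutex_->unlock(); }

 private:
  std::mutex* mutex_;
};

// Usage, mirroring the call sites in this change:
//   ParkedMutexGuardSketch lock(&thread_state, &allocation_mutex);
//   // ... critical section; the mutex is released when `lock` is destroyed.

The real LocalHeap does more than flip a flag (for example, unparking coordinates with pending safepoints); the sketch deliberately omits that.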
@@ -115,6 +115,19 @@ class ParkedScope {
   LocalHeap* local_heap_;
 };
 
+class ParkedMutexGuard {
+  base::Mutex* guard_;
+
+ public:
+  explicit ParkedMutexGuard(LocalHeap* local_heap, base::Mutex* guard)
+      : guard_(guard) {
+    ParkedScope scope(local_heap);
+    guard_->Lock();
+  }
+
+  ~ParkedMutexGuard() { guard_->Unlock(); }
+};
+
 }  // namespace internal
 }  // namespace v8
@@ -344,10 +344,10 @@ Page* PagedSpace::Expand() {
   return page;
 }
 
-Page* PagedSpace::ExpandBackground() {
+Page* PagedSpace::ExpandBackground(LocalHeap* local_heap) {
   Page* page = AllocatePage();
   if (page == nullptr) return nullptr;
-  base::MutexGuard guard(&allocation_mutex_);
+  ParkedMutexGuard lock(local_heap, &allocation_mutex_);
   AddPage(page);
   Free(page->area_start(), page->area_size(),
        SpaceAccountingMode::kSpaceAccounted);
@@ -579,7 +579,7 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
   DCHECK_EQ(origin, AllocationOrigin::kRuntime);
 
   auto result = TryAllocationFromFreeListBackground(
-      min_size_in_bytes, max_size_in_bytes, alignment, origin);
+      local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
   if (result) return result;
 
   MarkCompactCollector* collector = heap()->mark_compact_collector();
@@ -588,13 +588,13 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
     // First try to refill the free-list, concurrent sweeper threads
     // may have freed some objects in the meantime.
     {
-      base::MutexGuard lock(&allocation_mutex_);
+      ParkedMutexGuard lock(local_heap, &allocation_mutex_);
       RefillFreeList();
     }
 
     // Retry the free list allocation.
     auto result = TryAllocationFromFreeListBackground(
-        min_size_in_bytes, max_size_in_bytes, alignment, origin);
+        local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
     if (result) return result;
 
     Sweeper::FreeSpaceMayContainInvalidatedSlots
@@ -607,24 +607,24 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
         invalidated_slots_in_free_space);
 
     {
-      base::MutexGuard lock(&allocation_mutex_);
+      ParkedMutexGuard lock(local_heap, &allocation_mutex_);
       RefillFreeList();
     }
 
     if (static_cast<size_t>(max_freed) >= min_size_in_bytes) {
       auto result = TryAllocationFromFreeListBackground(
-          min_size_in_bytes, max_size_in_bytes, alignment, origin);
+          local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
       if (result) return result;
     }
   }
 
   if (heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap) &&
       heap()->CanExpandOldGenerationBackground(AreaSize()) &&
-      ExpandBackground()) {
+      ExpandBackground(local_heap)) {
     DCHECK((CountTotalPages() > 1) ||
            (min_size_in_bytes <= free_list_->Available()));
     auto result = TryAllocationFromFreeListBackground(
-        min_size_in_bytes, max_size_in_bytes, alignment, origin);
+        local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
     if (result) return result;
   }
@@ -634,11 +634,12 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
 }
 
 base::Optional<std::pair<Address, size_t>>
-PagedSpace::TryAllocationFromFreeListBackground(size_t min_size_in_bytes,
+PagedSpace::TryAllocationFromFreeListBackground(LocalHeap* local_heap,
+                                                size_t min_size_in_bytes,
                                                 size_t max_size_in_bytes,
                                                 AllocationAlignment alignment,
                                                 AllocationOrigin origin) {
-  base::MutexGuard lock(&allocation_mutex_);
+  ParkedMutexGuard lock(local_heap, &allocation_mutex_);
   DCHECK_LE(min_size_in_bytes, max_size_in_bytes);
   DCHECK_EQ(identity(), OLD_SPACE);
@@ -341,7 +341,7 @@ class V8_EXPORT_PRIVATE PagedSpace
   // it cannot allocate requested number of pages from OS, or if the hard heap
   // size limit has been hit.
   Page* Expand();
-  Page* ExpandBackground();
+  Page* ExpandBackground(LocalHeap* local_heap);
   Page* AllocatePage();
 
   // Sets up a linear allocation area that fits the given number of bytes.
@@ -386,7 +386,8 @@ class V8_EXPORT_PRIVATE PagedSpace
       int size_in_bytes, AllocationOrigin origin);
 
   V8_WARN_UNUSED_RESULT base::Optional<std::pair<Address, size_t>>
-  TryAllocationFromFreeListBackground(size_t min_size_in_bytes,
+  TryAllocationFromFreeListBackground(LocalHeap* local_heap,
+                                      size_t min_size_in_bytes,
                                       size_t max_size_in_bytes,
                                       AllocationAlignment alignment,
                                       AllocationOrigin origin);