Commit ac486487 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Handle new_space() in Heap LAB methods

Let Heap::MakeHeapIterable() and Heap::FreeLinearAllocationAreas() also
handle the new space to be more uniform between spaces. Also removes
Heap::EnsureFillerObjectAtTop() in favor of
NewSpace::MakeLinearAllocationAreaIterable().

Bug: v8:10315
Change-Id: I7d28c1e95e433c4bc5a4a1a1f3aa8d71c43b8887
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3281926
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77905}
parent 1fc840fa
...@@ -1647,20 +1647,6 @@ void Heap::ReportExternalMemoryPressure() { ...@@ -1647,20 +1647,6 @@ void Heap::ReportExternalMemoryPressure() {
int64_t Heap::external_memory_limit() { return external_memory_.limit(); } int64_t Heap::external_memory_limit() { return external_memory_.limit(); }
void Heap::EnsureFillerObjectAtTop() {
// There may be an allocation memento behind objects in new space. Upon
// evacuation of a non-full new space (or if we are on the last page) there
// may be uninitialized memory behind top. We fill the remainder of the page
// with a filler.
if (!new_space_) return;
Address to_top = new_space_->top();
Page* page = Page::FromAddress(to_top - kTaggedSize);
if (page->Contains(to_top)) {
int remaining_in_page = static_cast<int>(page->area_end() - to_top);
CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo);
}
}
Heap::DevToolsTraceEventScope::DevToolsTraceEventScope(Heap* heap, Heap::DevToolsTraceEventScope::DevToolsTraceEventScope(Heap* heap,
const char* event_name, const char* event_name,
const char* event_type) const char* event_type)
...@@ -1720,7 +1706,11 @@ bool Heap::CollectGarbage(AllocationSpace space, ...@@ -1720,7 +1706,11 @@ bool Heap::CollectGarbage(AllocationSpace space,
} }
#endif #endif
EnsureFillerObjectAtTop(); // There may be an allocation memento behind objects in new space. Upon
// evacuation of a non-full new space (or if we are on the last page) there
// may be uninitialized memory behind top. We fill the remainder of the page
// with a filler.
if (new_space()) new_space()->MakeLinearAllocationAreaIterable();
if (IsYoungGenerationCollector(collector) && if (IsYoungGenerationCollector(collector) &&
!incremental_marking()->IsStopped()) { !incremental_marking()->IsStopped()) {
...@@ -3530,8 +3520,7 @@ void Heap::MakeHeapIterable() { ...@@ -3530,8 +3520,7 @@ void Heap::MakeHeapIterable() {
space->MakeLinearAllocationAreaIterable(); space->MakeLinearAllocationAreaIterable();
} }
// New space is bump-pointer allocation only and therefore guaranteed to be if (new_space()) new_space()->MakeLinearAllocationAreaIterable();
// iterable up to top().
} }
void Heap::FreeLinearAllocationAreas() { void Heap::FreeLinearAllocationAreas() {
...@@ -3544,8 +3533,7 @@ void Heap::FreeLinearAllocationAreas() { ...@@ -3544,8 +3533,7 @@ void Heap::FreeLinearAllocationAreas() {
space->FreeLinearAllocationArea(); space->FreeLinearAllocationArea();
} }
// New space is bump-pointer allocation only and therefore guaranteed to be if (new_space()) new_space()->FreeLinearAllocationArea();
// iterable up to top().
} }
void Heap::FreeSharedLinearAllocationAreas() { void Heap::FreeSharedLinearAllocationAreas() {
......
...@@ -1780,11 +1780,6 @@ class Heap { ...@@ -1780,11 +1780,6 @@ class Heap {
GarbageCollector SelectGarbageCollector(AllocationSpace space, GarbageCollector SelectGarbageCollector(AllocationSpace space,
const char** reason); const char** reason);
// Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
// with the allocation memento of the object at the top
void EnsureFillerObjectAtTop();
// Free all LABs in the heap. // Free all LABs in the heap.
void FreeLinearAllocationAreas(); void FreeLinearAllocationAreas();
......
...@@ -666,6 +666,21 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes, ...@@ -666,6 +666,21 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
return result; return result;
} }
void NewSpace::MakeLinearAllocationAreaIterable() {
Address to_top = top();
Page* page = Page::FromAddress(to_top - kTaggedSize);
if (page->Contains(to_top)) {
int remaining_in_page = static_cast<int>(page->area_end() - to_top);
heap_->CreateFillerObjectAt(to_top, remaining_in_page,
ClearRecordedSlots::kNo);
}
}
// Closes the current linear allocation area: first pads its unused tail
// with a filler so the heap stays iterable, then resets the inline
// allocation limit so the next allocation opens a fresh LAB.
void NewSpace::FreeLinearAllocationArea() {
  MakeLinearAllocationAreaIterable();
  UpdateInlineAllocationLimit(0);
}
void NewSpace::VerifyTop() { void NewSpace::VerifyTop() {
// Ensure validity of LAB: start <= top <= limit // Ensure validity of LAB: start <= top <= limit
DCHECK_LE(allocation_info_.start(), allocation_info_.top()); DCHECK_LE(allocation_info_.start(), allocation_info_.top());
......
...@@ -469,6 +469,12 @@ class V8_EXPORT_PRIVATE NewSpace ...@@ -469,6 +469,12 @@ class V8_EXPORT_PRIVATE NewSpace
return &pending_allocation_mutex_; return &pending_allocation_mutex_;
} }
// Creates a filler object in the linear allocation area.
void MakeLinearAllocationAreaIterable();
// Creates a filler object in the linear allocation area and closes it.
void FreeLinearAllocationArea();
private: private:
static const int kAllocationBufferParkingThreshold = 4 * KB; static const int kAllocationBufferParkingThreshold = 4 * KB;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment