Commit c8f9853e authored by Igor Sheludko, committed by V8 LUCI CQ

[ext-code-space] Make Heap::CreateFillerObjectAt() an instance method

... to support creation of fillers in external code space.

Bug: v8:11880
Change-Id: I47b352b8b44733c529b6b0cb2b39cf676ce83923
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3208813
Auto-Submit: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77271}
parent bb500e63
......@@ -3065,27 +3065,24 @@ size_t Heap::GetCodeRangeReservedAreaSize() {
return kReservedCodeRangePages * MemoryAllocator::GetCommitPageSize();
}
// static
HeapObject Heap::PrecedeWithFiller(ReadOnlyRoots roots, HeapObject object,
int filler_size) {
CreateFillerObjectAt(roots, object.address(), filler_size,
HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
CreateFillerObjectAt(object.address(), filler_size,
ClearFreedMemoryMode::kDontClearFreedMemory);
return HeapObject::FromAddress(object.address() + filler_size);
}
// static
HeapObject Heap::AlignWithFiller(ReadOnlyRoots roots, HeapObject object,
int object_size, int allocation_size,
HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
int allocation_size,
AllocationAlignment alignment) {
int filler_size = allocation_size - object_size;
DCHECK_LT(0, filler_size);
int pre_filler = GetFillToAlign(object.address(), alignment);
if (pre_filler) {
object = PrecedeWithFiller(roots, object, pre_filler);
object = PrecedeWithFiller(object, pre_filler);
filler_size -= pre_filler;
}
if (filler_size) {
CreateFillerObjectAt(roots, object.address() + object_size, filler_size,
CreateFillerObjectAt(object.address() + object_size, filler_size,
ClearFreedMemoryMode::kDontClearFreedMemory);
}
return object;
......@@ -3158,10 +3155,11 @@ void Heap::FlushNumberStringCache() {
namespace {
HeapObject CreateFillerObjectAtImpl(ReadOnlyRoots roots, Address addr, int size,
HeapObject CreateFillerObjectAtImpl(Heap* heap, Address addr, int size,
ClearFreedMemoryMode clear_memory_mode) {
if (size == 0) return HeapObject();
HeapObject filler = HeapObject::FromAddress(addr);
ReadOnlyRoots roots(heap);
if (size == kTaggedSize) {
filler.set_map_after_allocation(roots.unchecked_one_pointer_filler_map(),
SKIP_WRITE_BARRIER);
......@@ -3186,8 +3184,8 @@ HeapObject CreateFillerObjectAtImpl(ReadOnlyRoots roots, Address addr, int size,
// At this point, we may be deserializing the heap from a snapshot, and
// none of the maps have been created yet and are nullptr.
DCHECK((filler.map_slot().contains_map_value(kNullAddress) &&
!Heap::FromWritableHeapObject(filler)->deserialization_complete()) ||
filler.map().IsMap());
!heap->deserialization_complete()) ||
filler.map(heap->isolate()).IsMap());
return filler;
}
......@@ -3207,20 +3205,18 @@ void VerifyNoNeedToClearSlots(Address start, Address end) {}
} // namespace
// static
HeapObject Heap::CreateFillerObjectAt(ReadOnlyRoots roots, Address addr,
int size,
HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
ClearFreedMemoryMode clear_memory_mode) {
// TODO(leszeks): Verify that no slots need to be recorded.
HeapObject filler =
CreateFillerObjectAtImpl(roots, addr, size, clear_memory_mode);
CreateFillerObjectAtImpl(this, addr, size, clear_memory_mode);
VerifyNoNeedToClearSlots(addr, addr + size);
return filler;
}
void Heap::CreateFillerObjectAtBackground(
Address addr, int size, ClearFreedMemoryMode clear_memory_mode) {
CreateFillerObjectAtImpl(ReadOnlyRoots(this), addr, size, clear_memory_mode);
CreateFillerObjectAtImpl(this, addr, size, clear_memory_mode);
// Do not verify whether slots are cleared here: the concurrent sweeper is not
// allowed to access the main thread's remembered set.
}
......@@ -3233,7 +3229,7 @@ HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
// LargeObjectSpace::AllocateLargePage.
if (size == 0) return HeapObject();
HeapObject filler = CreateFillerObjectAtImpl(
ReadOnlyRoots(this), addr, size,
this, addr, size,
clear_slots_mode == ClearRecordedSlots::kYes
? ClearFreedMemoryMode::kClearFreedMemory
: ClearFreedMemoryMode::kDontClearFreedMemory);
......
......@@ -1516,16 +1516,15 @@ class Heap {
// ===========================================================================
// Creates a filler object and returns a heap object immediately after it.
V8_EXPORT_PRIVATE static HeapObject PrecedeWithFiller(ReadOnlyRoots roots,
HeapObject object,
int filler_size);
V8_EXPORT_PRIVATE HeapObject PrecedeWithFiller(HeapObject object,
int filler_size);
// Creates a filler object if needed for alignment and returns a heap object
// immediately after it. If any space is left after the returned object,
// another filler object is created so the over allocated memory is iterable.
static V8_WARN_UNUSED_RESULT HeapObject
AlignWithFiller(ReadOnlyRoots roots, HeapObject object, int object_size,
int allocation_size, AllocationAlignment alignment);
V8_WARN_UNUSED_RESULT HeapObject
AlignWithFiller(HeapObject object, int object_size, int allocation_size,
AllocationAlignment alignment);
// Allocate an external backing store with the given allocation callback.
// If the callback fails (indicated by a nullptr result) then this function
......@@ -1848,10 +1847,10 @@ class Heap {
// when introducing gaps within pages. If the memory after the object header
// of the filler should be cleared, pass in kClearFreedMemory. The default is
// kDontClearFreedMemory.
V8_EXPORT_PRIVATE static HeapObject CreateFillerObjectAt(
ReadOnlyRoots roots, Address addr, int size,
ClearFreedMemoryMode clear_memory_mode =
ClearFreedMemoryMode::kDontClearFreedMemory);
V8_EXPORT_PRIVATE HeapObject
CreateFillerObjectAt(Address addr, int size,
ClearFreedMemoryMode clear_memory_mode =
ClearFreedMemoryMode::kDontClearFreedMemory);
// Range write barrier implementation.
template <int kModeMask, typename TSlot>
......
......@@ -144,7 +144,7 @@ AllocationResult NewSpace::AllocateFastAligned(
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
if (filler_size > 0) {
obj = Heap::PrecedeWithFiller(ReadOnlyRoots(heap()), obj, filler_size);
obj = heap()->PrecedeWithFiller(obj, filler_size);
}
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
......
......@@ -113,7 +113,7 @@ AllocationResult PagedSpace::AllocateFastAligned(
HeapObject::FromAddress(allocation_info_.IncrementTop(aligned_size));
if (aligned_size_in_bytes) *aligned_size_in_bytes = aligned_size;
if (filler_size > 0) {
obj = Heap::PrecedeWithFiller(ReadOnlyRoots(heap()), obj, filler_size);
obj = heap()->PrecedeWithFiller(obj, filler_size);
}
return AllocationResult(obj);
}
......
......@@ -642,9 +642,8 @@ HeapObject ReadOnlySpace::TryAllocateLinearlyAligned(
top_ = new_top;
if (filler_size > 0) {
return Heap::PrecedeWithFiller(ReadOnlyRoots(heap()),
HeapObject::FromAddress(current_top),
filler_size);
return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
filler_size);
}
return HeapObject::FromAddress(current_top);
......
......@@ -145,7 +145,7 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
HeapObject object =
HeapObject::FromAddress(allocation_info_.IncrementTop(aligned_size));
if (filler_size > 0) {
return Heap::PrecedeWithFiller(ReadOnlyRoots(heap_), object, filler_size);
return heap_->PrecedeWithFiller(object, filler_size);
}
return AllocationResult(object);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment