Commit bc2bc6bf authored by Leszek Swirski's avatar Leszek Swirski Committed by Commit Bot

[offthread] Make Heap::AlignWithFiller static

Enable the use of Heap::AlignWithFiller off-thread by making the method
static.

Bug: chromium:1075999
Change-Id: I8071ae678f954dd4e960b2cd83f63dd22a948920
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2184230
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67602}
parent c64b52a8
...@@ -2848,7 +2848,7 @@ int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) { ...@@ -2848,7 +2848,7 @@ int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) {
return 0; return 0;
} }
// static
int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) { int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0) if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
return kTaggedSize; return kTaggedSize;
...@@ -2861,24 +2861,28 @@ size_t Heap::GetCodeRangeReservedAreaSize() { ...@@ -2861,24 +2861,28 @@ size_t Heap::GetCodeRangeReservedAreaSize() {
return kReservedCodeRangePages * MemoryAllocator::GetCommitPageSize(); return kReservedCodeRangePages * MemoryAllocator::GetCommitPageSize();
} }
HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) { // static
CreateFillerObjectAt(object.address(), filler_size, ClearRecordedSlots::kNo); HeapObject Heap::PrecedeWithFiller(ReadOnlyRoots roots, HeapObject object,
int filler_size) {
CreateFillerObjectAt(roots, object.address(), filler_size,
ClearFreedMemoryMode::kDontClearFreedMemory);
return HeapObject::FromAddress(object.address() + filler_size); return HeapObject::FromAddress(object.address() + filler_size);
} }
HeapObject Heap::AlignWithFiller(HeapObject object, int object_size, // static
int allocation_size, HeapObject Heap::AlignWithFiller(ReadOnlyRoots roots, HeapObject object,
int object_size, int allocation_size,
AllocationAlignment alignment) { AllocationAlignment alignment) {
int filler_size = allocation_size - object_size; int filler_size = allocation_size - object_size;
DCHECK_LT(0, filler_size); DCHECK_LT(0, filler_size);
int pre_filler = GetFillToAlign(object.address(), alignment); int pre_filler = GetFillToAlign(object.address(), alignment);
if (pre_filler) { if (pre_filler) {
object = PrecedeWithFiller(object, pre_filler); object = PrecedeWithFiller(roots, object, pre_filler);
filler_size -= pre_filler; filler_size -= pre_filler;
} }
if (filler_size) { if (filler_size) {
CreateFillerObjectAt(object.address() + object_size, filler_size, CreateFillerObjectAt(roots, object.address() + object_size, filler_size,
ClearRecordedSlots::kNo); ClearFreedMemoryMode::kDontClearFreedMemory);
} }
return object; return object;
} }
......
...@@ -1330,15 +1330,15 @@ class Heap { ...@@ -1330,15 +1330,15 @@ class Heap {
// =========================================================================== // ===========================================================================
// Creates a filler object and returns a heap object immediately after it. // Creates a filler object and returns a heap object immediately after it.
V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT HeapObject V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT HeapObject
PrecedeWithFiller(HeapObject object, int filler_size); PrecedeWithFiller(ReadOnlyRoots roots, HeapObject object, int filler_size);
// Creates a filler object if needed for alignment and returns a heap object // Creates a filler object if needed for alignment and returns a heap object
// immediately after it. If any space is left after the returned object, // immediately after it. If any space is left after the returned object,
// another filler object is created so the over allocated memory is iterable. // another filler object is created so the over allocated memory is iterable.
V8_WARN_UNUSED_RESULT HeapObject static V8_WARN_UNUSED_RESULT HeapObject
AlignWithFiller(HeapObject object, int object_size, int allocation_size, AlignWithFiller(ReadOnlyRoots roots, HeapObject object, int object_size,
AllocationAlignment alignment); int allocation_size, AllocationAlignment alignment);
// Allocate an external backing store with the given allocation callback. // Allocate an external backing store with the given allocation callback.
// If the callback fails (indicated by a nullptr result) then this function // If the callback fails (indicated by a nullptr result) then this function
......
...@@ -335,8 +335,9 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned( ...@@ -335,8 +335,9 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
allocation_info_.set_top(new_top); allocation_info_.set_top(new_top);
if (filler_size > 0) { if (filler_size > 0) {
return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top), return Heap::PrecedeWithFiller(ReadOnlyRoots(heap_),
filler_size); HeapObject::FromAddress(current_top),
filler_size);
} }
return AllocationResult(HeapObject::FromAddress(current_top)); return AllocationResult(HeapObject::FromAddress(current_top));
...@@ -369,8 +370,9 @@ HeapObject PagedSpace::TryAllocateLinearlyAligned( ...@@ -369,8 +370,9 @@ HeapObject PagedSpace::TryAllocateLinearlyAligned(
allocation_info_.set_top(new_top); allocation_info_.set_top(new_top);
if (filler_size > 0) { if (filler_size > 0) {
*size_in_bytes += filler_size; *size_in_bytes += filler_size;
return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top), return Heap::PrecedeWithFiller(ReadOnlyRoots(heap()),
filler_size); HeapObject::FromAddress(current_top),
filler_size);
} }
return HeapObject::FromAddress(current_top); return HeapObject::FromAddress(current_top);
...@@ -484,7 +486,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes, ...@@ -484,7 +486,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
if (filler_size > 0) { if (filler_size > 0) {
obj = heap()->PrecedeWithFiller(obj, filler_size); obj = Heap::PrecedeWithFiller(ReadOnlyRoots(heap()), obj, filler_size);
} }
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes); MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
......
...@@ -85,7 +85,8 @@ Address DeserializerAllocator::Allocate(SnapshotSpace space, int size) { ...@@ -85,7 +85,8 @@ Address DeserializerAllocator::Allocate(SnapshotSpace space, int size) {
DCHECK(ReadOnlyRoots(heap_).free_space_map().IsMap()); DCHECK(ReadOnlyRoots(heap_).free_space_map().IsMap());
DCHECK(ReadOnlyRoots(heap_).one_pointer_filler_map().IsMap()); DCHECK(ReadOnlyRoots(heap_).one_pointer_filler_map().IsMap());
DCHECK(ReadOnlyRoots(heap_).two_pointer_filler_map().IsMap()); DCHECK(ReadOnlyRoots(heap_).two_pointer_filler_map().IsMap());
obj = heap_->AlignWithFiller(obj, size, reserved, next_alignment_); obj = Heap::AlignWithFiller(ReadOnlyRoots(heap_), obj, size, reserved,
next_alignment_);
address = obj.address(); address = obj.address();
next_alignment_ = kWordAligned; next_alignment_ = kWordAligned;
return address; return address;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment