Commit 18de64a1 authored by Michael Lippautz, committed by V8 LUCI CQ

heap: Inline fast paths for AllocateRaw() and AllocateRawWith()

- Both paths are now inlined.
- Outline large object allocation, shrinking the trampoline a bit.
- Support a fast path for AllocationType::kOld from AllocateRawWith().

Bug: v8:12615, chromium:1293284
Change-Id: I8f0b9aabc6fe47e1eee159c214403ccffea5eeab
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3456082
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79048}
parent a7a0b765
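The shape of the change is a classic fast-path/slow-path split: the allocation trampoline stays small and fully inlined, while the rare large-object case is outlined into a separate non-inlined helper (AllocateRawLargeInternal in the diff below). The following minimal sketch illustrates that pattern only; the class, macros, and sizes are hypothetical, and GCC/Clang builtins stand in for V8's V8_INLINE and V8_UNLIKELY macros.

#include <cstddef>
#include <cstdint>
#include <new>

// Hypothetical stand-ins for V8_INLINE / V8_UNLIKELY (GCC/Clang syntax assumed).
#define ALWAYS_INLINE inline __attribute__((always_inline))
#define UNLIKELY(condition) __builtin_expect(!!(condition), 0)

class BumpAllocator {
 public:
  static constexpr std::size_t kMaxRegularObjectSize = 16 * 1024;

  // Fast path: kept small so the compiler can afford to inline it into every
  // caller; the only out-of-line call it makes is for oversized requests.
  ALWAYS_INLINE void* Allocate(std::size_t size) {
    if (UNLIKELY(size > kMaxRegularObjectSize)) {
      // Rare case handled out of line, so it never bloats inlined callers.
      return AllocateLarge(size);
    }
    if (top_ + size > limit_) return nullptr;  // caller retries or collects
    void* result = reinterpret_cast<void*>(top_);
    top_ += size;  // bump-pointer allocation
    return result;
  }

 private:
  // Out-of-line slow path, playing the role of AllocateRawLargeInternal().
  __attribute__((noinline)) void* AllocateLarge(std::size_t size);

  std::uintptr_t top_ = 0;
  std::uintptr_t limit_ = 0;
};

// Trivial definition for the sketch: oversized requests go to the system heap.
void* BumpAllocator::AllocateLarge(std::size_t size) {
  return ::operator new(size);
}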
@@ -241,42 +241,42 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
   if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
     allocation = tp_heap_->Allocate(size_in_bytes, type, alignment);
   } else {
-    if (AllocationType::kYoung == type) {
-      if (large_object) {
-        allocation = new_lo_space_->AllocateRaw(size_in_bytes);
-      } else {
-        allocation = new_space_->AllocateRaw(size_in_bytes, alignment, origin);
-      }
-    } else if (AllocationType::kOld == type) {
-      if (large_object) {
-        allocation = lo_space_->AllocateRaw(size_in_bytes);
-      } else {
-        allocation = old_space_->AllocateRaw(size_in_bytes, alignment, origin);
-      }
-    } else if (AllocationType::kCode == type) {
-      DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
-      DCHECK(AllowCodeAllocation::IsAllowed());
-      if (large_object) {
-        allocation = code_lo_space_->AllocateRaw(size_in_bytes);
-      } else {
-        allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
-      }
-    } else if (AllocationType::kMap == type) {
-      DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
-      allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
-    } else if (AllocationType::kReadOnly == type) {
-      DCHECK(!large_object);
-      DCHECK(CanAllocateInReadOnlySpace());
-      DCHECK_EQ(AllocationOrigin::kRuntime, origin);
-      allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
-    } else if (AllocationType::kSharedOld == type) {
-      allocation =
-          shared_old_allocator_->AllocateRaw(size_in_bytes, alignment, origin);
-    } else if (AllocationType::kSharedMap == type) {
-      allocation =
-          shared_map_allocator_->AllocateRaw(size_in_bytes, alignment, origin);
-    } else {
-      UNREACHABLE();
+    if (V8_UNLIKELY(large_object)) {
+      allocation =
+          AllocateRawLargeInternal(size_in_bytes, type, origin, alignment);
+    } else {
+      switch (type) {
+        case AllocationType::kYoung:
+          allocation =
+              new_space_->AllocateRaw(size_in_bytes, alignment, origin);
+          break;
+        case AllocationType::kOld:
+          allocation =
+              old_space_->AllocateRaw(size_in_bytes, alignment, origin);
+          break;
+        case AllocationType::kCode:
+          DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
+          DCHECK(AllowCodeAllocation::IsAllowed());
+          allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
+          break;
+        case AllocationType::kMap:
+          DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
+          allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
+          break;
+        case AllocationType::kReadOnly:
+          DCHECK(CanAllocateInReadOnlySpace());
+          DCHECK_EQ(AllocationOrigin::kRuntime, origin);
+          allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
+          break;
+        case AllocationType::kSharedMap:
+          allocation = shared_map_allocator_->AllocateRaw(size_in_bytes,
+                                                          alignment, origin);
+          break;
+        case AllocationType::kSharedOld:
+          allocation = shared_old_allocator_->AllocateRaw(size_in_bytes,
+                                                          alignment, origin);
+          break;
+      }
     }
   }
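A side effect of the rewritten dispatch above: the old if/else-if chain needed a trailing UNREACHABLE(), whereas a switch over the AllocationType enum with no default case lets the compiler verify exhaustiveness at build time. The toy example below shows that property; the enum and function are hypothetical, not V8 code, and the -Wswitch behavior is assumed for GCC or Clang with -Wall.

#include <cstdio>

// Hypothetical enum, not V8's AllocationType.
enum class SpaceKind { kYoung, kOld, kCode };

// With no `default:` case, -Wswitch reports any enumerator added later but
// not handled here; an if/else-if chain only fails at runtime.
const char* SpaceName(SpaceKind kind) {
  switch (kind) {
    case SpaceKind::kYoung:
      return "new space";
    case SpaceKind::kOld:
      return "old space";
    case SpaceKind::kCode:
      return "code space";
  }
  return "unknown";  // keeps -Wreturn-type quiet for out-of-range values
}

int main() { std::printf("%s\n", SpaceName(SpaceKind::kOld)); }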
@@ -317,21 +317,15 @@ HeapObject Heap::AllocateRawWith(int size, AllocationType allocation,
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
   DCHECK_EQ(gc_state(), NOT_IN_GC);
-  Heap* heap = isolate()->heap();
-  if (allocation == AllocationType::kYoung &&
-      alignment == AllocationAlignment::kTaggedAligned &&
-      size <= MaxRegularHeapObjectSize(allocation) &&
-      V8_LIKELY(!FLAG_single_generation && FLAG_inline_new &&
-                FLAG_gc_interval == -1)) {
-    Address* top = heap->NewSpaceAllocationTopAddress();
-    Address* limit = heap->NewSpaceAllocationLimitAddress();
-    if (*limit - *top >= static_cast<unsigned>(size)) {
-      DCHECK(IsAligned(size, kTaggedSize));
-      HeapObject obj = HeapObject::FromAddress(*top);
-      *top += size;
-      MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size);
-      return obj;
-    }
+  if (allocation == AllocationType::kYoung) {
+    auto result = AllocateRaw(size, AllocationType::kYoung, origin, alignment);
+    HeapObject object;
+    if (result.To(&object)) return object;
+  } else if (allocation == AllocationType::kOld) {
+    auto result = AllocateRaw(size, AllocationType::kOld, origin, alignment);
+    HeapObject object;
+    if (result.To(&object)) return object;
   }
   switch (mode) {
     case kLightRetry:
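With the hunk above, AllocateRawWith() now simply tries the inlined AllocateRaw() for both kYoung and kOld and only falls into the mode-specific retry handling when that fails; because the retry mode is a template parameter, the untaken branch is resolved at compile time. The sketch below illustrates that try-the-fast-path-then-dispatch-on-a-compile-time-mode pattern; every name in it is illustrative rather than V8's.

#include <cstddef>
#include <cstdlib>
#include <optional>

enum class RetryMode { kLightRetry, kRetryOrFail };
struct Object { void* ptr = nullptr; };

// Minimal stand-ins: the fast path gives up on oversized requests, the slow
// paths either return whatever they got (light retry) or abort (retry-or-fail).
inline std::optional<Object> TryAllocateFast(int size) {
  if (size > 4096) return std::nullopt;  // pretend the inline path ran out
  return Object{std::malloc(static_cast<std::size_t>(size))};
}
inline Object AllocateWithLightRetry(int size) {
  return Object{std::malloc(static_cast<std::size_t>(size))};
}
inline Object AllocateWithRetryOrFail(int size) {
  void* p = std::malloc(static_cast<std::size_t>(size));
  if (p == nullptr) std::abort();  // the "or fail" part
  return Object{p};
}

template <RetryMode mode>
Object Allocate(int size) {
  // Inlined fast path first; only on failure fall through to the slow path.
  if (auto fast = TryAllocateFast(size)) return *fast;
  // `mode` is a compile-time constant, so the other branch is compiled away.
  if constexpr (mode == RetryMode::kLightRetry) {
    return AllocateWithLightRetry(size);
  } else {
    return AllocateWithRetryOrFail(size);
  }
}

int main() {
  Object a = Allocate<RetryMode::kRetryOrFail>(64);
  Object b = Allocate<RetryMode::kLightRetry>(1 << 20);
  std::free(a.ptr);
  std::free(b.ptr);
  return 0;
}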
@@ -5648,6 +5648,26 @@ HeapObject Heap::AllocateRawWithLightRetrySlowPath(
   return HeapObject();
 }
 
+AllocationResult Heap::AllocateRawLargeInternal(int size_in_bytes,
+                                                AllocationType allocation,
+                                                AllocationOrigin origin,
+                                                AllocationAlignment alignment) {
+  DCHECK_GT(size_in_bytes, MaxRegularHeapObjectSize(allocation));
+  switch (allocation) {
+    case AllocationType::kYoung:
+      return new_lo_space_->AllocateRaw(size_in_bytes);
+    case AllocationType::kOld:
+      return lo_space_->AllocateRaw(size_in_bytes);
+    case AllocationType::kCode:
+      return code_lo_space_->AllocateRaw(size_in_bytes);
+    case AllocationType::kMap:
+    case AllocationType::kReadOnly:
+    case AllocationType::kSharedMap:
+    case AllocationType::kSharedOld:
+      UNREACHABLE();
+  }
+}
+
 HeapObject Heap::AllocateRawWithRetryOrFailSlowPath(
     int size, AllocationType allocation, AllocationOrigin origin,
     AllocationAlignment alignment) {
@@ -2089,8 +2089,15 @@ class Heap {
   // hardware and OS allow. This is the single choke-point for allocations
   // performed by the runtime and should not be bypassed (to extend this to
   // inlined allocations, use the Heap::DisableInlineAllocation() support).
-  V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
-      int size_in_bytes, AllocationType allocation,
-      AllocationOrigin origin = AllocationOrigin::kRuntime,
-      AllocationAlignment alignment = kTaggedAligned);
+  V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
+  AllocateRaw(int size_in_bytes, AllocationType allocation,
+              AllocationOrigin origin = AllocationOrigin::kRuntime,
+              AllocationAlignment alignment = kTaggedAligned);
+
+  // Allocates an uninitialized large object. Used as dispatch by
+  // `AllocateRaw()` for large objects. Do not call this from anywhere else.
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
+  AllocateRawLargeInternal(int size_in_bytes, AllocationType allocation,
+                           AllocationOrigin origin = AllocationOrigin::kRuntime,
+                           AllocationAlignment alignment = kTaggedAligned);
@@ -2098,8 +2105,8 @@
   // otherwise it falls back to a slower path indicated by the mode.
   enum AllocationRetryMode { kLightRetry, kRetryOrFail };
   template <AllocationRetryMode mode>
-  V8_WARN_UNUSED_RESULT inline HeapObject AllocateRawWith(
-      int size, AllocationType allocation,
-      AllocationOrigin origin = AllocationOrigin::kRuntime,
-      AllocationAlignment alignment = kTaggedAligned);
+  V8_WARN_UNUSED_RESULT V8_INLINE HeapObject
+  AllocateRawWith(int size, AllocationType allocation,
+                  AllocationOrigin origin = AllocationOrigin::kRuntime,
+                  AllocationAlignment alignment = kTaggedAligned);