Commit 18de64a1 authored by Michael Lippautz, committed by V8 LUCI CQ

heap: Inline fast paths for AllocateRaw() and AllocateRawWith()

- Both paths are now inlined.
- Outline large object allocation, shrinking the trampoline a bit.
- Support a fast path for AllocationType::kOld from AllocateRawWith().

Bug: v8:12615, chromium:1293284
Change-Id: I8f0b9aabc6fe47e1eee159c214403ccffea5eeab
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3456082
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79048}
parent a7a0b765
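
What inlining buys here is that callers of AllocateRaw()/AllocateRawWith() see the bump-pointer check directly instead of paying for an out-of-line call. A minimal, self-contained sketch of that fast-path shape, using made-up names (BumpPointerArea, top_, limit_) rather than V8's actual classes:

// Minimal sketch of a bump-pointer fast path, for illustration only; the
// class and field names are invented and do not mirror V8's layout.
#include <cstddef>
#include <cstdint>

class BumpPointerArea {
 public:
  BumpPointerArea(uintptr_t top, uintptr_t limit) : top_(top), limit_(limit) {}

  // Returns the address reserved for the new object, or 0 when the current
  // area is exhausted and a (non-inlined) slow path has to refill it.
  uintptr_t TryAllocate(size_t size_in_bytes) {
    if (limit_ - top_ < size_in_bytes) return 0;
    uintptr_t result = top_;
    top_ += size_in_bytes;
    return result;
  }

 private:
  uintptr_t top_;
  uintptr_t limit_;
};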
@@ -241,42 +241,42 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
   if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
     allocation = tp_heap_->Allocate(size_in_bytes, type, alignment);
   } else {
-    if (AllocationType::kYoung == type) {
-      if (large_object) {
-        allocation = new_lo_space_->AllocateRaw(size_in_bytes);
-      } else {
-        allocation = new_space_->AllocateRaw(size_in_bytes, alignment, origin);
-      }
-    } else if (AllocationType::kOld == type) {
-      if (large_object) {
-        allocation = lo_space_->AllocateRaw(size_in_bytes);
-      } else {
-        allocation = old_space_->AllocateRaw(size_in_bytes, alignment, origin);
-      }
-    } else if (AllocationType::kCode == type) {
-      DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
-      DCHECK(AllowCodeAllocation::IsAllowed());
-      if (large_object) {
-        allocation = code_lo_space_->AllocateRaw(size_in_bytes);
-      } else {
-        allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
-      }
-    } else if (AllocationType::kMap == type) {
-      DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
-      allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
-    } else if (AllocationType::kReadOnly == type) {
-      DCHECK(!large_object);
-      DCHECK(CanAllocateInReadOnlySpace());
-      DCHECK_EQ(AllocationOrigin::kRuntime, origin);
-      allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
-    } else if (AllocationType::kSharedOld == type) {
-      allocation =
-          shared_old_allocator_->AllocateRaw(size_in_bytes, alignment, origin);
-    } else if (AllocationType::kSharedMap == type) {
-      allocation =
-          shared_map_allocator_->AllocateRaw(size_in_bytes, alignment, origin);
-    } else {
-      UNREACHABLE();
+    if (V8_UNLIKELY(large_object)) {
+      allocation =
+          AllocateRawLargeInternal(size_in_bytes, type, origin, alignment);
+    } else {
+      switch (type) {
+        case AllocationType::kYoung:
+          allocation =
+              new_space_->AllocateRaw(size_in_bytes, alignment, origin);
+          break;
+        case AllocationType::kOld:
+          allocation =
+              old_space_->AllocateRaw(size_in_bytes, alignment, origin);
+          break;
+        case AllocationType::kCode:
+          DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
+          DCHECK(AllowCodeAllocation::IsAllowed());
+          allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
+          break;
+        case AllocationType::kMap:
+          DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
+          allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
+          break;
+        case AllocationType::kReadOnly:
+          DCHECK(CanAllocateInReadOnlySpace());
+          DCHECK_EQ(AllocationOrigin::kRuntime, origin);
+          allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
+          break;
+        case AllocationType::kSharedMap:
+          allocation = shared_map_allocator_->AllocateRaw(size_in_bytes,
+                                                          alignment, origin);
+          break;
+        case AllocationType::kSharedOld:
+          allocation = shared_old_allocator_->AllocateRaw(size_in_bytes,
+                                                          alignment, origin);
+          break;
+      }
     }
   }
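
The large-object branch above is wrapped in V8_UNLIKELY so the regular-object switch stays on the hot path. As a rough illustration of what such a hint typically expands to on GCC/Clang (V8's own macro definition is compiler-conditional and may differ in detail):

// Illustrative branch-prediction hint in the spirit of V8_UNLIKELY; the
// macro and function names here are made up for this sketch.
#if defined(__GNUC__) || defined(__clang__)
#define SKETCH_UNLIKELY(condition) (__builtin_expect(!!(condition), 0))
#else
#define SKETCH_UNLIKELY(condition) (condition)
#endif

// Keeps the small-object path hot and routes oversized requests to an
// out-of-line handler, mirroring the shape of the rewritten dispatch above.
bool FitsRegularPath(int size_in_bytes, int max_regular_size) {
  if (SKETCH_UNLIKELY(size_in_bytes > max_regular_size)) {
    return false;  // rare: would go to the outlined large-object allocator
  }
  return true;  // common: stays on the inlined fast path
}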
@@ -317,21 +317,15 @@ HeapObject Heap::AllocateRawWith(int size, AllocationType allocation,
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
   DCHECK_EQ(gc_state(), NOT_IN_GC);
-  Heap* heap = isolate()->heap();
-  if (allocation == AllocationType::kYoung &&
-      alignment == AllocationAlignment::kTaggedAligned &&
-      size <= MaxRegularHeapObjectSize(allocation) &&
-      V8_LIKELY(!FLAG_single_generation && FLAG_inline_new &&
-                FLAG_gc_interval == -1)) {
-    Address* top = heap->NewSpaceAllocationTopAddress();
-    Address* limit = heap->NewSpaceAllocationLimitAddress();
-    if (*limit - *top >= static_cast<unsigned>(size)) {
-      DCHECK(IsAligned(size, kTaggedSize));
-      HeapObject obj = HeapObject::FromAddress(*top);
-      *top += size;
-      MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size);
-      return obj;
-    }
+  if (allocation == AllocationType::kYoung) {
+    auto result = AllocateRaw(size, AllocationType::kYoung, origin, alignment);
+    HeapObject object;
+    if (result.To(&object)) return object;
+  } else if (allocation == AllocationType::kOld) {
+    auto result = AllocateRaw(size, AllocationType::kOld, origin, alignment);
+    HeapObject object;
+    if (result.To(&object)) return object;
   }
+
   switch (mode) {
     case kLightRetry:
......
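
The new fast path relies on the result.To(&object) pattern seen above: the allocation either yields an object or signals that the retry machinery below must run. A simplified stand-in for that result type, for illustration only (not V8's real AllocationResult):

// Simplified stand-in for an allocation result: either an address or failure.
#include <cstdint>

struct SketchAllocationResult {
  uintptr_t address = 0;  // 0 means the allocation failed and needs a retry

  // Mirrors the To(&object) usage pattern: writes the result only on success.
  bool To(uintptr_t* out) const {
    if (address == 0) return false;
    *out = address;
    return true;
  }
};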
@@ -5648,6 +5648,26 @@ HeapObject Heap::AllocateRawWithLightRetrySlowPath(
   return HeapObject();
 }
 
+AllocationResult Heap::AllocateRawLargeInternal(int size_in_bytes,
+                                                AllocationType allocation,
+                                                AllocationOrigin origin,
+                                                AllocationAlignment alignment) {
+  DCHECK_GT(size_in_bytes, MaxRegularHeapObjectSize(allocation));
+  switch (allocation) {
+    case AllocationType::kYoung:
+      return new_lo_space_->AllocateRaw(size_in_bytes);
+    case AllocationType::kOld:
+      return lo_space_->AllocateRaw(size_in_bytes);
+    case AllocationType::kCode:
+      return code_lo_space_->AllocateRaw(size_in_bytes);
+    case AllocationType::kMap:
+    case AllocationType::kReadOnly:
+    case AllocationType::kSharedMap:
+    case AllocationType::kSharedOld:
+      UNREACHABLE();
+  }
+}
+
 HeapObject Heap::AllocateRawWithRetryOrFailSlowPath(
     int size, AllocationType allocation, AllocationOrigin origin,
     AllocationAlignment alignment) {
......
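
One detail worth noting in the helper above: the switch over AllocationType lists every enumerator and has no default, so spaces without a large-object counterpart hit UNREACHABLE() and any newly added AllocationType triggers a compiler warning instead of silently falling through. A standalone illustration of that pattern with a made-up enum:

// Standalone illustration of a default-less, exhaustive switch; the enum and
// names are invented and not part of V8.
#include <cstdio>
#include <cstdlib>

enum class SpaceKind { kRegular, kLarge, kReadOnly };

const char* Describe(SpaceKind kind) {
  switch (kind) {
    case SpaceKind::kRegular:
      return "regular pages";
    case SpaceKind::kLarge:
      return "large-object pages";
    case SpaceKind::kReadOnly:
      std::abort();  // stands in for UNREACHABLE(): no large read-only objects
  }
  return nullptr;  // unreachable for valid enum values; silences -Wreturn-type
}

int main() { std::printf("%s\n", Describe(SpaceKind::kLarge)); }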
@@ -2089,8 +2089,15 @@ class Heap {
   // hardware and OS allow. This is the single choke-point for allocations
   // performed by the runtime and should not be bypassed (to extend this to
   // inlined allocations, use the Heap::DisableInlineAllocation() support).
-  V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
-      int size_in_bytes, AllocationType allocation,
-      AllocationOrigin origin = AllocationOrigin::kRuntime,
-      AllocationAlignment alignment = kTaggedAligned);
+  V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
+  AllocateRaw(int size_in_bytes, AllocationType allocation,
+              AllocationOrigin origin = AllocationOrigin::kRuntime,
+              AllocationAlignment alignment = kTaggedAligned);
+
+  // Allocates an uninitialized large object. Used as dispatch by
+  // `AllocateRaw()` for large objects. Do not call this from anywhere else.
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
+  AllocateRawLargeInternal(int size_in_bytes, AllocationType allocation,
+                           AllocationOrigin origin = AllocationOrigin::kRuntime,
+                           AllocationAlignment alignment = kTaggedAligned);
@@ -2098,8 +2105,8 @@ class Heap {
   // otherwise it falls back to a slower path indicated by the mode.
   enum AllocationRetryMode { kLightRetry, kRetryOrFail };
   template <AllocationRetryMode mode>
-  V8_WARN_UNUSED_RESULT inline HeapObject AllocateRawWith(
-      int size, AllocationType allocation,
-      AllocationOrigin origin = AllocationOrigin::kRuntime,
-      AllocationAlignment alignment = kTaggedAligned);
+  V8_WARN_UNUSED_RESULT V8_INLINE HeapObject
+  AllocateRawWith(int size, AllocationType allocation,
+                  AllocationOrigin origin = AllocationOrigin::kRuntime,
+                  AllocationAlignment alignment = kTaggedAligned);
......
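
For orientation, a hypothetical call site matching the template declaration above; in V8 such a call only compiles from a class that Heap befriends (for example Factory), since these allocation entry points are not public:

// Hypothetical call site, mirroring the declared signature; `heap` and
// `size_in_bytes` come from the (friended) caller. Illustration only.
HeapObject AllocateOldOrCrash(Heap* heap, int size_in_bytes) {
  return heap->AllocateRawWith<Heap::kRetryOrFail>(
      size_in_bytes, AllocationType::kOld, AllocationOrigin::kRuntime,
      AllocationAlignment::kTaggedAligned);
}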