Commit d89579b3 authored by Michael Lippautz, committed by V8 LUCI CQ

heap: Factor out AllocationResult from heap.h

Allows separating out the allocator from Heap without requiring a
heap.h include.

Drive-by:
- Rename "Retry" to "Failure".
- Avoid implicit constructors.
- Rename "RetrySpace" to "GarbageCollectionSpace" which is its only
  use.

Bug: v8:12615
Change-Id: Idac17cded8f0b2b645a2be9045ab31ffd71999b3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3456562
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79063}
parent 9ac64a9d
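
The renames are mechanical at call sites. A condensed before/after sketch
drawn from the hunks below (`result`, `space`, and `object` stand in for the
varying local names):

// Before: "retry" terminology, implicit construction from Object.
if (result.IsRetry()) return AllocationResult::Retry(space);
CollectGarbage(result.RetrySpace(),
               GarbageCollectionReason::kAllocationFailure);
return AllocationResult(object);

// After: "failure" terminology, explicit factory methods.
if (result.IsFailure()) return AllocationResult::Failure(space);
CollectGarbage(result.ToGarbageCollectionSpace(),
               GarbageCollectionReason::kAllocationFailure);
return AllocationResult::FromObject(object);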
@@ -1289,6 +1289,7 @@ filegroup(
         "src/handles/persistent-handles.h",
         "src/heap/allocation-observer.cc",
         "src/heap/allocation-observer.h",
+        "src/heap/allocation-result.h",
         "src/heap/allocation-stats.h",
         "src/heap/array-buffer-sweeper.cc",
         "src/heap/array-buffer-sweeper.h",
...
@@ -2947,6 +2947,7 @@ v8_header_set("v8_internal_headers") {
     "src/handles/maybe-handles.h",
     "src/handles/persistent-handles.h",
     "src/heap/allocation-observer.h",
+    "src/heap/allocation-result.h",
     "src/heap/allocation-stats.h",
     "src/heap/array-buffer-sweeper.h",
     "src/heap/barrier.h",
...
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_ALLOCATION_RESULT_H_
#define V8_HEAP_ALLOCATION_RESULT_H_

#include "src/common/globals.h"
#include "src/objects/heap-object.h"
#include "src/objects/objects.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

// The result of an allocation attempt. Either represents a successful
// allocation that can be turned into an object or a failed attempt.
class AllocationResult final {
 public:
  static AllocationResult Failure(AllocationSpace space) {
    return AllocationResult(space);
  }

  static AllocationResult FromObject(HeapObject heap_object) {
    return AllocationResult(heap_object);
  }

  // Empty constructor creates a failed result that will turn into a full
  // garbage collection.
  AllocationResult() : AllocationResult(AllocationSpace::OLD_SPACE) {}

  bool IsFailure() const { return object_.IsSmi(); }

  template <typename T>
  bool To(T* obj) const {
    if (IsFailure()) return false;
    *obj = T::cast(object_);
    return true;
  }

  HeapObject ToObjectChecked() const {
    CHECK(!IsFailure());
    return HeapObject::cast(object_);
  }

  HeapObject ToObject() const {
    DCHECK(!IsFailure());
    return HeapObject::cast(object_);
  }

  Address ToAddress() const {
    DCHECK(!IsFailure());
    return HeapObject::cast(object_).address();
  }

  // Returns the space that should be passed to a garbage collection call.
  AllocationSpace ToGarbageCollectionSpace() const {
    DCHECK(IsFailure());
    return static_cast<AllocationSpace>(Smi::ToInt(object_));
  }

 private:
  explicit AllocationResult(AllocationSpace space)
      : object_(Smi::FromInt(static_cast<int>(space))) {}

  explicit AllocationResult(HeapObject heap_object) : object_(heap_object) {}

  // Success stores the allocated HeapObject directly; failure stores the
  // AllocationSpace as a Smi, so both cases fit in one tagged word.
  Object object_;
};

STATIC_ASSERT(sizeof(AllocationResult) == kSystemPointerSize);

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_ALLOCATION_RESULT_H_
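
The STATIC_ASSERT above works because success and failure share one tagged
word. A minimal standalone sketch of that single-word encoding (illustrative
only, not V8 code; the tag-bit convention here is simplified and inverted
relative to V8's actual Smi tagging, where Smis carry a 0 low bit):

#include <cassert>
#include <cstdint>

enum class Space : int { kOldSpace = 0, kNewSpace = 1 };
struct Obj { int payload; };

class Result {
 public:
  static Result Failure(Space space) {
    // Failure: store the space id shifted up, with the low bit set as a tag.
    // Real object pointers are at least 2-byte aligned, so their low bit is 0.
    return Result((static_cast<uintptr_t>(space) << 1) | 1);
  }
  static Result FromObject(Obj* obj) {
    return Result(reinterpret_cast<uintptr_t>(obj));
  }

  bool IsFailure() const { return (word_ & 1) != 0; }

  Obj* ToObject() const {
    assert(!IsFailure());
    return reinterpret_cast<Obj*>(word_);
  }

  Space ToGarbageCollectionSpace() const {
    assert(IsFailure());
    return static_cast<Space>(word_ >> 1);
  }

 private:
  explicit Result(uintptr_t word) : word_(word) {}
  uintptr_t word_;  // Either an Obj* or a tagged Space id.
};

static_assert(sizeof(Result) == sizeof(void*), "stays pointer-sized");

int main() {
  Obj obj{42};
  Result ok = Result::FromObject(&obj);
  assert(!ok.IsFailure() && ok.ToObject()->payload == 42);

  Result fail = Result::Failure(Space::kNewSpace);
  assert(fail.IsFailure() &&
         fail.ToGarbageCollectionSpace() == Space::kNewSpace);
  return 0;
}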
@@ -37,11 +37,9 @@ AllocationResult ConcurrentAllocator::AllocateRaw(int object_size,
 AllocationResult ConcurrentAllocator::AllocateInLab(
     int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
   AllocationResult allocation = lab_.AllocateRawAligned(object_size, alignment);
-  if (allocation.IsRetry()) {
-    return AllocateInLabSlow(object_size, alignment, origin);
-  } else {
-    return allocation;
-  }
+  return allocation.IsFailure()
+             ? AllocateInLabSlow(object_size, alignment, origin)
+             : allocation;
 }

 }  // namespace internal
...
@@ -37,7 +37,7 @@ void StressConcurrentAllocatorTask::RunInternal() {
     AllocationResult result = local_heap.AllocateRaw(
         kSmallObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime,
         AllocationAlignment::kTaggedAligned);
-    if (!result.IsRetry()) {
+    if (!result.IsFailure()) {
       heap->CreateFillerObjectAtBackground(
           result.ToAddress(), kSmallObjectSize,
           ClearFreedMemoryMode::kDontClearFreedMemory);
@@ -48,7 +48,7 @@ void StressConcurrentAllocatorTask::RunInternal() {
     result = local_heap.AllocateRaw(kMediumObjectSize, AllocationType::kOld,
                                     AllocationOrigin::kRuntime,
                                     AllocationAlignment::kTaggedAligned);
-    if (!result.IsRetry()) {
+    if (!result.IsFailure()) {
       heap->CreateFillerObjectAtBackground(
           result.ToAddress(), kMediumObjectSize,
           ClearFreedMemoryMode::kDontClearFreedMemory);
@@ -59,7 +59,7 @@ void StressConcurrentAllocatorTask::RunInternal() {
     result = local_heap.AllocateRaw(kLargeObjectSize, AllocationType::kOld,
                                     AllocationOrigin::kRuntime,
                                     AllocationAlignment::kTaggedAligned);
-    if (!result.IsRetry()) {
+    if (!result.IsFailure()) {
       heap->CreateFillerObjectAtBackground(
           result.ToAddress(), kLargeObjectSize,
           ClearFreedMemoryMode::kDontClearFreedMemory);
@@ -122,11 +122,11 @@ void ConcurrentAllocator::UnmarkLinearAllocationArea() {
 AllocationResult ConcurrentAllocator::AllocateInLabSlow(
     int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
   if (!EnsureLab(origin)) {
-    return AllocationResult::Retry(space_->identity());
+    return AllocationResult::Failure(space_->identity());
   }
   AllocationResult allocation = lab_.AllocateRawAligned(object_size, alignment);
-  DCHECK(!allocation.IsRetry());
+  DCHECK(!allocation.IsFailure());
   return allocation;
 }
@@ -145,7 +145,7 @@ bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) {
   HeapObject object = HeapObject::FromAddress(result->first);
   LocalAllocationBuffer saved_lab = std::move(lab_);
   lab_ = LocalAllocationBuffer::FromResult(
-      space_->heap(), AllocationResult(object), result->second);
+      space_->heap(), AllocationResult::FromObject(object), result->second);
   DCHECK(lab_.IsValid());
   if (!lab_.TryMerge(&saved_lab)) {
     saved_lab.CloseAndMakeIterable();
@@ -157,7 +157,7 @@ AllocationResult ConcurrentAllocator::AllocateOutsideLab(
     int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
   auto result = space_->RawRefillLabBackground(local_heap_, object_size,
                                                object_size, alignment, origin);
-  if (!result) return AllocationResult::Retry(space_->identity());
+  if (!result) return AllocationResult::Failure(space_->identity());
   HeapObject object = HeapObject::FromAddress(result->first);
@@ -166,7 +166,7 @@ AllocationResult ConcurrentAllocator::AllocateOutsideLab(
                                          object_size);
   }
-  return AllocationResult(object);
+  return AllocationResult::FromObject(object);
 }

 bool ConcurrentAllocator::IsBlackAllocationEnabled() const {
...
@@ -66,26 +66,6 @@ T ForwardingAddress(T heap_obj) {
   }
 }

-AllocationSpace AllocationResult::RetrySpace() {
-  DCHECK(IsRetry());
-  return static_cast<AllocationSpace>(Smi::ToInt(object_));
-}
-
-HeapObject AllocationResult::ToObjectChecked() {
-  CHECK(!IsRetry());
-  return HeapObject::cast(object_);
-}
-
-HeapObject AllocationResult::ToObject() {
-  DCHECK(!IsRetry());
-  return HeapObject::cast(object_);
-}
-
-Address AllocationResult::ToAddress() {
-  DCHECK(!IsRetry());
-  return HeapObject::cast(object_).address();
-}
-
 // static
 base::EnumSet<CodeFlushMode> Heap::GetCodeFlushMode(Isolate* isolate) {
   if (isolate->disable_bytecode_flushing()) {
@@ -215,7 +195,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
   if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
     if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
       AllocationSpace space = FLAG_single_generation ? OLD_SPACE : NEW_SPACE;
-      return AllocationResult::Retry(space);
+      return AllocationResult::Failure(space);
     }
   }
 #endif  // V8_ENABLE_ALLOCATION_TIMEOUT
...
@@ -5636,7 +5636,7 @@ HeapObject Heap::AllocateRawWithLightRetrySlowPath(
     if (IsSharedAllocationType(allocation)) {
       CollectSharedGarbage(GarbageCollectionReason::kAllocationFailure);
     } else {
-      CollectGarbage(alloc.RetrySpace(),
+      CollectGarbage(alloc.ToGarbageCollectionSpace(),
                      GarbageCollectionReason::kAllocationFailure);
     }
     alloc = AllocateRaw(size, allocation, origin, alignment);
...
@@ -26,6 +26,7 @@
 #include "src/common/assert-scope.h"
 #include "src/common/globals.h"
 #include "src/heap/allocation-observer.h"
+#include "src/heap/allocation-result.h"
 #include "src/init/heap-symbols.h"
 #include "src/objects/allocation-site.h"
 #include "src/objects/fixed-array.h"
@@ -213,44 +214,6 @@ class StrongRootsEntry final {
   friend class Heap;
 };

-class AllocationResult {
- public:
-  static inline AllocationResult Retry(AllocationSpace space) {
-    return AllocationResult(space);
-  }
-
-  // Implicit constructor from Object.
-  AllocationResult(Object object)  // NOLINT
-      : object_(object) {
-    // AllocationResults can't return Smis, which are used to represent
-    // failure and the space to retry in.
-    CHECK(!object.IsSmi());
-  }
-
-  AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
-
-  inline bool IsRetry() { return object_.IsSmi(); }
-  inline HeapObject ToObjectChecked();
-  inline HeapObject ToObject();
-  inline Address ToAddress();
-  inline AllocationSpace RetrySpace();
-
-  template <typename T>
-  bool To(T* obj) {
-    if (IsRetry()) return false;
-    *obj = T::cast(object_);
-    return true;
-  }
-
- private:
-  explicit AllocationResult(AllocationSpace space)
-      : object_(Smi::FromInt(static_cast<int>(space))) {}
-
-  Object object_;
-};
-
-STATIC_ASSERT(sizeof(AllocationResult) == kSystemPointerSize);
-
 #ifdef DEBUG
 struct CommentStatistic {
   const char* comment;
...
@@ -135,11 +135,11 @@ AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size,
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGeneration(object_size) ||
       !heap()->ShouldExpandOldGenerationOnSlowAllocation()) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }

   LargePage* page = AllocateLargePage(object_size, executable);
-  if (page == nullptr) return AllocationResult::Retry(identity());
+  if (page == nullptr) return AllocationResult::Failure(identity());
   page->SetOldGenerationPageFlags(heap()->incremental_marking()->IsMarking());
   HeapObject object = page->GetObject();
   UpdatePendingObject(object);
@@ -156,7 +156,7 @@ AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size,
   heap()->NotifyOldGenerationExpansion(identity(), page);
   AdvanceAndInvokeAllocationObservers(object.address(),
                                       static_cast<size_t>(object_size));
-  return object;
+  return AllocationResult::FromObject(object);
 }

 AllocationResult OldLargeObjectSpace::AllocateRawBackground(
@@ -171,11 +171,11 @@ AllocationResult OldLargeObjectSpace::AllocateRawBackground(
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGenerationBackground(local_heap, object_size) ||
       !heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap)) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }

   LargePage* page = AllocateLargePage(object_size, executable);
-  if (page == nullptr) return AllocationResult::Retry(identity());
+  if (page == nullptr) return AllocationResult::Failure(identity());
   page->SetOldGenerationPageFlags(heap()->incremental_marking()->IsMarking());
   HeapObject object = page->GetObject();
   heap()->StartIncrementalMarkingIfAllocationLimitIsReachedBackground();
@@ -189,7 +189,7 @@ AllocationResult OldLargeObjectSpace::AllocateRawBackground(
   if (identity() == CODE_LO_SPACE) {
     heap()->isolate()->AddCodeMemoryChunk(page);
   }
-  return object;
+  return AllocationResult::FromObject(object);
 }

 LargePage* LargeObjectSpace::AllocateLargePage(int object_size,
@@ -483,16 +483,16 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) {
   // Do not allocate more objects if promoting the existing object would exceed
   // the old generation capacity.
   if (!heap()->CanExpandOldGeneration(SizeOfObjects())) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }

   // Allocation for the first object must succeed independent from the capacity.
   if (SizeOfObjects() > 0 && static_cast<size_t>(object_size) > Available()) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }

   LargePage* page = AllocateLargePage(object_size, NOT_EXECUTABLE);
-  if (page == nullptr) return AllocationResult::Retry(identity());
+  if (page == nullptr) return AllocationResult::Failure(identity());

   // The size of the first object may exceed the capacity.
   capacity_ = std::max(capacity_, SizeOfObjects());
@@ -513,7 +513,7 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) {
   DCHECK_EQ(page->owner_identity(), NEW_LO_SPACE);
   AdvanceAndInvokeAllocationObservers(result.address(),
                                       static_cast<size_t>(object_size));
-  return result;
+  return AllocationResult::FromObject(result);
 }

 size_t NewLargeObjectSpace::Available() { return capacity_ - SizeOfObjects(); }
...
@@ -84,15 +84,15 @@ AllocationResult EvacuationAllocator::AllocateInLAB(
     int object_size, AllocationAlignment alignment) {
   AllocationResult allocation;
   if (!new_space_lab_.IsValid() && !NewLocalAllocationBuffer()) {
-    return AllocationResult::Retry(OLD_SPACE);
+    return AllocationResult::Failure(OLD_SPACE);
   }
   allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
-  if (allocation.IsRetry()) {
+  if (allocation.IsFailure()) {
     if (!NewLocalAllocationBuffer()) {
-      return AllocationResult::Retry(OLD_SPACE);
+      return AllocationResult::Failure(OLD_SPACE);
     } else {
       allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
-      CHECK(!allocation.IsRetry());
+      CHECK(!allocation.IsFailure());
     }
   }
   return allocation;
@@ -102,7 +102,7 @@ bool EvacuationAllocator::NewLocalAllocationBuffer() {
   if (lab_allocation_will_fail_) return false;
   AllocationResult result =
       new_space_->AllocateRawSynchronized(kLabSize, kTaggedAligned);
-  if (result.IsRetry()) {
+  if (result.IsFailure()) {
     lab_allocation_will_fail_ = true;
     return false;
   }
...
@@ -72,7 +72,8 @@ Address LocalHeap::AllocateRawOrFail(int object_size, AllocationType type,
                                      AllocationAlignment alignment) {
   DCHECK(!FLAG_enable_third_party_heap);
   AllocationResult result = AllocateRaw(object_size, type, origin, alignment);
-  if (!result.IsRetry()) return result.ToObject().address();
+  HeapObject object;
+  if (result.To(&object)) return object.address();
   return PerformCollectionAndAllocateAgain(object_size, type, origin,
                                            alignment);
 }
...
@@ -398,7 +398,7 @@ Address LocalHeap::PerformCollectionAndAllocateAgain(
     AllocationResult result = AllocateRaw(object_size, type, origin, alignment);
-    if (!result.IsRetry()) {
+    if (!result.IsFailure()) {
       allocation_failed_ = false;
       main_thread_parked_ = false;
       return result.ToObjectChecked().address();
...
@@ -1757,7 +1757,7 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
     AllocationSpace space_allocated_in = NEW_SPACE;
     AllocationResult allocation = local_allocator_->Allocate(
         NEW_SPACE, size, AllocationOrigin::kGC, alignment);
-    if (allocation.IsRetry()) {
+    if (allocation.IsFailure()) {
       allocation = AllocateInOldSpace(size, alignment);
       space_allocated_in = OLD_SPACE;
     }
@@ -1771,7 +1771,7 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
                                       AllocationAlignment alignment) {
     AllocationResult allocation = local_allocator_->Allocate(
         OLD_SPACE, size_in_bytes, AllocationOrigin::kGC, alignment);
-    if (allocation.IsRetry()) {
+    if (allocation.IsFailure()) {
       heap_->FatalProcessOutOfMemory(
           "MarkCompactCollector: semi-space copy, fallback in old gen");
     }
...
@@ -102,17 +102,14 @@ AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
     result = AllocateFastUnaligned(size_in_bytes, origin);
   }

-  if (!result.IsRetry()) {
-    return result;
-  } else {
-    return AllocateRawSlow(size_in_bytes, alignment, origin);
-  }
+  return result.IsFailure() ? AllocateRawSlow(size_in_bytes, alignment, origin)
+                            : result;
 }

 AllocationResult NewSpace::AllocateFastUnaligned(int size_in_bytes,
                                                  AllocationOrigin origin) {
   if (!allocation_info_->CanIncrementTop(size_in_bytes)) {
-    return AllocationResult::Retry(NEW_SPACE);
+    return AllocationResult::Failure(NEW_SPACE);
   }
   HeapObject obj =
       HeapObject::FromAddress(allocation_info_->IncrementTop(size_in_bytes));
@@ -124,7 +121,7 @@ AllocationResult NewSpace::AllocateFastUnaligned(int size_in_bytes,
     UpdateAllocationOrigins(origin);
   }
-  return obj;
+  return AllocationResult::FromObject(obj);
 }

 AllocationResult NewSpace::AllocateFastAligned(
@@ -135,7 +132,7 @@ AllocationResult NewSpace::AllocateFastAligned(
   int aligned_size_in_bytes = size_in_bytes + filler_size;
   if (!allocation_info_->CanIncrementTop(aligned_size_in_bytes)) {
-    return AllocationResult::Retry(NEW_SPACE);
+    return AllocationResult::Failure(NEW_SPACE);
   }
   HeapObject obj = HeapObject::FromAddress(
       allocation_info_->IncrementTop(aligned_size_in_bytes));
@@ -153,7 +150,7 @@ AllocationResult NewSpace::AllocateFastAligned(
     UpdateAllocationOrigins(origin);
   }
-  return obj;
+  return AllocationResult::FromObject(obj);
 }

 V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
...
@@ -619,13 +619,13 @@ AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes,
                                                 AllocationOrigin origin) {
   DCHECK(!FLAG_enable_third_party_heap);
   if (!EnsureAllocation(size_in_bytes, kTaggedAligned)) {
-    return AllocationResult::Retry(NEW_SPACE);
+    return AllocationResult::Failure(NEW_SPACE);
   }

   DCHECK_EQ(allocation_info_->start(), allocation_info_->top());

   AllocationResult result = AllocateFastUnaligned(size_in_bytes, origin);
-  DCHECK(!result.IsRetry());
+  DCHECK(!result.IsFailure());

   InvokeAllocationObservers(result.ToAddress(), size_in_bytes, size_in_bytes,
                             size_in_bytes);
@@ -638,7 +638,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationOrigin origin) {
   DCHECK(!FLAG_enable_third_party_heap);
   if (!EnsureAllocation(size_in_bytes, alignment)) {
-    return AllocationResult::Retry(NEW_SPACE);
+    return AllocationResult::Failure(NEW_SPACE);
   }

   DCHECK_EQ(allocation_info_->start(), allocation_info_->top());
@@ -647,7 +647,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
   AllocationResult result = AllocateFastAligned(
       size_in_bytes, &aligned_size_in_bytes, alignment, origin);
-  DCHECK(!result.IsRetry());
+  DCHECK(!result.IsFailure());

   InvokeAllocationObservers(result.ToAddress(), size_in_bytes,
                             aligned_size_in_bytes, aligned_size_in_bytes);
...
@@ -95,9 +95,9 @@ bool PagedSpace::EnsureLabMain(int size_in_bytes, AllocationOrigin origin) {
 AllocationResult PagedSpace::AllocateFastUnaligned(int size_in_bytes) {
   if (!allocation_info_->CanIncrementTop(size_in_bytes)) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }
-  return AllocationResult(
+  return AllocationResult::FromObject(
       HeapObject::FromAddress(allocation_info_->IncrementTop(size_in_bytes)));
 }
@@ -108,7 +108,7 @@ AllocationResult PagedSpace::AllocateFastAligned(
   int filler_size = Heap::GetFillToAlign(current_top, alignment);
   int aligned_size = filler_size + size_in_bytes;
   if (!allocation_info_->CanIncrementTop(aligned_size)) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }
   HeapObject obj =
       HeapObject::FromAddress(allocation_info_->IncrementTop(aligned_size));
@@ -116,18 +116,18 @@ AllocationResult PagedSpace::AllocateFastAligned(
   if (filler_size > 0) {
     obj = heap()->PrecedeWithFiller(obj, filler_size);
   }
-  return AllocationResult(obj);
+  return AllocationResult::FromObject(obj);
 }

 AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes,
                                                   AllocationOrigin origin) {
   DCHECK(!FLAG_enable_third_party_heap);
   if (!EnsureLabMain(size_in_bytes, origin)) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }

   AllocationResult result = AllocateFastUnaligned(size_in_bytes);
-  DCHECK(!result.IsRetry());
+  DCHECK(!result.IsFailure());

   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(result.ToObjectChecked().address(),
                                       size_in_bytes);
@@ -152,12 +152,12 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
   int filler_size = Heap::GetMaximumFillToAlign(alignment);
   allocation_size += filler_size;
   if (!EnsureLabMain(allocation_size, origin)) {
-    return AllocationResult::Retry(identity());
+    return AllocationResult::Failure(identity());
   }

   int aligned_size_in_bytes;
   AllocationResult result =
       AllocateFastAligned(size_in_bytes, &aligned_size_in_bytes, alignment);
-  DCHECK(!result.IsRetry());
+  DCHECK(!result.IsFailure());

   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(result.ToObjectChecked().address(),
                                       size_in_bytes);
@@ -183,11 +183,8 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
     result = AllocateFastUnaligned(size_in_bytes);
   }

-  if (!result.IsRetry()) {
-    return result;
-  } else {
-    return AllocateRawSlow(size_in_bytes, alignment, origin);
-  }
+  return result.IsFailure() ? AllocateRawSlow(size_in_bytes, alignment, origin)
+                            : result;
 }

 }  // namespace internal
...
@@ -667,7 +667,7 @@ AllocationResult ReadOnlySpace::AllocateRawAligned(
   }

   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
-  return object;
+  return AllocationResult::FromObject(object);
 }

 AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) {
@@ -687,7 +687,7 @@ AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) {
   accounting_stats_.IncreaseAllocatedBytes(size_in_bytes, chunk);
   chunk->IncreaseAllocatedBytes(size_in_bytes);
-  return object;
+  return AllocationResult::FromObject(object);
 }

 AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes,
@@ -697,7 +697,7 @@ AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes,
                              ? AllocateRawAligned(size_in_bytes, alignment)
                              : AllocateRawUnaligned(size_in_bytes);
   HeapObject heap_obj;
-  if (!result.IsRetry() && result.To(&heap_obj)) {
+  if (result.To(&heap_obj)) {
     DCHECK(heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
   }
   return result;
...
@@ -153,7 +153,7 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
       Map::cast(result), instance_type, instance_size, elements_kind,
       inobject_properties);
-  return map;
+  return AllocationResult::FromObject(map);
 }

 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
@@ -184,7 +184,7 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
   DCHECK(!map.is_in_retained_map_list());
   map.clear_padding();
   map.set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
-  return map;
+  return AllocationResult::FromObject(map);
 }

 void Heap::FinalizePartialMap(Map map) {
@@ -208,7 +208,7 @@ AllocationResult Heap::Allocate(Handle<Map> map,
       allocation_type == AllocationType::kYoung ? SKIP_WRITE_BARRIER
                                                 : UPDATE_WRITE_BARRIER;
   result.set_map_after_allocation(*map, write_barrier_mode);
-  return result;
+  return AllocationResult::FromObject(result);
 }

 bool Heap::CreateInitialMaps() {
...
@@ -140,21 +140,19 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
   int filler_size = Heap::GetFillToAlign(current_top, alignment);
   int aligned_size = filler_size + size_in_bytes;
   if (!allocation_info_.CanIncrementTop(aligned_size)) {
-    return AllocationResult::Retry(NEW_SPACE);
+    return AllocationResult::Failure(NEW_SPACE);
   }
   HeapObject object =
       HeapObject::FromAddress(allocation_info_.IncrementTop(aligned_size));
-  if (filler_size > 0) {
-    return heap_->PrecedeWithFiller(object, filler_size);
-  }
-  return AllocationResult(object);
+  return filler_size > 0 ? AllocationResult::FromObject(
+                               heap_->PrecedeWithFiller(object, filler_size))
+                         : AllocationResult::FromObject(object);
 }

 LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                         AllocationResult result,
                                                         intptr_t size) {
-  if (result.IsRetry()) return InvalidBuffer();
+  if (result.IsFailure()) return InvalidBuffer();
   HeapObject obj;
   bool ok = result.To(&obj);
   USE(ok);
...
@@ -248,7 +248,7 @@ class LargeObjectConcurrentAllocationThread final : public v8::base::Thread {
       AllocationResult result = local_heap.AllocateRaw(
           kLargeObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime,
           AllocationAlignment::kTaggedAligned);
-      if (result.IsRetry()) {
+      if (result.IsFailure()) {
         local_heap.TryPerformCollection();
       } else {
         Address address = result.ToAddress();
...
@@ -1831,7 +1831,7 @@ TEST(TestAlignedOverAllocation) {
   heap::AbandonCurrentlyFreeMemory(heap->old_space());
   // Allocate a dummy object to properly set up the linear allocation info.
   AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
-  CHECK(!dummy.IsRetry());
+  CHECK(!dummy.IsFailure());
   heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize,
                              ClearRecordedSlots::kNo);
@@ -5387,7 +5387,7 @@ AllocationResult HeapTester::AllocateByteArrayForTest(
                                   SKIP_WRITE_BARRIER);
   ByteArray::cast(result).set_length(length);
   ByteArray::cast(result).clear_padding();
-  return result;
+  return AllocationResult::FromObject(result);
 }

 bool HeapTester::CodeEnsureLinearAllocationArea(Heap* heap, int size_in_bytes) {
...
@@ -68,7 +68,8 @@ TEST(UnusedLabImplicitClose) {
   std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                        expected_sizes_raw + 1);
   {
-    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
+    AllocationResult lab_backing_store =
+        AllocationResult::FromObject(HeapObject::FromAddress(base));
     LocalAllocationBuffer lab =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
     CHECK(lab.IsValid());
@@ -89,7 +90,8 @@ TEST(SimpleAllocate) {
   std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                        expected_sizes_raw + 2);
   {
-    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
+    AllocationResult lab_backing_store =
+        AllocationResult::FromObject(HeapObject::FromAddress(base));
     LocalAllocationBuffer lab =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
     CHECK(lab.IsValid());
@@ -115,7 +117,8 @@ TEST(AllocateUntilLabOOM) {
                                        expected_sizes_raw + 5);
   intptr_t sum = 0;
   {
-    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
+    AllocationResult lab_backing_store =
+        AllocationResult::FromObject(HeapObject::FromAddress(base));
     LocalAllocationBuffer lab =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
     CHECK(lab.IsValid());
@@ -142,7 +145,8 @@ TEST(AllocateExactlyUntilLimit) {
   std::vector<intptr_t> expected_sizes(expected_sizes_raw,
                                        expected_sizes_raw + 5);
   {
-    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
+    AllocationResult lab_backing_store =
+        AllocationResult::FromObject(HeapObject::FromAddress(base));
     LocalAllocationBuffer lab =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
     CHECK(lab.IsValid());
@@ -183,7 +187,8 @@ TEST(MergeSuccessful) {
                                         expected_sizes2_raw + 10);
   {
-    AllocationResult lab_backing_store1(HeapObject::FromAddress(base1));
+    AllocationResult lab_backing_store1 =
+        AllocationResult::FromObject(HeapObject::FromAddress(base1));
     LocalAllocationBuffer lab1 =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store1, kLabSize);
     CHECK(lab1.IsValid());
@@ -196,7 +201,8 @@ TEST(MergeSuccessful) {
       }
     }

-    AllocationResult lab_backing_store2(HeapObject::FromAddress(base2));
+    AllocationResult lab_backing_store2 =
+        AllocationResult::FromObject(HeapObject::FromAddress(base2));
     LocalAllocationBuffer lab2 =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store2, kLabSize);
     CHECK(lab2.IsValid());
@@ -225,17 +231,20 @@ TEST(MergeFailed) {
   Address base3 = base2 + kLabSize;
   {
-    AllocationResult lab_backing_store1(HeapObject::FromAddress(base1));
+    AllocationResult lab_backing_store1 =
+        AllocationResult::FromObject(HeapObject::FromAddress(base1));
     LocalAllocationBuffer lab1 =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store1, kLabSize);
     CHECK(lab1.IsValid());

-    AllocationResult lab_backing_store2(HeapObject::FromAddress(base2));
+    AllocationResult lab_backing_store2 =
+        AllocationResult::FromObject(HeapObject::FromAddress(base2));
     LocalAllocationBuffer lab2 =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store2, kLabSize);
     CHECK(lab2.IsValid());

-    AllocationResult lab_backing_store3(HeapObject::FromAddress(base3));
+    AllocationResult lab_backing_store3 =
+        AllocationResult::FromObject(HeapObject::FromAddress(base3));
     LocalAllocationBuffer lab3 =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store3, kLabSize);
     CHECK(lab3.IsValid());
@@ -261,7 +270,8 @@ TEST(AllocateAligned) {
                                        expected_sizes_raw + 4);
   {
-    AllocationResult lab_backing_store(HeapObject::FromAddress(base));
+    AllocationResult lab_backing_store =
+        AllocationResult::FromObject(HeapObject::FromAddress(base));
     LocalAllocationBuffer lab =
         LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);
     CHECK(lab.IsValid());
...
@@ -82,9 +82,9 @@ AllocationResult HeapTester::AllocateMapForTest(Isolate* isolate) {
   if (!alloc.To(&obj)) return alloc;
   obj.set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
                                SKIP_WRITE_BARRIER);
-  return isolate->factory()->InitializeMap(Map::cast(obj), JS_OBJECT_TYPE,
-                                           JSObject::kHeaderSize,
-                                           TERMINAL_FAST_ELEMENTS_KIND, 0);
+  return AllocationResult::FromObject(isolate->factory()->InitializeMap(
+      Map::cast(obj), JS_OBJECT_TYPE, JSObject::kHeaderSize,
+      TERMINAL_FAST_ELEMENTS_KIND, 0));
 }

 // This is the same as Factory::NewFixedArray, except it doesn't retry
@@ -104,7 +104,7 @@ AllocationResult HeapTester::AllocateFixedArrayForTest(
   array.set_length(length);
   MemsetTagged(array.data_start(), ReadOnlyRoots(heap).undefined_value(),
                length);
-  return array;
+  return AllocationResult::FromObject(array);
 }

 HEAP_TEST(MarkCompactCollector) {
@@ -128,7 +128,7 @@ HEAP_TEST(MarkCompactCollector) {
   do {
     allocation =
         AllocateFixedArrayForTest(heap, arraysize, AllocationType::kYoung);
-  } while (!allocation.IsRetry());
+  } while (!allocation.IsFailure());
   CcTest::CollectGarbage(NEW_SPACE);
   AllocateFixedArrayForTest(heap, arraysize, AllocationType::kYoung)
       .ToObjectChecked();
@@ -137,7 +137,7 @@ HEAP_TEST(MarkCompactCollector) {
   // keep allocating maps until it fails
   do {
     allocation = AllocateMapForTest(isolate);
-  } while (!allocation.IsRetry());
+  } while (!allocation.IsFailure());
   CcTest::CollectGarbage(MAP_SPACE);
   AllocateMapForTest(isolate).ToObjectChecked();
...
@@ -344,14 +344,14 @@ TEST(OldLargeObjectSpace) {
   while (true) {
     {
       AllocationResult allocation = lo->AllocateRaw(lo_size);
-      if (allocation.IsRetry()) break;
+      if (allocation.IsFailure()) break;
       ho = HeapObject::cast(allocation.ToObjectChecked());
       Handle<HeapObject> keep_alive(ho, isolate);
     }
   }

   CHECK(!lo->IsEmpty());
-  CHECK(lo->AllocateRaw(lo_size).IsRetry());
+  CHECK(lo->AllocateRaw(lo_size).IsFailure());
 }

 #ifndef DEBUG
@@ -411,7 +411,7 @@ TEST(SizeOfInitialHeap) {
 static HeapObject AllocateUnaligned(NewSpace* space, int size) {
   AllocationResult allocation = space->AllocateRaw(size, kTaggedAligned);
-  CHECK(!allocation.IsRetry());
+  CHECK(!allocation.IsFailure());
   HeapObject filler;
   CHECK(allocation.To(&filler));
   space->heap()->CreateFillerObjectAt(filler.address(), size,
@@ -421,7 +421,7 @@ static HeapObject AllocateUnaligned(NewSpace* space, int size) {
 static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
   AllocationResult allocation = space->AllocateRaw(size, kTaggedAligned);
-  CHECK(!allocation.IsRetry());
+  CHECK(!allocation.IsFailure());
   HeapObject filler;
   CHECK(allocation.To(&filler));
   space->heap()->CreateFillerObjectAt(filler.address(), size,
@@ -431,7 +431,7 @@ static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
 static HeapObject AllocateUnaligned(OldLargeObjectSpace* space, int size) {
   AllocationResult allocation = space->AllocateRaw(size);
-  CHECK(!allocation.IsRetry());
+  CHECK(!allocation.IsFailure());
   HeapObject filler;
   CHECK(allocation.To(&filler));
   return filler;
...