Commit cbc18b18 authored by Hannes Payer, committed by Commit Bot

[heap] Introduce AllocationType and use it in Heap::AllocateRaw.

Bug: v8:8945
Change-Id: I4e5f08a722e83fd8b4accb066eca50242a116a6f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1503452
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60029}
parent 037ff2b7
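
Editor's note: the change is a mechanical refactoring of the allocation entry point. Callers stop naming a physical AllocationSpace and instead state a logical AllocationType; Heap::AllocateRaw derives the backing space itself. A standalone, compilable sketch of the idea (all names are local to this sketch, not V8's real headers):

    #include <cstdio>

    // Toy model: callers pass a logical allocation type; the heap derives
    // the physical space. Before this commit a caller wrote
    // AllocateRaw(size, OLD_SPACE); after it, AllocateRaw(size,
    // AllocationType::kOld), with a mapping like SpaceFor below inside Heap.
    enum AllocationSpace { NEW_SPACE, OLD_SPACE };
    enum class AllocationType { kYoung, kOld };

    AllocationSpace SpaceFor(AllocationType type) {
      return type == AllocationType::kYoung ? NEW_SPACE : OLD_SPACE;
    }

    int main() {
      std::printf("kOld -> space %d\n", SpaceFor(AllocationType::kOld));
      return 0;
    }
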
@@ -666,6 +666,14 @@ enum AllocationSpace {
 constexpr int kSpaceTagSize = 4;
 STATIC_ASSERT(FIRST_SPACE == 0);
+enum class AllocationType {
+  kYoung,    // Regular object allocated in NEW_SPACE or NEW_LO_SPACE
+  kOld,      // Regular object allocated in OLD_SPACE or LO_SPACE
+  kCode,     // Code object allocated in CODE_SPACE or CODE_LO_SPACE
+  kMap,      // Map object allocated in MAP_SPACE
+  kReadOnly  // Object allocated in RO_SPACE
+};
 // TODO(ishell): review and rename kWordAligned to kTaggedAligned.
 enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
......
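
Editor's note: the comments on the enumerators pin down which space(s) each type may land in. A self-contained sketch of that mapping as a table-like function, using toy enums that mirror the comments above (not the real v8 headers):

    // Each AllocationType owns a regular space and, where one exists, a
    // large-object space; kMap and kReadOnly have no large-object variant.
    enum AllocationSpace { NEW_SPACE, NEW_LO_SPACE, OLD_SPACE, LO_SPACE,
                           CODE_SPACE, CODE_LO_SPACE, MAP_SPACE, RO_SPACE };
    enum class AllocationType { kYoung, kOld, kCode, kMap, kReadOnly };

    struct SpacePair { AllocationSpace regular, large; };

    constexpr SpacePair SpacesFor(AllocationType type) {
      switch (type) {
        case AllocationType::kYoung:    return {NEW_SPACE, NEW_LO_SPACE};
        case AllocationType::kOld:      return {OLD_SPACE, LO_SPACE};
        case AllocationType::kCode:     return {CODE_SPACE, CODE_LO_SPACE};
        case AllocationType::kMap:      return {MAP_SPACE, MAP_SPACE};
        case AllocationType::kReadOnly: return {RO_SPACE, RO_SPACE};
      }
      return {OLD_SPACE, LO_SPACE};  // unreachable for valid enum values
    }

    static_assert(SpacesFor(AllocationType::kCode).regular == CODE_SPACE, "");
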
@@ -381,8 +381,9 @@ MaybeHandle<FixedArray> Factory::TryNewFixedArray(int length,
   int size = FixedArray::SizeFor(length);
   AllocationSpace space = Heap::SelectSpace(pretenure);
+  AllocationType type = Heap::SelectType(space);
   Heap* heap = isolate()->heap();
-  AllocationResult allocation = heap->AllocateRaw(size, space);
+  AllocationResult allocation = heap->AllocateRaw(size, type);
   HeapObject result;
   if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
   if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
......
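
Editor's note: call sites like this one are migrated mechanically during the transition. The legacy PretenureFlag still picks an AllocationSpace via SelectSpace, and the new SelectType shim (added further down in heap.h) folds that space back into an AllocationType at the AllocateRaw boundary. A toy end-to-end version of the two-step translation, with all names local to the sketch:

    enum PretenureFlag { NOT_TENURED, TENURED };
    enum AllocationSpace { NEW_SPACE, OLD_SPACE };
    enum class AllocationType { kYoung, kOld };

    // Step 1 (pre-existing): the pretenure decision picks a space.
    AllocationSpace SelectSpace(PretenureFlag pretenure) {
      return pretenure == TENURED ? OLD_SPACE : NEW_SPACE;
    }

    // Step 2 (this commit's transitional shim): the space is folded back
    // into a type; the heap.h TODO says PretenureFlag should eventually be
    // replaced by AllocationType directly, removing both steps.
    AllocationType SelectType(AllocationSpace space) {
      return space == OLD_SPACE ? AllocationType::kOld
                                : AllocationType::kYoung;
    }
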
@@ -158,7 +158,7 @@ size_t Heap::NewSpaceAllocationCounter() {
   return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
 }
-AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
+AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
                                    AllocationAlignment alignment) {
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
@@ -166,7 +166,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
   if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
     if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
-      return AllocationResult::Retry(space);
+      return AllocationResult::Retry();
     }
   }
 #endif
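
Editor's note: Retry(space) becomes Retry() here because the failing space no longer arrives as a parameter. A minimal self-contained model of the result type's two states (a sketch of the idea only; V8's actual AllocationResult also remembers a retry space for the GC that follows):

    #include <cassert>

    // Toy two-state allocation result: success carries a pointer, failure
    // asks the caller to retry (typically after a garbage collection).
    class AllocationResult {
     public:
      static AllocationResult Retry() { return AllocationResult(nullptr); }
      explicit AllocationResult(void* object) : object_(object) {}

      // Returns true and stores the object on success, false on retry.
      bool To(void** out) const {
        if (object_ == nullptr) return false;
        *out = object_;
        return true;
      }

     private:
      void* object_;
    };

    int main() {
      void* obj = nullptr;
      assert(!AllocationResult::Retry().To(&obj));  // retry: no object
      int dummy = 0;
      assert(AllocationResult(&dummy).To(&obj) && obj == &dummy);
      return 0;
    }
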
@@ -178,44 +178,36 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   HeapObject object;
   AllocationResult allocation;
-  if (NEW_SPACE == space) {
+  if (AllocationType::kYoung == type) {
     if (large_object) {
-      // TODO(hpayer): Implement a LO tenuring strategy.
-      space = FLAG_young_generation_large_objects ? NEW_LO_SPACE : LO_SPACE;
+      if (FLAG_young_generation_large_objects) {
+        allocation = new_lo_space_->AllocateRaw(size_in_bytes);
+      } else {
+        // If young generation large objects are disabled we have to tenure
+        // the allocation and violate the given allocation type. This could
+        // be dangerous. We may want to remove
+        // FLAG_young_generation_large_objects and avoid patching.
+        allocation = lo_space_->AllocateRaw(size_in_bytes);
+      }
     } else {
       allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
       if (allocation.To(&object)) {
         OnAllocationEvent(object, size_in_bytes);
       }
       return allocation;
     }
-  }
-  // Here we only allocate in the old generation.
-  if (OLD_SPACE == space) {
+  } else if (AllocationType::kOld == type) {
     if (large_object) {
       allocation = lo_space_->AllocateRaw(size_in_bytes);
     } else {
       allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
     }
-  } else if (CODE_SPACE == space) {
+  } else if (AllocationType::kCode == type) {
     if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
       allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
     } else {
      allocation = code_lo_space_->AllocateRaw(size_in_bytes);
     }
-  } else if (LO_SPACE == space) {
-    DCHECK(large_object);
-    allocation = lo_space_->AllocateRaw(size_in_bytes);
-  } else if (NEW_LO_SPACE == space) {
-    DCHECK(FLAG_young_generation_large_objects);
-    allocation = new_lo_space_->AllocateRaw(size_in_bytes);
-  } else if (CODE_LO_SPACE == space) {
-    DCHECK(large_object);
-    allocation = code_lo_space_->AllocateRaw(size_in_bytes);
-  } else if (MAP_SPACE == space) {
+  } else if (AllocationType::kMap == type) {
     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
-  } else if (RO_SPACE == space) {
+  } else if (AllocationType::kReadOnly == type) {
 #ifdef V8_USE_SNAPSHOT
     DCHECK(isolate_->serializer_enabled());
 #endif
@@ -223,12 +215,11 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     DCHECK(CanAllocateInReadOnlySpace());
     allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
   } else {
-    // NEW_SPACE is not allowed here.
     UNREACHABLE();
   }
   if (allocation.To(&object)) {
-    if (space == CODE_SPACE) {
+    if (AllocationType::kCode == type) {
       // Unprotect the memory chunk of the object if it was not unprotected
       // already.
       UnprotectAndRegisterMemoryChunk(object);
......
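
Editor's note: the rewritten body of Heap::AllocateRaw is, at heart, a function from (allocation type, size class, flags) to a backing space, with one deliberate exception called out in the diff: young large objects are tenured into LO_SPACE when FLAG_young_generation_large_objects is off, violating the requested type. A compilable toy model of that control flow (names are local to the sketch; the real kCode branch also compares size_in_bytes against the code space's area size, which this sketch folds into large_object):

    #include <cassert>

    enum Space { kNewSpace, kNewLoSpace, kOldSpace, kLoSpace,
                 kCodeSpace, kCodeLoSpace, kMapSpace, kRoSpace };
    enum class Type { kYoung, kOld, kCode, kMap, kReadOnly };

    // Mirrors the new if/else chain: type plus size class select the space.
    Space Dispatch(Type type, bool large_object,
                   bool young_gen_large_objects) {
      switch (type) {
        case Type::kYoung:
          if (!large_object) return kNewSpace;
          // Tenure young large objects when the flag is off (the diff's
          // "dangerous" fallback).
          return young_gen_large_objects ? kNewLoSpace : kLoSpace;
        case Type::kOld:
          return large_object ? kLoSpace : kOldSpace;
        case Type::kCode:
          return large_object ? kCodeLoSpace : kCodeSpace;
        case Type::kMap:
          return kMapSpace;
        case Type::kReadOnly:
          return kRoSpace;
      }
      assert(false);  // mirrors UNREACHABLE() in the diff
      return kOldSpace;
    }
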
@@ -4428,7 +4428,8 @@ HeapObject Heap::EnsureImmovableCode(HeapObject heap_object, int object_size) {
 HeapObject Heap::AllocateRawWithLightRetry(int size, AllocationSpace space,
                                            AllocationAlignment alignment) {
   HeapObject result;
-  AllocationResult alloc = AllocateRaw(size, space, alignment);
+  AllocationResult alloc =
+      AllocateRaw(size, Heap::SelectType(space), alignment);
   if (alloc.To(&result)) {
     DCHECK(result != ReadOnlyRoots(this).exception());
     return result;
@@ -4437,7 +4438,7 @@ HeapObject Heap::AllocateRawWithLightRetry(int size, AllocationSpace space,
   for (int i = 0; i < 2; i++) {
     CollectGarbage(alloc.RetrySpace(),
                    GarbageCollectionReason::kAllocationFailure);
-    alloc = AllocateRaw(size, space, alignment);
+    alloc = AllocateRaw(size, Heap::SelectType(space), alignment);
     if (alloc.To(&result)) {
       DCHECK(result != ReadOnlyRoots(this).exception());
       return result;
@@ -4456,7 +4457,7 @@ HeapObject Heap::AllocateRawWithRetryOrFail(int size, AllocationSpace space,
   CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
   {
     AlwaysAllocateScope scope(isolate());
-    alloc = AllocateRaw(size, space, alignment);
+    alloc = AllocateRaw(size, Heap::SelectType(space), alignment);
   }
   if (alloc.To(&result)) {
     DCHECK(result != ReadOnlyRoots(this).exception());
......
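
Editor's note: the retry wrappers keep their AllocationSpace parameter and convert at the call, so only the innermost AllocateRaw changes; the retry discipline itself is untouched. A self-contained sketch of the light-retry shape, with AllocateFn/CollectFn standing in for Heap::AllocateRaw and Heap::CollectGarbage:

    #include <optional>

    // Try once; on failure run a GC and retry a bounded number of times
    // (the bound of 2 matches the loop in the diff). The caller falls back
    // to a last-resort full GC if this returns nullopt.
    template <typename AllocateFn, typename CollectFn>
    std::optional<void*> AllocateWithLightRetry(AllocateFn allocate,
                                                CollectFn collect_garbage) {
      if (void* result = allocate()) return result;
      for (int i = 0; i < 2; i++) {
        collect_garbage();
        if (void* result = allocate()) return result;
      }
      return std::nullopt;
    }

    int main() {
      int attempts = 0;
      auto allocate = [&]() -> void* {
        return ++attempts < 3 ? nullptr : &attempts;  // succeed on try 3
      };
      auto gc = [] { /* stand-in for CollectGarbage */ };
      return AllocateWithLightRetry(allocate, gc).has_value() ? 0 : 1;
    }
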
@@ -1396,6 +1396,32 @@ class Heap {
     }
   }
 
+  // TODO(hpayer): Remove this translation function as soon as all code is
+  // converted to use AllocationType. Also remove PretenureFlag and use
+  // AllocationType instead.
+  static AllocationType SelectType(AllocationSpace space) {
+    switch (space) {
+      case NEW_SPACE:
+        return AllocationType::kYoung;
+      case NEW_LO_SPACE:
+        return AllocationType::kYoung;
+      case OLD_SPACE:
+        return AllocationType::kOld;
+      case LO_SPACE:
+        return AllocationType::kOld;
+      case CODE_SPACE:
+        return AllocationType::kCode;
+      case CODE_LO_SPACE:
+        return AllocationType::kCode;
+      case MAP_SPACE:
+        return AllocationType::kMap;
+      case RO_SPACE:
+        return AllocationType::kReadOnly;
+      default:
+        UNREACHABLE();
+    }
+  }
+
   static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
     return 0;
   }
@@ -1685,7 +1711,7 @@ class Heap {
   // performed by the runtime and should not be bypassed (to extend this to
   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
-      int size_in_bytes, AllocationSpace space,
+      int size_in_bytes, AllocationType type,
       AllocationAlignment alignment = kWordAligned);
 
   // This method will try to perform an allocation of a given size in a given
......
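
Editor's note: SelectType is explicitly a transitional shim, and it is lossy by design: a regular space and its large-object counterpart collapse onto the same AllocationType, so the size class AllocateRaw used to receive via the space is now recomputed from size_in_bytes. A toy restatement of that property (enums and SelectType local to this sketch):

    enum AllocationSpace { NEW_SPACE, NEW_LO_SPACE, OLD_SPACE, LO_SPACE };
    enum class AllocationType { kYoung, kOld };

    constexpr AllocationType SelectType(AllocationSpace space) {
      return (space == OLD_SPACE || space == LO_SPACE)
                 ? AllocationType::kOld
                 : AllocationType::kYoung;
    }

    // Regular and large-object spaces of a generation map to one type.
    static_assert(SelectType(NEW_SPACE) == SelectType(NEW_LO_SPACE), "");
    static_assert(SelectType(OLD_SPACE) == SelectType(LO_SPACE), "");
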
@@ -110,7 +110,8 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
   // JSObjects have maps with a mutable prototype_validity_cell, so they cannot
   // go in RO_SPACE.
   AllocationResult allocation =
-      AllocateRaw(Map::kSize, is_js_object ? MAP_SPACE : RO_SPACE);
+      AllocateRaw(Map::kSize, is_js_object ? AllocationType::kMap
+                                           : AllocationType::kReadOnly);
   if (!allocation.To(&result)) return allocation;
   result->set_map_after_allocation(ReadOnlyRoots(this).meta_map(),
@@ -125,7 +126,8 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                                           int instance_size) {
   Object result;
-  AllocationResult allocation = AllocateRaw(Map::kSize, RO_SPACE);
+  AllocationResult allocation =
+      AllocateRaw(Map::kSize, AllocationType::kReadOnly);
   if (!allocation.To(&result)) return allocation;
   // Map::cast cannot be used due to uninitialized map field.
   Map map = Map::unchecked_cast(result);
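
Editor's note: both map-allocation hunks restate the space choice in the new vocabulary: JSObject maps carry a mutable prototype_validity_cell and therefore cannot live in read-only space, so they get AllocationType::kMap, while other maps may be AllocationType::kReadOnly. A one-function sketch of that predicate (toy enum, not V8 API):

    enum class AllocationType { kMap, kReadOnly };

    // Per the comment in the diff above: mutable JSObject maps must stay
    // out of the read-only space.
    AllocationType MapAllocationType(bool is_js_object) {
      return is_js_object ? AllocationType::kMap
                          : AllocationType::kReadOnly;
    }
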
@@ -172,7 +174,7 @@ AllocationResult Heap::Allocate(Map map, AllocationSpace space) {
   DCHECK(map->instance_type() != MAP_TYPE);
   int size = map->instance_size();
   HeapObject result;
-  AllocationResult allocation = AllocateRaw(size, space);
+  AllocationResult allocation = AllocateRaw(size, Heap::SelectType(space));
   if (!allocation.To(&result)) return allocation;
   // New space objects are allocated white.
   WriteBarrierMode write_barrier_mode =
@@ -187,7 +189,7 @@ AllocationResult Heap::AllocateEmptyFixedTypedArray(
   HeapObject object;
   AllocationResult allocation = AllocateRaw(
-      size, RO_SPACE,
+      size, AllocationType::kReadOnly,
       array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
   if (!allocation.To(&object)) return allocation;
@@ -243,7 +245,8 @@ bool Heap::CreateInitialMaps() {
   // Allocate the empty array.
   {
-    AllocationResult alloc = AllocateRaw(FixedArray::SizeFor(0), RO_SPACE);
+    AllocationResult alloc =
+        AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly);
     if (!alloc.To(&obj)) return false;
     obj->set_map_after_allocation(roots.fixed_array_map(), SKIP_WRITE_BARRIER);
     FixedArray::cast(obj)->set_length(0);
@@ -251,7 +254,8 @@ bool Heap::CreateInitialMaps() {
   set_empty_fixed_array(FixedArray::cast(obj));
   {
-    AllocationResult alloc = AllocateRaw(WeakFixedArray::SizeFor(0), RO_SPACE);
+    AllocationResult alloc =
+        AllocateRaw(WeakFixedArray::SizeFor(0), AllocationType::kReadOnly);
     if (!alloc.To(&obj)) return false;
     obj->set_map_after_allocation(roots.weak_fixed_array_map(),
                                   SKIP_WRITE_BARRIER);
@@ -260,8 +264,8 @@ bool Heap::CreateInitialMaps() {
   set_empty_weak_fixed_array(WeakFixedArray::cast(obj));
   {
-    AllocationResult allocation =
-        AllocateRaw(WeakArrayList::SizeForCapacity(0), RO_SPACE);
+    AllocationResult allocation = AllocateRaw(WeakArrayList::SizeForCapacity(0),
+                                              AllocationType::kReadOnly);
     if (!allocation.To(&obj)) return false;
     obj->set_map_after_allocation(roots.weak_array_list_map(),
                                   SKIP_WRITE_BARRIER);
@@ -314,7 +318,7 @@ bool Heap::CreateInitialMaps() {
   // Allocate the empty descriptor array.
   {
     int size = DescriptorArray::SizeFor(0);
-    if (!AllocateRaw(size, RO_SPACE).To(&obj)) return false;
+    if (!AllocateRaw(size, AllocationType::kReadOnly).To(&obj)) return false;
     obj->set_map_after_allocation(roots.descriptor_array_map(),
                                   SKIP_WRITE_BARRIER);
     DescriptorArray array = DescriptorArray::cast(obj);
@@ -427,7 +431,7 @@ bool Heap::CreateInitialMaps() {
   {
     // The invalid_prototype_validity_cell is needed for JSObject maps.
     Smi value = Smi::FromInt(Map::kPrototypeChainInvalid);
-    AllocationResult alloc = AllocateRaw(Cell::kSize, OLD_SPACE);
+    AllocationResult alloc = AllocateRaw(Cell::kSize, AllocationType::kOld);
     if (!alloc.To(&obj)) return false;
     obj->set_map_after_allocation(roots.cell_map(), SKIP_WRITE_BARRIER);
     Cell::cast(obj)->set_value(value);
@@ -519,7 +523,8 @@ bool Heap::CreateInitialMaps() {
   }
   {
-    AllocationResult alloc = AllocateRaw(FixedArray::SizeFor(0), RO_SPACE);
+    AllocationResult alloc =
+        AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly);
     if (!alloc.To(&obj)) return false;
     obj->set_map_after_allocation(roots.scope_info_map(), SKIP_WRITE_BARRIER);
     FixedArray::cast(obj)->set_length(0);
@@ -528,7 +533,8 @@ bool Heap::CreateInitialMaps() {
   {
     // Empty boilerplate needs a field for literal_flags
-    AllocationResult alloc = AllocateRaw(FixedArray::SizeFor(1), RO_SPACE);
+    AllocationResult alloc =
+        AllocateRaw(FixedArray::SizeFor(1), AllocationType::kReadOnly);
     if (!alloc.To(&obj)) return false;
     obj->set_map_after_allocation(roots.object_boilerplate_description_map(),
                                   SKIP_WRITE_BARRIER);
@@ -570,14 +576,16 @@ bool Heap::CreateInitialMaps() {
   // Empty arrays.
   {
-    if (!AllocateRaw(ByteArray::SizeFor(0), RO_SPACE).To(&obj)) return false;
+    if (!AllocateRaw(ByteArray::SizeFor(0), AllocationType::kReadOnly).To(&obj))
+      return false;
     obj->set_map_after_allocation(roots.byte_array_map(), SKIP_WRITE_BARRIER);
     ByteArray::cast(obj)->set_length(0);
     set_empty_byte_array(ByteArray::cast(obj));
   }
   {
-    if (!AllocateRaw(FixedArray::SizeFor(0), RO_SPACE).To(&obj)) {
+    if (!AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly)
+             .To(&obj)) {
       return false;
     }
     obj->set_map_after_allocation(roots.property_array_map(),
......
@@ -49,15 +49,16 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
   AlwaysAllocateScope scope(CcTest::i_isolate());
   Heap* heap = CcTest::heap();
   int size = FixedArray::SizeFor(100);
-  // New space.
-  HeapObject obj = heap->AllocateRaw(size, NEW_SPACE).ToObjectChecked();
+  // Young generation.
+  HeapObject obj =
+      heap->AllocateRaw(size, AllocationType::kYoung).ToObjectChecked();
   // In order to pass heap verification on Isolate teardown, mark the
   // allocated area as a filler.
   heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
-  // Old space.
+  // Old generation.
   heap::SimulateFullSpace(heap->old_space());
-  obj = heap->AllocateRaw(size, OLD_SPACE).ToObjectChecked();
+  obj = heap->AllocateRaw(size, AllocationType::kOld).ToObjectChecked();
   heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   // Large object space.
@@ -68,24 +69,24 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
   CHECK_GT(kLargeObjectSpaceFillerSize,
            static_cast<size_t>(heap->old_space()->AreaSize()));
   while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
-    obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, OLD_SPACE)
+    obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
              .ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   }
-  obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, OLD_SPACE)
+  obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
            .ToObjectChecked();
   heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   // Map space.
   heap::SimulateFullSpace(heap->map_space());
-  obj = heap->AllocateRaw(Map::kSize, MAP_SPACE).ToObjectChecked();
+  obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
   heap->CreateFillerObjectAt(obj->address(), Map::kSize,
                              ClearRecordedSlots::kNo);
   // Code space.
   heap::SimulateFullSpace(heap->code_space());
   size = CcTest::i_isolate()->builtins()->builtin(Builtins::kIllegal)->Size();
-  obj = heap->AllocateRaw(size, CODE_SPACE).ToObjectChecked();
+  obj = heap->AllocateRaw(size, AllocationType::kCode).ToObjectChecked();
   heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   return CcTest::i_isolate()->factory()->true_value();
 }
......
@@ -5367,7 +5367,8 @@ AllocationResult HeapTester::AllocateByteArrayForTest(Heap* heap, int length,
   AllocationSpace space = heap->SelectSpace(pretenure);
   HeapObject result;
   {
-    AllocationResult allocation = heap->AllocateRaw(size, space);
+    AllocationResult allocation =
+        heap->AllocateRaw(size, Heap::SelectType(space));
     if (!allocation.To(&result)) return allocation;
   }
......
@@ -101,7 +101,7 @@ HEAP_TEST(NoPromotion) {
 AllocationResult HeapTester::AllocateMapForTest(Isolate* isolate) {
   Heap* heap = isolate->heap();
   HeapObject obj;
-  AllocationResult alloc = heap->AllocateRaw(Map::kSize, MAP_SPACE);
+  AllocationResult alloc = heap->AllocateRaw(Map::kSize, AllocationType::kMap);
   if (!alloc.To(&obj)) return alloc;
   obj->set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
                                 SKIP_WRITE_BARRIER);
@@ -119,7 +119,7 @@ AllocationResult HeapTester::AllocateFixedArrayForTest(
   AllocationSpace space = heap->SelectSpace(pretenure);
   HeapObject obj;
   {
-    AllocationResult result = heap->AllocateRaw(size, space);
+    AllocationResult result = heap->AllocateRaw(size, Heap::SelectType(space));
     if (!result.To(&obj)) return result;
   }
   obj->set_map_after_allocation(ReadOnlyRoots(heap).fixed_array_map(),
......