Commit 250f591c authored by hpayer, committed by Commit bot

Remove explicit double alignment from allocation helper functions.

BUG=

Review URL: https://codereview.chromium.org/1128323003

Cr-Commit-Position: refs/heads/master@{#28320}
parent 18beb500
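
In short, helpers that need an 8-byte-aligned payload used to over-allocate by kPointerSize on 32-bit targets and then call EnsureDoubleAligned on the result; after this change they pass an alignment hint to AllocateRaw, which dispatches to the spaces' double-aligned allocation path itself. The pattern, condensed from the hunks below (illustrative summary, not an additional hunk):

    // Before: the caller pads the request and fixes the alignment afterwards.
    int size = FixedDoubleArray::SizeFor(length);
    #ifndef V8_HOST_ARCH_64_BIT
    size += kPointerSize;
    #endif
    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
    // ... check the result ...
    return EnsureDoubleAligned(object, size);

    // After: the caller states the required alignment and AllocateRaw handles it.
    int size = FixedDoubleArray::SizeFor(length);
    AllocationResult allocation =
        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
    // ... check the result ...
    return object;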
@@ -156,7 +156,8 @@ AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
-                                   AllocationSpace retry_space) {
+                                   AllocationSpace retry_space,
+                                   Alignment alignment) {
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
   DCHECK(gc_state_ == NOT_IN_GC);
@@ -172,7 +173,15 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   HeapObject* object;
   AllocationResult allocation;
   if (NEW_SPACE == space) {
+#ifndef V8_HOST_ARCH_64_BIT
+    if (alignment == kDoubleAligned) {
+      allocation = new_space_.AllocateRawDoubleAligned(size_in_bytes);
+    } else {
+      allocation = new_space_.AllocateRaw(size_in_bytes);
+    }
+#else
     allocation = new_space_.AllocateRaw(size_in_bytes);
+#endif
     if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
       space = retry_space;
     } else {
@@ -184,7 +193,15 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   }
   if (OLD_SPACE == space) {
+#ifndef V8_HOST_ARCH_64_BIT
+    if (alignment == kDoubleAligned) {
+      allocation = old_space_->AllocateRawDoubleAligned(size_in_bytes);
+    } else {
+      allocation = old_space_->AllocateRaw(size_in_bytes);
+    }
+#else
     allocation = old_space_->AllocateRaw(size_in_bytes);
+#endif
   } else if (CODE_SPACE == space) {
     if (size_in_bytes <= code_space()->AreaSize()) {
       allocation = code_space_->AllocateRaw(size_in_bytes);
......
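
The new_space_.AllocateRawDoubleAligned / old_space_->AllocateRawDoubleAligned helpers that the hunks above call into are not part of this diff. A minimal, self-contained sketch (not V8 code; the names and the bump-pointer setup are assumptions) of what such a double-aligned allocation path typically does on a 32-bit target:

    // Sketch of a double-aligned bump-pointer allocation.  Assumes the bump
    // pointer is already word (4-byte) aligned, the invariant the spaces keep.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    namespace sketch {

    constexpr uintptr_t kWordSize = 4;  // pointer size on a 32-bit target
    constexpr uintptr_t kDoubleAlignment = 8;
    constexpr uintptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

    // `top` is the space's bump pointer, `limit` the end of the current page.
    // Returns nullptr where the real code would return a retry AllocationResult.
    void* AllocateRawDoubleAligned(char*& top, char* limit, size_t size_in_bytes) {
      assert((reinterpret_cast<uintptr_t>(top) & (kWordSize - 1)) == 0);
      // Over-allocate by one word so the payload can be shifted onto an
      // 8-byte boundary; the skipped word becomes a one-word filler.
      size_t padded = size_in_bytes + kWordSize;
      if (limit - top < static_cast<ptrdiff_t>(padded)) return nullptr;
      char* result = top;
      top += padded;
      if (reinterpret_cast<uintptr_t>(result) & kDoubleAlignmentMask) {
        result += kWordSize;  // V8 would write a filler map into the skipped word
      }
      return result;
    }

    }  // namespace sketch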
@@ -3669,21 +3669,14 @@ AllocationResult Heap::AllocateFixedTypedArray(int length,
   ForFixedTypedArray(array_type, &element_size, &elements_kind);
   int size = OBJECT_POINTER_ALIGN(length * element_size +
                                   FixedTypedArrayBase::kDataOffset);
-#ifndef V8_HOST_ARCH_64_BIT
-  if (array_type == kExternalFloat64Array) {
-    size += kPointerSize;
-  }
-#endif
   AllocationSpace space = SelectSpace(size, pretenure);
   HeapObject* object;
-  AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+  AllocationResult allocation = AllocateRaw(
+      size, space, OLD_SPACE,
+      array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
   if (!allocation.To(&object)) return allocation;
-  if (array_type == kExternalFloat64Array) {
-    object = EnsureDoubleAligned(object, size);
-  }
   object->set_map(MapForFixedTypedArray(array_type));
   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
   elements->set_length(length);
@@ -4398,21 +4391,20 @@ AllocationResult Heap::AllocateUninitializedFixedDoubleArray(
 AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
                                                    PretenureFlag pretenure) {
   if (length < 0 || length > FixedDoubleArray::kMaxLength) {
-    v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
+    v8::internal::Heap::FatalProcessOutOfMemory("invalid array length",
+                                                kDoubleAligned);
   }
   int size = FixedDoubleArray::SizeFor(length);
-#ifndef V8_HOST_ARCH_64_BIT
-  size += kPointerSize;
-#endif
   AllocationSpace space = SelectSpace(size, pretenure);
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
     if (!allocation.To(&object)) return allocation;
   }
-  return EnsureDoubleAligned(object, size);
+  return object;
 }
@@ -4420,17 +4412,14 @@ AllocationResult Heap::AllocateConstantPoolArray(
     const ConstantPoolArray::NumberOfEntries& small) {
   CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
   int size = ConstantPoolArray::SizeFor(small);
-#ifndef V8_HOST_ARCH_64_BIT
-  size += kPointerSize;
-#endif
   AllocationSpace space = SelectSpace(size, TENURED);
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
     if (!allocation.To(&object)) return allocation;
   }
-  object = EnsureDoubleAligned(object, size);
   object->set_map_no_write_barrier(constant_pool_array_map());
   ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
@@ -4446,17 +4435,14 @@ AllocationResult Heap::AllocateExtendedConstantPoolArray(
   CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
   CHECK(extended.are_in_range(0, kMaxInt));
   int size = ConstantPoolArray::SizeForExtended(small, extended);
-#ifndef V8_HOST_ARCH_64_BIT
-  size += kPointerSize;
-#endif
   AllocationSpace space = SelectSpace(size, TENURED);
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
     if (!allocation.To(&object)) return allocation;
   }
-  object = EnsureDoubleAligned(object, size);
   object->set_map_no_write_barrier(constant_pool_array_map());
   ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
......
@@ -1816,12 +1816,15 @@ class Heap {
   HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
+  enum Alignment { kWordAligned, kDoubleAligned };
   // Allocate an uninitialized object. The memory is non-executable if the
   // hardware and OS allow. This is the single choke-point for allocations
   // performed by the runtime and should not be bypassed (to extend this to
   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   MUST_USE_RESULT inline AllocationResult AllocateRaw(
-      int size_in_bytes, AllocationSpace space, AllocationSpace retry_space);
+      int size_in_bytes, AllocationSpace space, AllocationSpace retry_space,
+      Alignment aligment = kWordAligned);
   // Allocates a heap object based on the map.
   MUST_USE_RESULT AllocationResult
......
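
Because the new parameter defaults to kWordAligned, existing AllocateRaw call sites keep compiling unchanged; only allocations whose payload must sit on an 8-byte boundary (double arrays, constant pools, float64 typed arrays) opt in explicitly. Illustrative fragments inside a Heap method, assuming the surrounding variables from the hunks above:

    AllocationResult plain =
        AllocateRaw(size, space, OLD_SPACE);                 // word-aligned (default)
    AllocationResult aligned =
        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned); // 8-byte-aligned payload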