Commit 055d4ee1 authored by hpayer, committed by Commit bot

Clean-up aligned allocation logic.
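
Previously every call site that might need a double-aligned allocation
repeated the same V8_HOST_ARCH #ifdef to pick between AllocateRaw and
AllocateRawAligned. This change moves that decision into the spaces:
NewSpace and PagedSpace now expose AllocateRaw(size_in_bytes, alignment),
which dispatches to AllocateRawAligned or AllocateRawUnaligned as needed,
and the old single-argument entry points are renamed to
AllocateRawUnaligned. A condensed sketch of the effect at a call site
that requests double alignment (illustrative only, not part of the diff;
variable names are placeholders):

  // Before: each call site carried the architecture check itself.
  AllocationResult allocation;
  #ifdef V8_HOST_ARCH_32_BIT
  allocation = new_space->AllocateRawAligned(size, kDoubleAligned);
  #else
  allocation = new_space->AllocateRaw(size);  // old one-argument form
  #endif

  // After: the space applies the alignment only where the host needs it.
  AllocationResult allocation = new_space->AllocateRaw(size, kDoubleAligned);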

BUG=

Review URL: https://codereview.chromium.org/1138643005

Cr-Commit-Position: refs/heads/master@{#28430}
parent 2b17c752
@@ -173,15 +173,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   HeapObject* object;
   AllocationResult allocation;
   if (NEW_SPACE == space) {
-#ifndef V8_HOST_ARCH_64_BIT
-    if (alignment == kWordAligned) {
-      allocation = new_space_.AllocateRaw(size_in_bytes);
-    } else {
-      allocation = new_space_.AllocateRawAligned(size_in_bytes, alignment);
-    }
-#else
-    allocation = new_space_.AllocateRaw(size_in_bytes);
-#endif
+    allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
     if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
       space = retry_space;
     } else {
@@ -193,18 +185,10 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   }
   if (OLD_SPACE == space) {
-#ifndef V8_HOST_ARCH_64_BIT
-    if (alignment == kWordAligned) {
-      allocation = old_space_->AllocateRaw(size_in_bytes);
-    } else {
-      allocation = old_space_->AllocateRawAligned(size_in_bytes, alignment);
-    }
-#else
-    allocation = old_space_->AllocateRaw(size_in_bytes);
-#endif
+    allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
   } else if (CODE_SPACE == space) {
     if (size_in_bytes <= code_space()->AreaSize()) {
-      allocation = code_space_->AllocateRaw(size_in_bytes);
+      allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
     } else {
       // Large code objects are allocated in large object space.
       allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
@@ -213,7 +197,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
   } else {
     DCHECK(MAP_SPACE == space);
-    allocation = map_space_->AllocateRaw(size_in_bytes);
+    allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
   }
   if (allocation.To(&object)) {
     OnAllocationEvent(object, size_in_bytes);
......
@@ -1036,9 +1036,9 @@ bool Heap::ReserveSpace(Reservation* reservations) {
       DCHECK_LE(size, MemoryAllocator::PageAreaSize(
                           static_cast<AllocationSpace>(space)));
       if (space == NEW_SPACE) {
-        allocation = new_space()->AllocateRaw(size);
+        allocation = new_space()->AllocateRawUnaligned(size);
       } else {
-        allocation = paged_space(space)->AllocateRaw(size);
+        allocation = paged_space(space)->AllocateRawUnaligned(size);
       }
       HeapObject* free_space;
       if (allocation.To(&free_space)) {
@@ -2146,17 +2146,10 @@ class ScavengingVisitor : public StaticVisitorBase {
     Heap* heap = map->GetHeap();
     DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
-    AllocationResult allocation;
-#ifdef V8_HOST_ARCH_32_BIT
-    if (alignment == kDoubleAlignment) {
-      allocation =
-          heap->new_space()->AllocateRawAligned(object_size, kDoubleAligned);
-    } else {
-      allocation = heap->new_space()->AllocateRaw(object_size);
-    }
-#else
-    allocation = heap->new_space()->AllocateRaw(object_size);
-#endif
+    AllocationAlignment align =
+        alignment == kDoubleAlignment ? kDoubleAligned : kWordAligned;
+    AllocationResult allocation =
+        heap->new_space()->AllocateRaw(object_size, align);
     HeapObject* target = NULL;  // Initialization to please compiler.
     if (allocation.To(&target)) {
@@ -2183,17 +2176,10 @@ class ScavengingVisitor : public StaticVisitorBase {
                                          HeapObject* object, int object_size) {
     Heap* heap = map->GetHeap();
-    AllocationResult allocation;
-#ifdef V8_HOST_ARCH_32_BIT
-    if (alignment == kDoubleAlignment) {
-      allocation =
-          heap->old_space()->AllocateRawAligned(object_size, kDoubleAligned);
-    } else {
-      allocation = heap->old_space()->AllocateRaw(object_size);
-    }
-#else
-    allocation = heap->old_space()->AllocateRaw(object_size);
-#endif
+    AllocationAlignment align =
+        alignment == kDoubleAlignment ? kDoubleAligned : kWordAligned;
+    AllocationResult allocation =
+        heap->old_space()->AllocateRaw(object_size, align);
     HeapObject* target = NULL;  // Initialization to please compiler.
     if (allocation.To(&target)) {
......
@@ -1941,16 +1941,10 @@ int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(
        continue;
      }
-     AllocationResult allocation;
-#ifdef V8_HOST_ARCH_32_BIT
-     if (object->NeedsToEnsureDoubleAlignment()) {
-       allocation = new_space->AllocateRawAligned(size, kDoubleAligned);
-     } else {
-       allocation = new_space->AllocateRaw(size);
-     }
-#else
-     allocation = new_space->AllocateRaw(size);
-#endif
+     AllocationAlignment alignment = object->NeedsToEnsureDoubleAlignment()
+                                         ? kDoubleAligned
+                                         : kWordAligned;
+     AllocationResult allocation = new_space->AllocateRaw(size, alignment);
      if (allocation.IsRetry()) {
        if (!new_space->AddFreshPage()) {
          // Shouldn't happen. We are sweeping linearly, and to-space
@@ -1958,15 +1952,7 @@ int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(
          // always room.
          UNREACHABLE();
        }
-#ifdef V8_HOST_ARCH_32_BIT
-       if (object->NeedsToEnsureDoubleAlignment()) {
-         allocation = new_space->AllocateRawAligned(size, kDoubleAligned);
-       } else {
-         allocation = new_space->AllocateRaw(size);
-       }
-#else
-       allocation = new_space->AllocateRaw(size);
-#endif
+       allocation = new_space->AllocateRaw(size, alignment);
        DCHECK(!allocation.IsRetry());
      }
      Object* target = allocation.ToObjectChecked();
@@ -3119,16 +3105,9 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
   OldSpace* old_space = heap()->old_space();
   HeapObject* target;
-  AllocationResult allocation;
-#ifdef V8_HOST_ARCH_32_BIT
-  if (object->NeedsToEnsureDoubleAlignment()) {
-    allocation = old_space->AllocateRawAligned(object_size, kDoubleAligned);
-  } else {
-    allocation = old_space->AllocateRaw(object_size);
-  }
-#else
-  allocation = old_space->AllocateRaw(object_size);
-#endif
+  AllocationAlignment alignment =
+      object->NeedsToEnsureDoubleAlignment() ? kDoubleAligned : kWordAligned;
+  AllocationResult allocation = old_space->AllocateRaw(object_size, alignment);
   if (allocation.To(&target)) {
     MigrateObject(target, object, object_size, old_space->identity());
     heap()->IncrementPromotedObjectsSize(object_size);
@@ -3352,13 +3331,16 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
      int size = object->Size();
+     AllocationAlignment alignment = object->NeedsToEnsureDoubleAlignment()
+                                         ? kDoubleAligned
+                                         : kWordAligned;
      HeapObject* target_object;
-     AllocationResult allocation = space->AllocateRaw(size);
+     AllocationResult allocation = space->AllocateRaw(size, alignment);
      if (!allocation.To(&target_object)) {
        // If allocation failed, use emergency memory and re-try allocation.
        CHECK(space->HasEmergencyMemory());
        space->UseEmergencyMemory();
-       allocation = space->AllocateRaw(size);
+       allocation = space->AllocateRaw(size, alignment);
      }
      if (!allocation.To(&target_object)) {
        // OS refused to give us memory.
......
@@ -277,7 +277,7 @@ HeapObject* PagedSpace::AllocateLinearlyAligned(int size_in_bytes,
 // Raw allocation.
-AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) {
+AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
   HeapObject* object = AllocateLinearly(size_in_bytes);
   if (object == NULL) {
@@ -325,6 +325,18 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
 }
+AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
+                                         AllocationAlignment alignment) {
+#ifdef V8_HOST_ARCH_32_BIT
+  return alignment == kDoubleAligned
+             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
+             : AllocateRawUnaligned(size_in_bytes);
+#else
+  return AllocateRawUnaligned(size_in_bytes);
+#endif
+}
 // -----------------------------------------------------------------------------
 // NewSpace
@@ -368,7 +380,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
 }
-AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
+AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
   Address old_top = allocation_info_.top();
   if (allocation_info_.limit() - old_top < size_in_bytes) {
@@ -386,6 +398,18 @@ AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
 }
+AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
+                                       AllocationAlignment alignment) {
+#ifdef V8_HOST_ARCH_32_BIT
+  return alignment == kDoubleAligned
+             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
+             : AllocateRawUnaligned(size_in_bytes);
+#else
+  return AllocateRawUnaligned(size_in_bytes);
+#endif
+}
 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) {
   heap->incremental_marking()->SetOldSpacePageFlags(chunk);
   return static_cast<LargePage*>(chunk);
......
@@ -1478,7 +1478,7 @@ AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes,
       return AllocateRawAligned(size_in_bytes, kDoubleAligned);
     else if (alignment == kDoubleUnaligned)
       return AllocateRawAligned(size_in_bytes, kDoubleUnaligned);
-    return AllocateRaw(size_in_bytes);
+    return AllocateRawUnaligned(size_in_bytes);
   } else if (AddFreshPage()) {
     // Switched to new page. Try allocating again.
     int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
@@ -1489,7 +1489,7 @@ AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes,
       return AllocateRawAligned(size_in_bytes, kDoubleAligned);
     else if (alignment == kDoubleUnaligned)
       return AllocateRawAligned(size_in_bytes, kDoubleUnaligned);
-    return AllocateRaw(size_in_bytes);
+    return AllocateRawUnaligned(size_in_bytes);
   } else {
     return AllocationResult::Retry();
   }
......
@@ -1764,13 +1764,19 @@ class PagedSpace : public Space {
   // Allocate the requested number of bytes in the space if possible, return a
   // failure object if not.
-  MUST_USE_RESULT inline AllocationResult AllocateRaw(int size_in_bytes);
+  MUST_USE_RESULT inline AllocationResult AllocateRawUnaligned(
+      int size_in_bytes);
   // Allocate the requested number of bytes in the space double aligned if
   // possible, return a failure object if not.
   MUST_USE_RESULT inline AllocationResult AllocateRawAligned(
       int size_in_bytes, AllocationAlignment alignment);
+  // Allocate the requested number of bytes in the space and consider allocation
+  // alignment if needed.
+  MUST_USE_RESULT inline AllocationResult AllocateRaw(
+      int size_in_bytes, AllocationAlignment alignment);
   // Give a block of memory to the space's free list. It might be added to
   // the free list or accounted as waste.
   // If add_to_freelist is false then just accounting stats are updated and
@@ -2501,7 +2507,11 @@ class NewSpace : public Space {
   MUST_USE_RESULT INLINE(AllocationResult AllocateRawAligned(
       int size_in_bytes, AllocationAlignment alignment));
-  MUST_USE_RESULT INLINE(AllocationResult AllocateRaw(int size_in_bytes));
+  MUST_USE_RESULT INLINE(
+      AllocationResult AllocateRawUnaligned(int size_in_bytes));
+  MUST_USE_RESULT INLINE(AllocationResult AllocateRaw(
+      int size_in_bytes, AllocationAlignment alignment));
   // Reset the allocation pointer to the beginning of the active semispace.
   void ResetAllocationInfo();
......
@@ -504,8 +504,8 @@ static inline void ExpectUndefined(const char* code) {
 // Helper function that simulates a full new-space in the heap.
 static inline bool FillUpOnePage(v8::internal::NewSpace* space) {
-  v8::internal::AllocationResult allocation =
-      space->AllocateRaw(v8::internal::Page::kMaxRegularHeapObjectSize);
+  v8::internal::AllocationResult allocation = space->AllocateRawUnaligned(
+      v8::internal::Page::kMaxRegularHeapObjectSize);
   if (allocation.IsRetry()) return false;
   v8::internal::HeapObject* free_space = NULL;
   CHECK(allocation.To(&free_space));
@@ -524,7 +524,7 @@ static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
   int new_linear_size = space_remaining - extra_bytes;
   if (new_linear_size == 0) return;
   v8::internal::AllocationResult allocation =
-      space->AllocateRaw(new_linear_size);
+      space->AllocateRawUnaligned(new_linear_size);
   v8::internal::HeapObject* free_space = NULL;
   CHECK(allocation.To(&free_space));
   space->heap()->CreateFillerObjectAt(free_space->address(), new_linear_size);
......
@@ -3946,8 +3946,9 @@ TEST(Regress169928) {
   // We need filler the size of AllocationMemento object, plus an extra
   // fill pointer value.
   HeapObject* obj = NULL;
-  AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
-      AllocationMemento::kSize + kPointerSize);
+  AllocationResult allocation =
+      CcTest::heap()->new_space()->AllocateRawUnaligned(
+          AllocationMemento::kSize + kPointerSize);
   CHECK(allocation.To(&obj));
   Address addr_obj = obj->address();
   CcTest::heap()->CreateFillerObjectAt(
......
@@ -358,8 +358,9 @@ TEST(NewSpace) {
   CHECK(new_space.HasBeenSetUp());
   while (new_space.Available() >= Page::kMaxRegularHeapObjectSize) {
-    Object* obj = new_space.AllocateRaw(
-        Page::kMaxRegularHeapObjectSize).ToObjectChecked();
+    Object* obj =
+        new_space.AllocateRawUnaligned(Page::kMaxRegularHeapObjectSize)
+            .ToObjectChecked();
     CHECK(new_space.Contains(HeapObject::cast(obj)));
   }
@@ -384,7 +385,7 @@ TEST(OldSpace) {
   CHECK(s->SetUp());
   while (s->Available() > 0) {
-    s->AllocateRaw(Page::kMaxRegularHeapObjectSize).ToObjectChecked();
+    s->AllocateRawUnaligned(Page::kMaxRegularHeapObjectSize).ToObjectChecked();
   }
   s->TearDown();
@@ -485,7 +486,8 @@ UNINITIALIZED_TEST(NewSpaceGrowsToTargetCapacity) {
     // Try to allocate out of the new space. A new page should be added and
    // the
     // allocation should succeed.
-    v8::internal::AllocationResult allocation = new_space->AllocateRaw(80);
+    v8::internal::AllocationResult allocation =
+        new_space->AllocateRawUnaligned(80);
     CHECK(!allocation.IsRetry());
     CHECK(new_space->CommittedMemory() == 2 * Page::kPageSize);
......