Commit 2d757afa authored by vegorov@chromium.org

Try aligning unboxed double array backing store in allocation or scavenge promotion.

This CL does not align them during compaction or mark-sweep promotion because we are not using specialized evacuation visitors.

R=erik.corry@gmail.com

Review URL: https://chromiumcodereview.appspot.com/10001012

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11344 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 999fb73b
...@@ -1484,6 +1484,27 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, ...@@ -1484,6 +1484,27 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
} }
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);

// Returns a version of |object| whose double fields are 8-byte aligned,
// consuming the one extra pointer-sized slot the caller already included
// in |size|.  If the object starts misaligned, the leading word becomes a
// filler and the object shifts forward by one pointer; if it is already
// aligned, the spare word at the tail becomes the filler instead.
INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
                                              HeapObject* object,
                                              int size));

static HeapObject* EnsureDoubleAligned(Heap* heap,
                                       HeapObject* object,
                                       int size) {
  const bool misaligned =
      (OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0;
  if (!misaligned) {
    // Already double-aligned: the unused word sits at the end; fill it so
    // the heap stays iterable.
    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
                               kPointerSize);
    return object;
  }
  // Misaligned: turn the first word into a filler and bump the object
  // past it onto the double boundary.
  heap->CreateFillerObjectAt(object->address(), kPointerSize);
  return HeapObject::FromAddress(object->address() + kPointerSize);
}
enum LoggingAndProfiling { enum LoggingAndProfiling {
LOGGING_AND_PROFILING_ENABLED, LOGGING_AND_PROFILING_ENABLED,
LOGGING_AND_PROFILING_DISABLED LOGGING_AND_PROFILING_DISABLED
...@@ -1607,7 +1628,10 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1607,7 +1628,10 @@ class ScavengingVisitor : public StaticVisitorBase {
} }
} }
template<ObjectContents object_contents, SizeRestriction size_restriction>
template<ObjectContents object_contents,
SizeRestriction size_restriction,
int alignment>
static inline void EvacuateObject(Map* map, static inline void EvacuateObject(Map* map,
HeapObject** slot, HeapObject** slot,
HeapObject* object, HeapObject* object,
...@@ -1616,19 +1640,26 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1616,19 +1640,26 @@ class ScavengingVisitor : public StaticVisitorBase {
(object_size <= Page::kMaxNonCodeHeapObjectSize)); (object_size <= Page::kMaxNonCodeHeapObjectSize));
SLOW_ASSERT(object->Size() == object_size); SLOW_ASSERT(object->Size() == object_size);
int allocation_size = object_size;
if (alignment != kObjectAlignment) {
ASSERT(alignment == kDoubleAlignment);
allocation_size += kPointerSize;
}
Heap* heap = map->GetHeap(); Heap* heap = map->GetHeap();
if (heap->ShouldBePromoted(object->address(), object_size)) { if (heap->ShouldBePromoted(object->address(), object_size)) {
MaybeObject* maybe_result; MaybeObject* maybe_result;
if ((size_restriction != SMALL) && if ((size_restriction != SMALL) &&
(object_size > Page::kMaxNonCodeHeapObjectSize)) { (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
maybe_result = heap->lo_space()->AllocateRaw(object_size, maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
NOT_EXECUTABLE); NOT_EXECUTABLE);
} else { } else {
if (object_contents == DATA_OBJECT) { if (object_contents == DATA_OBJECT) {
maybe_result = heap->old_data_space()->AllocateRaw(object_size); maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
} else { } else {
maybe_result = heap->old_pointer_space()->AllocateRaw(object_size); maybe_result =
heap->old_pointer_space()->AllocateRaw(allocation_size);
} }
} }
...@@ -1636,6 +1667,10 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1636,6 +1667,10 @@ class ScavengingVisitor : public StaticVisitorBase {
if (maybe_result->ToObject(&result)) { if (maybe_result->ToObject(&result)) {
HeapObject* target = HeapObject::cast(result); HeapObject* target = HeapObject::cast(result);
if (alignment != kObjectAlignment) {
target = EnsureDoubleAligned(heap, target, allocation_size);
}
// Order is important: slot might be inside of the target if target // Order is important: slot might be inside of the target if target
// was allocated over a dead object and slot comes from the store // was allocated over a dead object and slot comes from the store
// buffer. // buffer.
...@@ -1650,11 +1685,15 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1650,11 +1685,15 @@ class ScavengingVisitor : public StaticVisitorBase {
return; return;
} }
} }
MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size); MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
Object* result = allocation->ToObjectUnchecked(); Object* result = allocation->ToObjectUnchecked();
HeapObject* target = HeapObject::cast(result); HeapObject* target = HeapObject::cast(result);
if (alignment != kObjectAlignment) {
target = EnsureDoubleAligned(heap, target, allocation_size);
}
// Order is important: slot might be inside of the target if target // Order is important: slot might be inside of the target if target
// was allocated over a dead object and slot comes from the store // was allocated over a dead object and slot comes from the store
// buffer. // buffer.
...@@ -1690,7 +1729,7 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1690,7 +1729,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot, HeapObject** slot,
HeapObject* object) { HeapObject* object) {
int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
slot, slot,
object, object,
object_size); object_size);
...@@ -1702,7 +1741,8 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1702,7 +1741,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) { HeapObject* object) {
int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
int object_size = FixedDoubleArray::SizeFor(length); int object_size = FixedDoubleArray::SizeFor(length);
EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
map,
slot, slot,
object, object,
object_size); object_size);
...@@ -1713,7 +1753,8 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1713,7 +1753,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot, HeapObject** slot,
HeapObject* object) { HeapObject* object) {
int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size); EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
map, slot, object, object_size);
} }
...@@ -1722,7 +1763,8 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1722,7 +1763,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) { HeapObject* object) {
int object_size = SeqAsciiString::cast(object)-> int object_size = SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type()); SeqAsciiStringSize(map->instance_type());
EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size); EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
map, slot, object, object_size);
} }
...@@ -1731,7 +1773,8 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1731,7 +1773,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) { HeapObject* object) {
int object_size = SeqTwoByteString::cast(object)-> int object_size = SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type()); SeqTwoByteStringSize(map->instance_type());
EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size); EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
map, slot, object, object_size);
} }
...@@ -1774,7 +1817,8 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1774,7 +1817,8 @@ class ScavengingVisitor : public StaticVisitorBase {
} }
int object_size = ConsString::kSize; int object_size = ConsString::kSize;
EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size); EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
map, slot, object, object_size);
} }
template<ObjectContents object_contents> template<ObjectContents object_contents>
...@@ -1784,14 +1828,16 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -1784,14 +1828,16 @@ class ScavengingVisitor : public StaticVisitorBase {
static inline void VisitSpecialized(Map* map, static inline void VisitSpecialized(Map* map,
HeapObject** slot, HeapObject** slot,
HeapObject* object) { HeapObject* object) {
EvacuateObject<object_contents, SMALL>(map, slot, object, object_size); EvacuateObject<object_contents, SMALL, kObjectAlignment>(
map, slot, object, object_size);
} }
static inline void Visit(Map* map, static inline void Visit(Map* map,
HeapObject** slot, HeapObject** slot,
HeapObject* object) { HeapObject* object) {
int object_size = map->instance_size(); int object_size = map->instance_size();
EvacuateObject<object_contents, SMALL>(map, slot, object, object_size); EvacuateObject<object_contents, SMALL, kObjectAlignment>(
map, slot, object, object_size);
} }
}; };
...@@ -4666,6 +4712,11 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, ...@@ -4666,6 +4712,11 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
AllocationSpace space = AllocationSpace space =
(pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
int size = FixedDoubleArray::SizeFor(length); int size = FixedDoubleArray::SizeFor(length);
#ifndef V8_HOST_ARCH_64_BIT
size += kPointerSize;
#endif
if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
// Too big for new space. // Too big for new space.
space = LO_SPACE; space = LO_SPACE;
...@@ -4678,7 +4729,12 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, ...@@ -4678,7 +4729,12 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
AllocationSpace retry_space = AllocationSpace retry_space =
(size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE; (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
return AllocateRaw(size, space, retry_space); HeapObject* object;
{ MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
}
return EnsureDoubleAligned(this, object, size);
} }
......
...@@ -397,9 +397,25 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble( ...@@ -397,9 +397,25 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// Allocate new FixedDoubleArray. // Allocate new FixedDoubleArray.
// edx: receiver // edx: receiver
// edi: length of source FixedArray (smi-tagged) // edi: length of source FixedArray (smi-tagged)
__ lea(esi, Operand(edi, times_4, FixedDoubleArray::kHeaderSize)); __ lea(esi, Operand(edi,
times_4,
FixedDoubleArray::kHeaderSize + kPointerSize));
__ AllocateInNewSpace(esi, eax, ebx, no_reg, &gc_required, TAG_OBJECT); __ AllocateInNewSpace(esi, eax, ebx, no_reg, &gc_required, TAG_OBJECT);
Label aligned, aligned_done;
__ test(eax, Immediate(kDoubleAlignmentMask - kHeapObjectTag));
__ j(zero, &aligned, Label::kNear);
__ mov(FieldOperand(eax, 0),
Immediate(masm->isolate()->factory()->one_pointer_filler_map()));
__ add(eax, Immediate(kPointerSize));
__ jmp(&aligned_done);
__ bind(&aligned);
__ mov(Operand(eax, esi, times_1, -kPointerSize-1),
Immediate(masm->isolate()->factory()->one_pointer_filler_map()));
__ bind(&aligned_done);
// eax: destination FixedDoubleArray // eax: destination FixedDoubleArray
// edi: number of elements // edi: number of elements
// edx: receiver // edx: receiver
......
...@@ -4030,6 +4030,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( ...@@ -4030,6 +4030,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements); int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT); __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
// Restore the key, which is known to be the array length. // Restore the key, which is known to be the array length.
__ mov(ecx, Immediate(0)); __ mov(ecx, Immediate(0));
......
...@@ -48,6 +48,10 @@ const intptr_t kObjectAlignmentMask = kObjectAlignment - 1; ...@@ -48,6 +48,10 @@ const intptr_t kObjectAlignmentMask = kObjectAlignment - 1;
const intptr_t kPointerAlignment = (1 << kPointerSizeLog2); const intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
const intptr_t kPointerAlignmentMask = kPointerAlignment - 1; const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
// Alignment required for unboxed double values: 8 bytes on every
// supported target, independent of pointer size.
const intptr_t kDoubleAlignment = 8;
// Low-bit mask for testing whether an address is double-aligned.
const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
// Desired alignment for maps. // Desired alignment for maps.
#if V8_HOST_ARCH_64_BIT #if V8_HOST_ARCH_64_BIT
const intptr_t kMapAlignmentBits = kObjectAlignmentBits; const intptr_t kMapAlignmentBits = kObjectAlignmentBits;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment