Commit c3c0be71 authored by hpayer@chromium.org's avatar hpayer@chromium.org

Simplified large object allocation strategy.

BUG=
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/19934006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15840 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 6c83b7d6
...@@ -1625,6 +1625,7 @@ void MacroAssembler::Allocate(int object_size, ...@@ -1625,6 +1625,7 @@ void MacroAssembler::Allocate(int object_size,
Register scratch2, Register scratch2,
Label* gc_required, Label* gc_required,
AllocationFlags flags) { AllocationFlags flags) {
ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
if (!FLAG_inline_new) { if (!FLAG_inline_new) {
if (emit_debug_code()) { if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure. // Trash the registers to simulate an allocation failure.
......
...@@ -5359,25 +5359,16 @@ MaybeObject* Heap::AllocateRawOneByteString(int length, ...@@ -5359,25 +5359,16 @@ MaybeObject* Heap::AllocateRawOneByteString(int length,
if (length < 0 || length > SeqOneByteString::kMaxLength) { if (length < 0 || length > SeqOneByteString::kMaxLength) {
return Failure::OutOfMemoryException(0xb); return Failure::OutOfMemoryException(0xb);
} }
int size = SeqOneByteString::SizeFor(length); int size = SeqOneByteString::SizeFor(length);
ASSERT(size <= SeqOneByteString::kMaxSize); ASSERT(size <= SeqOneByteString::kMaxSize);
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
AllocationSpace retry_space = OLD_DATA_SPACE; AllocationSpace retry_space = OLD_DATA_SPACE;
if (space == NEW_SPACE) { if (size > Page::kMaxNonCodeHeapObjectSize) {
if (size > kMaxObjectSizeInNewSpace) { // Allocate in large object space, retry space will be ignored.
// Allocate in large object space, retry space will be ignored.
space = LO_SPACE;
} else if (size > Page::kMaxNonCodeHeapObjectSize) {
// Allocate in new space, retry in large object space.
retry_space = LO_SPACE;
}
} else if (space == OLD_DATA_SPACE &&
size > Page::kMaxNonCodeHeapObjectSize) {
space = LO_SPACE; space = LO_SPACE;
} }
Object* result; Object* result;
{ MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
if (!maybe_result->ToObject(&result)) return maybe_result; if (!maybe_result->ToObject(&result)) return maybe_result;
...@@ -5403,18 +5394,11 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length, ...@@ -5403,18 +5394,11 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
AllocationSpace retry_space = OLD_DATA_SPACE; AllocationSpace retry_space = OLD_DATA_SPACE;
if (space == NEW_SPACE) { if (size > Page::kMaxNonCodeHeapObjectSize) {
if (size > kMaxObjectSizeInNewSpace) { // Allocate in large object space, retry space will be ignored.
// Allocate in large object space, retry space will be ignored.
space = LO_SPACE;
} else if (size > Page::kMaxNonCodeHeapObjectSize) {
// Allocate in new space, retry in large object space.
retry_space = LO_SPACE;
}
} else if (space == OLD_DATA_SPACE &&
size > Page::kMaxNonCodeHeapObjectSize) {
space = LO_SPACE; space = LO_SPACE;
} }
Object* result; Object* result;
{ MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
if (!maybe_result->ToObject(&result)) return maybe_result; if (!maybe_result->ToObject(&result)) return maybe_result;
...@@ -5488,7 +5472,7 @@ MaybeObject* Heap::AllocateRawFixedArray(int length) { ...@@ -5488,7 +5472,7 @@ MaybeObject* Heap::AllocateRawFixedArray(int length) {
if (always_allocate()) return AllocateFixedArray(length, TENURED); if (always_allocate()) return AllocateFixedArray(length, TENURED);
// Allocate the raw data for a fixed array. // Allocate the raw data for a fixed array.
int size = FixedArray::SizeFor(length); int size = FixedArray::SizeFor(length);
return size <= kMaxObjectSizeInNewSpace return size <= Page::kMaxNonCodeHeapObjectSize
? new_space_.AllocateRaw(size) ? new_space_.AllocateRaw(size)
: lo_space_->AllocateRaw(size, NOT_EXECUTABLE); : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
} }
...@@ -5559,22 +5543,16 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { ...@@ -5559,22 +5543,16 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
if (length < 0 || length > FixedArray::kMaxLength) { if (length < 0 || length > FixedArray::kMaxLength) {
return Failure::OutOfMemoryException(0xe); return Failure::OutOfMemoryException(0xe);
} }
int size = FixedArray::SizeFor(length);
AllocationSpace space = AllocationSpace space =
(pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
int size = FixedArray::SizeFor(length); AllocationSpace retry_space = OLD_POINTER_SPACE;
if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
// Too big for new space. if (size > Page::kMaxNonCodeHeapObjectSize) {
space = LO_SPACE; // Allocate in large object space, retry space will be ignored.
} else if (space == OLD_POINTER_SPACE &&
size > Page::kMaxNonCodeHeapObjectSize) {
// Too big for old pointer space.
space = LO_SPACE; space = LO_SPACE;
} }
AllocationSpace retry_space =
(size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE;
return AllocateRaw(size, space, retry_space); return AllocateRaw(size, space, retry_space);
} }
...@@ -5692,27 +5670,19 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, ...@@ -5692,27 +5670,19 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
if (length < 0 || length > FixedDoubleArray::kMaxLength) { if (length < 0 || length > FixedDoubleArray::kMaxLength) {
return Failure::OutOfMemoryException(0xf); return Failure::OutOfMemoryException(0xf);
} }
AllocationSpace space =
(pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
int size = FixedDoubleArray::SizeFor(length); int size = FixedDoubleArray::SizeFor(length);
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
AllocationSpace retry_space = OLD_DATA_SPACE;
#ifndef V8_HOST_ARCH_64_BIT #ifndef V8_HOST_ARCH_64_BIT
size += kPointerSize; size += kPointerSize;
#endif #endif
if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { if (size > Page::kMaxNonCodeHeapObjectSize) {
// Too big for new space. // Allocate in large object space, retry space will be ignored.
space = LO_SPACE;
} else if (space == OLD_DATA_SPACE &&
size > Page::kMaxNonCodeHeapObjectSize) {
// Too big for old data space.
space = LO_SPACE; space = LO_SPACE;
} }
AllocationSpace retry_space =
(size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
HeapObject* object; HeapObject* object;
{ MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
if (!maybe_object->To<HeapObject>(&object)) return maybe_object; if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
......
...@@ -1678,8 +1678,6 @@ class Heap { ...@@ -1678,8 +1678,6 @@ class Heap {
// we try to promote this object. // we try to promote this object.
inline bool ShouldBePromoted(Address old_address, int object_size); inline bool ShouldBePromoted(Address old_address, int object_size);
int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
void ClearJSFunctionResultCaches(); void ClearJSFunctionResultCaches();
void ClearNormalizedMapCaches(); void ClearNormalizedMapCaches();
...@@ -1924,12 +1922,6 @@ class Heap { ...@@ -1924,12 +1922,6 @@ class Heap {
int scan_on_scavenge_pages_; int scan_on_scavenge_pages_;
#if V8_TARGET_ARCH_X64
static const int kMaxObjectSizeInNewSpace = 1024*KB;
#else
static const int kMaxObjectSizeInNewSpace = 512*KB;
#endif
NewSpace new_space_; NewSpace new_space_;
OldSpace* old_pointer_space_; OldSpace* old_pointer_space_;
OldSpace* old_data_space_; OldSpace* old_data_space_;
......
...@@ -1248,6 +1248,7 @@ void MacroAssembler::Allocate(int object_size, ...@@ -1248,6 +1248,7 @@ void MacroAssembler::Allocate(int object_size,
Label* gc_required, Label* gc_required,
AllocationFlags flags) { AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
if (!FLAG_inline_new) { if (!FLAG_inline_new) {
if (emit_debug_code()) { if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure. // Trash the registers to simulate an allocation failure.
......
...@@ -2882,6 +2882,7 @@ void MacroAssembler::Allocate(int object_size, ...@@ -2882,6 +2882,7 @@ void MacroAssembler::Allocate(int object_size,
Register scratch2, Register scratch2,
Label* gc_required, Label* gc_required,
AllocationFlags flags) { AllocationFlags flags) {
ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
if (!FLAG_inline_new) { if (!FLAG_inline_new) {
if (emit_debug_code()) { if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure. // Trash the registers to simulate an allocation failure.
......
...@@ -3838,6 +3838,7 @@ void MacroAssembler::Allocate(int object_size, ...@@ -3838,6 +3838,7 @@ void MacroAssembler::Allocate(int object_size,
Label* gc_required, Label* gc_required,
AllocationFlags flags) { AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
if (!FLAG_inline_new) { if (!FLAG_inline_new) {
if (emit_debug_code()) { if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure. // Trash the registers to simulate an allocation failure.
......
...@@ -984,7 +984,7 @@ TEST(Regression39128) { ...@@ -984,7 +984,7 @@ TEST(Regression39128) {
// just enough room to allocate JSObject and thus fill the newspace. // just enough room to allocate JSObject and thus fill the newspace.
int allocation_amount = Min(FixedArray::kMaxSize, int allocation_amount = Min(FixedArray::kMaxSize,
HEAP->MaxObjectSizeInNewSpace()); Page::kMaxNonCodeHeapObjectSize);
int allocation_len = LenFromSize(allocation_amount); int allocation_len = LenFromSize(allocation_amount);
NewSpace* new_space = HEAP->new_space(); NewSpace* new_space = HEAP->new_space();
Address* top_addr = new_space->allocation_top_address(); Address* top_addr = new_space->allocation_top_address();
......
...@@ -119,10 +119,8 @@ TEST(NoPromotion) { ...@@ -119,10 +119,8 @@ TEST(NoPromotion) {
HEAP->CollectGarbage(OLD_POINTER_SPACE); HEAP->CollectGarbage(OLD_POINTER_SPACE);
// Allocate a big Fixed array in the new space. // Allocate a big Fixed array in the new space.
int max_size = int length = (Page::kMaxNonCodeHeapObjectSize -
Min(Page::kMaxNonCodeHeapObjectSize, HEAP->MaxObjectSizeInNewSpace()); FixedArray::kHeaderSize) / (2 * kPointerSize);
int length = (max_size - FixedArray::kHeaderSize) / (2*kPointerSize);
Object* obj = i::Isolate::Current()->heap()->AllocateFixedArray(length)-> Object* obj = i::Isolate::Current()->heap()->AllocateFixedArray(length)->
ToObjectChecked(); ToObjectChecked();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment