Commit 4c929953 authored by ager@chromium.org

Port FastCloneShallowArrayStub to ARM and x64.

Review URL: http://codereview.chromium.org/597022

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3835 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 51a7ecc1
@@ -2784,17 +2784,19 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  // Load the function of this activation.
  __ ldr(r2, frame_->Function());
  // Literals array.
  // Load the literals array of the function.
  __ ldr(r2, FieldMemOperand(r2, JSFunction::kLiteralsOffset));
  // Literal index.
  __ mov(r1, Operand(Smi::FromInt(node->literal_index())));
  // Constant elements.
  __ mov(r0, Operand(node->constant_elements()));
  frame_->EmitPushMultiple(3, r2.bit() | r1.bit() | r0.bit());
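  // This leaves the three stub arguments on the stack with the constant
  // elements (r0) on top, the layout FastCloneShallowArrayStub expects.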
  int length = node->values()->length();
  if (node->depth() > 1) {
    frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else {
  } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
    frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(length);
    frame_->CallStub(&stub, 3);
  }
  frame_->EmitPush(r0);  // save the result
  // r0: created object literal
@@ -4524,6 +4526,69 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
}

void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: constant elements.
  // [sp + kPointerSize]: literal index.
  // [sp + (2 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into r3 and check if we need to create a
  // boilerplate.
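  // An undefined entry in the literals array means that no boilerplate has
  // been created for this literal yet, so the stub bails out to the runtime.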
  Label slow_case;
  __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, ip);
  __ b(eq, &slow_case);

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size / kPointerSize,
                        r0,
                        r1,
                        r2,
                        &slow_case,
                        TAG_OBJECT);

  // Copy the JS array part.
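  // The elements pointer is only copied for an empty array; for a non-empty
  // array it is set further down to point at the elements copied directly
  // after the JSArray.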
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ ldr(r1, FieldMemOperand(r3, i));
      __ str(r1, FieldMemOperand(r0, i));
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ add(r2, r0, Operand(JSArray::kSize));
    __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ ldr(r1, FieldMemOperand(r3, i));
      __ str(r1, FieldMemOperand(r2, i));
    }
  }

  // Return and remove the on-stack parameters.
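  // ARM's Ret() does not pop arguments, so the three stub arguments are
  // dropped from the stack explicitly before returning.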
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  __ bind(&slow_case);
  ExternalReference runtime(Runtime::kCreateArrayLiteralShallow);
  __ TailCallRuntime(runtime, 3, 1);
}
// Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
// instruction. On pre-ARM5 hardware this routine gives the wrong answer for 0
// (31 instead of 32).
......
@@ -6917,6 +6917,13 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;
......
@@ -2495,17 +2495,19 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  // Load the literals array of the function.
  __ movq(literals.reg(),
          FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant elements.
  frame_->Push(node->constant_elements());
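  // The three stub arguments now sit on the stack with the constant elements
  // on top (just below the return address once the stub is called), which is
  // the layout FastCloneShallowArrayStub expects.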
  int length = node->values()->length();
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else {
  } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);
@@ -6260,6 +6262,63 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
}

void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into rcx and check if we need to create a
  // boilerplate.
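  // As in the ARM version, an undefined entry in the literals array means the
  // boilerplate does not exist yet and the runtime has to be called instead.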
  Label slow_case;
  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
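  // Convert the smi literal index into a scaled index for addressing into
  // the literals FixedArray.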
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rcx,
          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case);

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
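  // As on ARM, the elements pointer is only copied for an empty array; for a
  // non-empty array it is set below to point at the elements copied directly
  // after the JSArray.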
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rax, i), rbx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ lea(rdx, Operand(rax, JSArray::kSize));
    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rdx, i), rbx);
    }
  }

  // Return and remove the on-stack parameters.
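  // ret with an immediate operand pops that many bytes of arguments after
  // the return address has been popped.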
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  ExternalReference runtime(Runtime::kCreateArrayLiteralShallow);
  __ TailCallRuntime(runtime, 3, 1);
}

void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label false_result, true_result, not_string;
  __ movq(rax, Operand(rsp, 1 * kPointerSize));
......