Commit 0801e4c3 authored by yangguo@chromium.org

Port r10674 to x64 and arm.

BUG=
TEST=

Review URL: https://chromiumcodereview.appspot.com/9384005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10679 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent b06c0414
@@ -313,7 +313,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
has_non_smi_element;
has_non_smi_element, finish, cant_transition_map, not_double;
// Check for array construction with zero arguments or one.
__ cmp(r0, Operand(0, RelocInfo::NONE));
@@ -418,6 +418,8 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ bind(&entry);
__ cmp(r4, r5);
__ b(lt, &loop);
__ bind(&finish);
__ mov(sp, r7);
// Remove caller arguments and receiver from the stack, setup return value and
@@ -430,8 +432,39 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ Jump(lr);
__ bind(&has_non_smi_element);
// Double values are handled by the runtime.
__ CheckMap(
    r2, r9, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
__ bind(&cant_transition_map);
__ UndoAllocationInNewSpace(r3, r4);
__ b(call_generic_code);
__ bind(&not_double);
// Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
// r3: JSArray
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                       FAST_ELEMENTS,
                                       r2,
                                       r9,
                                       &cant_transition_map);
__ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ RecordWriteField(r3,
                    HeapObject::kMapOffset,
                    r2,
                    r9,
                    kLRHasNotBeenSaved,
                    kDontSaveFPRegs,
                    EMIT_REMEMBERED_SET,
                    OMIT_SMI_CHECK);
Label loop2;
__ sub(r7, r7, Operand(kPointerSize));
__ bind(&loop2);
__ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
__ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
__ cmp(r4, r5);
__ b(lt, &loop2);
__ b(&finish);
}
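
For readers not fluent in ARM assembly, here is a rough, hypothetical C++ model of the fallback path the hunk above adds; the types and helper names are illustrative stand-ins, not V8 declarations.

#include <cstddef>
#include <cstdint>
#include <vector>

enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS };

struct Argument {
  bool is_smi;
  bool is_heap_number;
  intptr_t raw_value;
};

struct NewArray {
  ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
  std::vector<intptr_t> elements;
};

// Returns false when the stub must undo the allocation and jump to the
// generic code; returns true once all arguments have been stored.
bool FillArrayFromArguments(NewArray* array, const std::vector<Argument>& args) {
  size_t i = 0;
  // Main copy loop: only smis may be stored while the array is still
  // FAST_SMI_ONLY_ELEMENTS.
  for (; i < args.size(); ++i) {
    if (!args[i].is_smi) break;                    // -> has_non_smi_element
    array->elements.push_back(args[i].raw_value);
  }
  if (i == args.size()) return true;               // -> finish

  // Double values are handled by the runtime: undo the allocation and let
  // the generic code take over (the cant_transition_map path).
  if (args[i].is_heap_number) return false;

  // One-time transition FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
  // (LoadTransitionedArrayMapConditional plus RecordWriteField above); a
  // failed transition would also take the cant_transition_map path.
  array->elements_kind = FAST_ELEMENTS;

  // loop2: copy the remaining arguments, starting with the one that failed
  // the smi check, without any further checks.
  for (; i < args.size(); ++i) {
    array->elements.push_back(args[i].raw_value);
  }
  return true;                                     // -> finish
}
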
@@ -1238,7 +1238,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ add(edx, Immediate(kPointerSize));
__ bind(&entry);
__ dec(ecx);
__ j(greater_equal, &loop, Label::kNear);
__ j(greater_equal, &loop);
// Remove caller arguments from the stack and return.
// ebx: argc
@@ -1268,7 +1268,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ jmp(&prepare_generic_code_call);
__ bind(&not_double);
// Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS
// Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
__ mov(ebx, Operand(esp, 0));
__ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
__ LoadTransitionedArrayMapConditional(
@@ -1291,7 +1291,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ mov(Operand(edx, 0), eax);
__ add(edx, Immediate(kPointerSize));
__ dec(ecx);
__ j(greater_equal, &loop2, Label::kNear);
__ j(greater_equal, &loop2);
__ jmp(&finish);
// Restore argc and constructor before running the generic code.
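
The only substantive change in the ia32 hunks is dropping Label::kNear from the two backward branches (plus a comment tweak); presumably the code added around these loops in r10674 pushed the branch targets outside the 8-bit displacement that a short conditional jump can encode. A minimal, stand-alone illustration of that range constraint (not V8 code):

#include <cstdint>

// A short ("near") conditional jump on ia32 encodes an 8-bit signed
// displacement; anything farther than +/-127 bytes needs the 32-bit form,
// so the kNear hint has to go once the loop body grows past that range.
constexpr bool FitsInShortJump(int32_t displacement_in_bytes) {
  return displacement_in_bytes >= INT8_MIN && displacement_in_bytes <= INT8_MAX;
}

static_assert(FitsInShortJump(-100), "a tight copy loop fits in the short form");
static_assert(!FitsInShortJump(-200), "a loop grown past 127 bytes does not");
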
@@ -1160,7 +1160,7 @@ static void AllocateJSArray(MacroAssembler* masm,
static void ArrayNativeCode(MacroAssembler* masm,
Label* call_generic_code) {
Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
has_non_smi_element;
has_non_smi_element, finish, cant_transition_map, not_double;
// Check for array construction with zero arguments.
__ testq(rax, rax);
@@ -1265,11 +1265,11 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ movq(rcx, rax);
__ jmp(&entry);
__ bind(&loop);
__ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
__ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
if (FLAG_smi_only_arrays) {
__ JumpIfNotSmi(kScratchRegister, &has_non_smi_element);
__ JumpIfNotSmi(r8, &has_non_smi_element);
}
__ movq(Operand(rdx, 0), kScratchRegister);
__ movq(Operand(rdx, 0), r8);
__ addq(rdx, Immediate(kPointerSize));
__ bind(&entry);
__ decq(rcx);
@@ -1280,6 +1280,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
// rbx: JSArray
// esp[0]: return address
// esp[8]: last argument
__ bind(&finish);
__ pop(rcx);
__ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
__ push(rcx);
@@ -1287,8 +1288,38 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ ret(0);
__ bind(&has_non_smi_element);
// Double values are handled by the runtime.
__ CheckMap(r8,
            masm->isolate()->factory()->heap_number_map(),
            &not_double,
            DONT_DO_SMI_CHECK);
__ bind(&cant_transition_map);
__ UndoAllocationInNewSpace(rbx);
__ jmp(call_generic_code);
__ bind(&not_double);
// Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
// rbx: JSArray
__ movq(r11, FieldOperand(rbx, HeapObject::kMapOffset));
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                       FAST_ELEMENTS,
                                       r11,
                                       kScratchRegister,
                                       &cant_transition_map);
__ movq(FieldOperand(rbx, HeapObject::kMapOffset), r11);
__ RecordWriteField(rbx, HeapObject::kMapOffset, r11, r8,
                    kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// Finish the array initialization loop.
Label loop2;
__ bind(&loop2);
__ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
__ movq(Operand(rdx, 0), r8);
__ addq(rdx, Immediate(kPointerSize));
__ decq(rcx);
__ j(greater_equal, &loop2);
__ jmp(&finish);
}
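
Both ports rely on LoadTransitionedArrayMapConditional to swap the array's map. As a reference, here is a simplified, hypothetical C++ model of what that helper does; the real macro looks up the cached per-ElementsKind JSArray maps in the global context and branches to cant_transition_map on a mismatch, while this stand-in simply takes those maps as a parameter.

enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS, kElementsKindCount };

struct Map { ElementsKind elements_kind; };

// Returns the transitioned map, or nullptr when the current map is not the
// expected FAST_SMI_ONLY_ELEMENTS map; nullptr corresponds to the branch to
// cant_transition_map (undo the allocation, fall back to the generic code).
const Map* TransitionedArrayMap(const Map* current_map,
                                const Map* const context_maps[kElementsKindCount]) {
  if (current_map != context_maps[FAST_SMI_ONLY_ELEMENTS]) return nullptr;
  return context_maps[FAST_ELEMENTS];
}
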