Commit e95cfafb authored by mstarzinger's avatar mstarzinger Committed by Commit bot

Fix arguments object stubs for large arrays.

This fixes FastNewStrictArgumentsStub and FastNewRestParameterStub to no
longer assume that the strict arguments object being allocated will fit
into new-space. The case where said object needs to move to large object
space is now handled in the runtime.

R=bmeurer@chromium.org
TEST=mjsunit/regress/regress-crbug-614727
BUG=chromium:614727

Review-Url: https://codereview.chromium.org/2054853002
Cr-Commit-Position: refs/heads/master@{#36917}
parent 89d8c57b
...@@ -4456,10 +4456,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4456,10 +4456,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// specified by the function's internal formal parameter count. // specified by the function's internal formal parameter count.
Label rest_parameters; Label rest_parameters;
__ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r1, __ ldr(r3,
FieldMemOperand(r1, SharedFunctionInfo::kFormalParameterCountOffset)); FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
__ sub(r0, r0, r1, SetCC); __ sub(r0, r0, r3, SetCC);
__ b(gt, &rest_parameters); __ b(gt, &rest_parameters);
// Return an empty rest parameter array. // Return an empty rest parameter array.
...@@ -4506,15 +4506,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4506,15 +4506,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- r0 : number of rest parameters (tagged) // -- r0 : number of rest parameters (tagged)
// -- r1 : function
// -- r2 : pointer to first rest parameters // -- r2 : pointer to first rest parameters
// -- lr : return address // -- lr : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the rest parameter array plus the backing store. // Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ mov(r1, Operand(JSArray::kSize + FixedArray::kHeaderSize)); __ mov(r6, Operand(JSArray::kSize + FixedArray::kHeaderSize));
__ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1)); __ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ Allocate(r1, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Setup the elements array in r3. // Setup the elements array in r3.
...@@ -4546,8 +4547,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4546,8 +4547,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ mov(r0, r4); __ mov(r0, r4);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ cmp(r1, Operand(Page::kMaxRegularHeapObjectSize));
__ b(gt, &too_big_for_new_space);
{ {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r1); __ SmiTag(r1);
...@@ -4557,6 +4561,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4557,6 +4561,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ Pop(r0, r2); __ Pop(r0, r2);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewRestParameter.
__ bind(&too_big_for_new_space);
__ push(r1);
__ TailCallRuntime(Runtime::kNewRestParameter);
} }
} }
...@@ -4815,9 +4824,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4815,9 +4824,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &arguments_adaptor); __ b(eq, &arguments_adaptor);
{ {
__ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r0, FieldMemOperand( __ ldr(r0, FieldMemOperand(
r1, SharedFunctionInfo::kFormalParameterCountOffset)); r4, SharedFunctionInfo::kFormalParameterCountOffset));
__ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1)); __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ add(r2, r2, __ add(r2, r2,
Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize)); Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
...@@ -4835,15 +4844,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4835,15 +4844,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- r0 : number of rest parameters (tagged) // -- r0 : number of rest parameters (tagged)
// -- r1 : function
// -- r2 : pointer to first rest parameters // -- r2 : pointer to first rest parameters
// -- lr : return address // -- lr : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the strict arguments object plus the backing store. // Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ mov(r1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); __ mov(r6, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
__ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1)); __ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ Allocate(r1, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Setup the elements array in r3. // Setup the elements array in r3.
...@@ -4875,17 +4885,25 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4875,17 +4885,25 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ mov(r0, r4); __ mov(r0, r4);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
__ b(gt, &too_big_for_new_space);
{ {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r1); __ SmiTag(r6);
__ Push(r0, r2, r1); __ Push(r0, r2, r6);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ mov(r3, r0); __ mov(r3, r0);
__ Pop(r0, r2); __ Pop(r0, r2);
} }
__ b(&done_allocate); __ b(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ push(r1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
......
...@@ -4722,10 +4722,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4722,10 +4722,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
Label rest_parameters; Label rest_parameters;
__ Ldrsw(x0, UntagSmiMemOperand( __ Ldrsw(x0, UntagSmiMemOperand(
x2, ArgumentsAdaptorFrameConstants::kLengthOffset)); x2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw( __ Ldrsw(
x1, FieldMemOperand(x1, SharedFunctionInfo::kFormalParameterCountOffset)); x3, FieldMemOperand(x3, SharedFunctionInfo::kFormalParameterCountOffset));
__ Subs(x0, x0, x1); __ Subs(x0, x0, x3);
__ B(gt, &rest_parameters); __ B(gt, &rest_parameters);
// Return an empty rest parameter array. // Return an empty rest parameter array.
...@@ -4771,15 +4771,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4771,15 +4771,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- x0 : number of rest parameters // -- x0 : number of rest parameters
// -- x1 : function
// -- x2 : pointer to first rest parameters // -- x2 : pointer to first rest parameters
// -- lr : return address // -- lr : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the rest parameter array plus the backing store. // Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ Mov(x1, JSArray::kSize + FixedArray::kHeaderSize); __ Mov(x6, JSArray::kSize + FixedArray::kHeaderSize);
__ Add(x1, x1, Operand(x0, LSL, kPointerSizeLog2)); __ Add(x6, x6, Operand(x0, LSL, kPointerSizeLog2));
__ Allocate(x1, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(x6, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS);
__ Bind(&done_allocate); __ Bind(&done_allocate);
// Compute arguments.length in x6. // Compute arguments.length in x6.
...@@ -4814,19 +4815,27 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4814,19 +4815,27 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ Bind(&allocate); __ Bind(&allocate);
__ Cmp(x6, Operand(Page::kMaxRegularHeapObjectSize));
__ B(gt, &too_big_for_new_space);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0); __ SmiTag(x0);
__ SmiTag(x1); __ SmiTag(x6);
__ Push(x0, x2, x1); __ Push(x0, x2, x6);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ Mov(x3, x0); __ Mov(x3, x0);
__ Pop(x2, x0); __ Pop(x2, x0);
__ SmiUntag(x0); __ SmiUntag(x0);
} }
__ B(&done_allocate); __ B(&done_allocate);
// Fall back to %NewRestParameter.
__ Bind(&too_big_for_new_space);
__ Push(x1);
__ TailCallRuntime(Runtime::kNewRestParameter);
} }
} }
...@@ -5163,9 +5172,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5163,9 +5172,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ Cmp(x4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); __ Cmp(x4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ B(eq, &arguments_adaptor); __ B(eq, &arguments_adaptor);
{ {
__ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(x0, FieldMemOperand( __ Ldrsw(x0, FieldMemOperand(
x1, SharedFunctionInfo::kFormalParameterCountOffset)); x4, SharedFunctionInfo::kFormalParameterCountOffset));
__ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2)); __ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2));
__ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize); __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize);
} }
...@@ -5182,15 +5191,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5182,15 +5191,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- x0 : number of rest parameters // -- x0 : number of rest parameters
// -- x1 : function
// -- x2 : pointer to first rest parameters // -- x2 : pointer to first rest parameters
// -- lr : return address // -- lr : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the strict arguments object plus the backing store. // Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ Mov(x1, JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize); __ Mov(x6, JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize);
__ Add(x1, x1, Operand(x0, LSL, kPointerSizeLog2)); __ Add(x6, x6, Operand(x0, LSL, kPointerSizeLog2));
__ Allocate(x1, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(x6, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS);
__ Bind(&done_allocate); __ Bind(&done_allocate);
// Compute arguments.length in x6. // Compute arguments.length in x6.
...@@ -5225,19 +5235,27 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5225,19 +5235,27 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize); STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ Bind(&allocate); __ Bind(&allocate);
__ Cmp(x6, Operand(Page::kMaxRegularHeapObjectSize));
__ B(gt, &too_big_for_new_space);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0); __ SmiTag(x0);
__ SmiTag(x1); __ SmiTag(x6);
__ Push(x0, x2, x1); __ Push(x0, x2, x6);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ Mov(x3, x0); __ Mov(x3, x0);
__ Pop(x2, x0); __ Pop(x2, x0);
__ SmiUntag(x0); __ SmiUntag(x0);
} }
__ B(&done_allocate); __ B(&done_allocate);
// Fall back to %NewStrictArguments.
__ Bind(&too_big_for_new_space);
__ Push(x1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
......
...@@ -311,11 +311,10 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) { ...@@ -311,11 +311,10 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
Operator::Properties properties = node->op()->properties(); Operator::Properties properties = node->op()->properties();
CallDescriptor* desc = Linkage::GetStubCallDescriptor( CallDescriptor* desc = Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), callable.descriptor(), 0, isolate(), graph()->zone(), callable.descriptor(), 0,
CallDescriptor::kNoFlags, properties); CallDescriptor::kNeedsFrameState, properties);
const Operator* new_op = common()->Call(desc); const Operator* new_op = common()->Call(desc);
Node* stub_code = jsgraph()->HeapConstant(callable.code()); Node* stub_code = jsgraph()->HeapConstant(callable.code());
node->InsertInput(graph()->zone(), 0, stub_code); node->InsertInput(graph()->zone(), 0, stub_code);
node->RemoveInput(3); // Remove the frame state.
NodeProperties::ChangeOp(node, new_op); NodeProperties::ChangeOp(node, new_op);
return Changed(node); return Changed(node);
} }
...@@ -324,11 +323,10 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) { ...@@ -324,11 +323,10 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
Operator::Properties properties = node->op()->properties(); Operator::Properties properties = node->op()->properties();
CallDescriptor* desc = Linkage::GetStubCallDescriptor( CallDescriptor* desc = Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), callable.descriptor(), 0, isolate(), graph()->zone(), callable.descriptor(), 0,
CallDescriptor::kNoFlags, properties); CallDescriptor::kNeedsFrameState, properties);
const Operator* new_op = common()->Call(desc); const Operator* new_op = common()->Call(desc);
Node* stub_code = jsgraph()->HeapConstant(callable.code()); Node* stub_code = jsgraph()->HeapConstant(callable.code());
node->InsertInput(graph()->zone(), 0, stub_code); node->InsertInput(graph()->zone(), 0, stub_code);
node->RemoveInput(3); // Remove the frame state.
NodeProperties::ChangeOp(node, new_op); NodeProperties::ChangeOp(node, new_op);
return Changed(node); return Changed(node);
} }
......
...@@ -4693,8 +4693,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4693,8 +4693,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ mov(eax, edi); __ mov(eax, edi);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
__ j(greater, &too_big_for_new_space);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(ecx); __ SmiTag(ecx);
...@@ -4707,6 +4710,22 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4707,6 +4710,22 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ Pop(eax); __ Pop(eax);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewRestParameter.
__ bind(&too_big_for_new_space);
__ PopReturnAddressTo(ecx);
// We reload the function from the caller frame due to register pressure
// within this stub. This is the slow path, hence reloading is preferable.
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
} else {
__ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
}
__ PushReturnAddressFrom(ecx);
__ TailCallRuntime(Runtime::kNewRestParameter);
} }
} }
...@@ -5063,8 +5082,11 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5063,8 +5082,11 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ mov(eax, edi); __ mov(eax, edi);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
__ j(greater, &too_big_for_new_space);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(ecx); __ SmiTag(ecx);
...@@ -5077,6 +5099,22 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5077,6 +5099,22 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ Pop(eax); __ Pop(eax);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ PopReturnAddressTo(ecx);
// We reload the function from the caller frame due to register pressure
// within this stub. This is the slow path, hence reloading is preferable.
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
} else {
__ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
}
__ PushReturnAddressFrom(ecx);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
......
...@@ -4640,10 +4640,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4640,10 +4640,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// specified by the function's internal formal parameter count. // specified by the function's internal formal parameter count.
Label rest_parameters; Label rest_parameters;
__ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a1, __ lw(a3,
FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset)); FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
__ Subu(a0, a0, Operand(a1)); __ Subu(a0, a0, Operand(a3));
__ Branch(&rest_parameters, gt, a0, Operand(zero_reg)); __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
// Return an empty rest parameter array. // Return an empty rest parameter array.
...@@ -4690,15 +4690,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4690,15 +4690,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- a0 : number of rest parameters (tagged) // -- a0 : number of rest parameters (tagged)
// -- a1 : function
// -- a2 : pointer to first rest parameters // -- a2 : pointer to first rest parameters
// -- ra : return address // -- ra : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the rest parameter array plus the backing store. // Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ li(a1, Operand(JSArray::kSize + FixedArray::kHeaderSize)); __ li(t0, Operand(JSArray::kSize + FixedArray::kHeaderSize));
__ Lsa(a1, a1, a0, kPointerSizeLog2 - 1); __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1);
__ Allocate(a1, v0, a3, t0, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Setup the elements array in v0. // Setup the elements array in v0.
...@@ -4731,16 +4732,24 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4731,16 +4732,24 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT); __ Ret(USE_DELAY_SLOT);
__ mov(v0, a3); // In delay slot __ mov(v0, a3); // In delay slot
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ Branch(&too_big_for_new_space, gt, t0,
Operand(Page::kMaxRegularHeapObjectSize));
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(a1); __ SmiTag(t0);
__ Push(a0, a2, a1); __ Push(a0, a2, t0);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a0, a2); __ Pop(a0, a2);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ Push(a1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
} }
...@@ -5006,9 +5015,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5006,9 +5015,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ Branch(&arguments_adaptor, eq, a0, __ Branch(&arguments_adaptor, eq, a0,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
{ {
__ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a0, __ lw(a0,
FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset)); FieldMemOperand(t0, SharedFunctionInfo::kFormalParameterCountOffset));
__ Lsa(a2, a2, a0, kPointerSizeLog2 - 1); __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
__ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
1 * kPointerSize)); 1 * kPointerSize));
...@@ -5026,15 +5035,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5026,15 +5035,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- a0 : number of rest parameters (tagged) // -- a0 : number of rest parameters (tagged)
// -- a1 : function
// -- a2 : pointer to first rest parameters // -- a2 : pointer to first rest parameters
// -- ra : return address // -- ra : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the strict arguments object plus the backing store. // Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); __ li(t0, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
__ Lsa(a1, a1, a0, kPointerSizeLog2 - 1); __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1);
__ Allocate(a1, v0, a3, t0, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Setup the elements array in v0. // Setup the elements array in v0.
...@@ -5067,16 +5077,24 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5067,16 +5077,24 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT); __ Ret(USE_DELAY_SLOT);
__ mov(v0, a3); // In delay slot __ mov(v0, a3); // In delay slot
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ Branch(&too_big_for_new_space, gt, t0,
Operand(Page::kMaxRegularHeapObjectSize));
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(a1); __ SmiTag(t0);
__ Push(a0, a2, a1); __ Push(a0, a2, t0);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a0, a2); __ Pop(a0, a2);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ Push(a1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
......
...@@ -4654,10 +4654,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4654,10 +4654,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
Label rest_parameters; Label rest_parameters;
__ SmiLoadUntag( __ SmiLoadUntag(
a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a1, __ lw(a3,
FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset)); FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
__ Dsubu(a0, a0, Operand(a1)); __ Dsubu(a0, a0, Operand(a3));
__ Branch(&rest_parameters, gt, a0, Operand(zero_reg)); __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
// Return an empty rest parameter array. // Return an empty rest parameter array.
...@@ -4704,15 +4704,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4704,15 +4704,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- a0 : number of rest parameters // -- a0 : number of rest parameters
// -- a1 : function
// -- a2 : pointer to first rest parameters // -- a2 : pointer to first rest parameters
// -- ra : return address // -- ra : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the rest parameter array plus the backing store. // Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ li(a1, Operand(JSArray::kSize + FixedArray::kHeaderSize)); __ li(a5, Operand(JSArray::kSize + FixedArray::kHeaderSize));
__ Dlsa(a1, a1, a0, kPointerSizeLog2); __ Dlsa(a5, a5, a0, kPointerSizeLog2);
__ Allocate(a1, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Compute arguments.length in a4. // Compute arguments.length in a4.
...@@ -4747,18 +4748,26 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4747,18 +4748,26 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT); __ Ret(USE_DELAY_SLOT);
__ mov(v0, a3); // In delay slot __ mov(v0, a3); // In delay slot
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ Branch(&too_big_for_new_space, gt, a5,
Operand(Page::kMaxRegularHeapObjectSize));
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(a0); __ SmiTag(a0);
__ SmiTag(a1); __ SmiTag(a5);
__ Push(a0, a2, a1); __ Push(a0, a2, a5);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a0, a2); __ Pop(a0, a2);
__ SmiUntag(a0); __ SmiUntag(a0);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ Push(a1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
} }
...@@ -5030,9 +5039,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5030,9 +5039,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ Branch(&arguments_adaptor, eq, a0, __ Branch(&arguments_adaptor, eq, a0,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
{ {
__ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a0, __ lw(a0,
FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset)); FieldMemOperand(a4, SharedFunctionInfo::kFormalParameterCountOffset));
__ Dlsa(a2, a2, a0, kPointerSizeLog2); __ Dlsa(a2, a2, a0, kPointerSizeLog2);
__ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
1 * kPointerSize)); 1 * kPointerSize));
...@@ -5051,15 +5060,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5051,15 +5060,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- cp : context // -- cp : context
// -- a0 : number of rest parameters // -- a0 : number of rest parameters
// -- a1 : function
// -- a2 : pointer to first rest parameters // -- a2 : pointer to first rest parameters
// -- ra : return address // -- ra : return address
// ----------------------------------- // -----------------------------------
// Allocate space for the rest parameter array plus the backing store. // Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate; Label allocate, done_allocate;
__ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); __ li(a5, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
__ Dlsa(a1, a1, a0, kPointerSizeLog2); __ Dlsa(a5, a5, a0, kPointerSizeLog2);
__ Allocate(a1, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Compute arguments.length in a4. // Compute arguments.length in a4.
...@@ -5094,18 +5104,26 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -5094,18 +5104,26 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT); __ Ret(USE_DELAY_SLOT);
__ mov(v0, a3); // In delay slot __ mov(v0, a3); // In delay slot
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ Branch(&too_big_for_new_space, gt, a5,
Operand(Page::kMaxRegularHeapObjectSize));
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(a0); __ SmiTag(a0);
__ SmiTag(a1); __ SmiTag(a5);
__ Push(a0, a2, a1); __ Push(a0, a2, a5);
__ CallRuntime(Runtime::kAllocateInNewSpace); __ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a0, a2); __ Pop(a0, a2);
__ SmiUntag(a0); __ SmiUntag(a0);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ Push(a1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
......
...@@ -4382,6 +4382,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4382,6 +4382,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
1 * kPointerSize)); 1 * kPointerSize));
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- rdi : function
// -- rsi : context // -- rsi : context
// -- rax : number of rest parameters // -- rax : number of rest parameters
// -- rbx : pointer to first rest parameters // -- rbx : pointer to first rest parameters
...@@ -4392,7 +4393,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4392,7 +4393,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
Label allocate, done_allocate; Label allocate, done_allocate;
__ leal(rcx, Operand(rax, times_pointer_size, __ leal(rcx, Operand(rax, times_pointer_size,
JSArray::kSize + FixedArray::kHeaderSize)); JSArray::kSize + FixedArray::kHeaderSize));
__ Allocate(rcx, rdx, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Compute the arguments.length in rdi. // Compute the arguments.length in rdi.
...@@ -4430,8 +4431,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4430,8 +4431,11 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize));
__ j(greater, &too_big_for_new_space);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ Integer32ToSmi(rax, rax); __ Integer32ToSmi(rax, rax);
...@@ -4446,6 +4450,13 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) { ...@@ -4446,6 +4450,13 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ SmiToInteger32(rax, rax); __ SmiToInteger32(rax, rax);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewRestParameter.
__ bind(&too_big_for_new_space);
__ PopReturnAddressTo(kScratchRegister);
__ Push(rdi);
__ PushReturnAddressFrom(kScratchRegister);
__ TailCallRuntime(Runtime::kNewRestParameter);
} }
} }
...@@ -4736,6 +4747,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4736,6 +4747,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- rax : number of arguments // -- rax : number of arguments
// -- rbx : pointer to the first argument // -- rbx : pointer to the first argument
// -- rdi : function
// -- rsi : context // -- rsi : context
// -- rsp[0] : return address // -- rsp[0] : return address
// ----------------------------------- // -----------------------------------
...@@ -4744,7 +4756,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4744,7 +4756,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
Label allocate, done_allocate; Label allocate, done_allocate;
__ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize + __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize +
FixedArray::kHeaderSize)); FixedArray::kHeaderSize));
__ Allocate(rcx, rdx, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS); __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate); __ bind(&done_allocate);
// Compute the arguments.length in rdi. // Compute the arguments.length in rdi.
...@@ -4782,8 +4794,11 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4782,8 +4794,11 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize); STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
__ Ret(); __ Ret();
// Fall back to %AllocateInNewSpace. // Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate); __ bind(&allocate);
__ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize));
__ j(greater, &too_big_for_new_space);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
__ Integer32ToSmi(rax, rax); __ Integer32ToSmi(rax, rax);
...@@ -4798,6 +4813,13 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { ...@@ -4798,6 +4813,13 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ SmiToInteger32(rax, rax); __ SmiToInteger32(rax, rax);
} }
__ jmp(&done_allocate); __ jmp(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ PopReturnAddressTo(kScratchRegister);
__ Push(rdi);
__ PushReturnAddressFrom(kScratchRegister);
__ TailCallRuntime(Runtime::kNewStrictArguments);
} }
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
"use strict";
// Returns the strict-mode arguments object for whatever was passed in.
function f(x, y, z) { return arguments }
// Collects all actual arguments into a rest-parameter array.
function g(...rest) { return rest }
// On 64-bit machine this produces a 768K array which is sufficiently small to
// not cause a stack overflow, but big enough to move the allocated arguments
// object into large object space (kMaxRegularHeapObjectSize == 600K).
var smallLength = Math.pow(2, 15) * 3;
var smallArgs = new Array(smallLength);
assertEquals(smallLength, f.apply(null, smallArgs).length);
assertEquals(smallLength, g.apply(null, smallArgs).length);
// On 32-bit machines this produces an equally sized array, however it might in
// turn trigger a stack overflow on 64-bit machines, which we need to catch.
var largeLength = Math.pow(2, 16) * 3;
var largeArgs = new Array(largeLength);
try { f.apply(null, largeArgs) } catch(e) {}
try { g.apply(null, largeArgs) } catch(e) {}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment