Commit c1e7c8d9 authored by mstarzinger, committed by Commit bot

Make arguments adaptor not clobber new.target.

This ensures that the ArgumentsAdaptorTrampoline does not clobber the
new.target value, but rather passes it through to the callee unaltered.
Note that callees do not use the new.target value yet.

This is a preparatory CL that allows us to pass new.target in a register
instead of via a side-channel through the construct stub frame.

R=mvstanton@chromium.org
BUG=v8:4544
LOG=n

Review URL: https://codereview.chromium.org/1458103003

Cr-Commit-Position: refs/heads/master@{#32171}
parent 4ef29b1a
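
For context, a minimal JavaScript sketch of the behavior involved (the function name and values are only an illustration, not part of this change): a call whose actual argument count differs from the declared parameter count is routed through the ArgumentsAdaptorTrampoline, and when it is a construct call the callee's new.target must survive that adaptation.

function Base(a, b, c) {
  // new.target is the constructor invoked via `new`, or undefined for a plain call.
  return { target: new.target };
}

// One actual argument versus three formals: this call goes through the
// arguments adaptor, which must pass new.target through to Base unaltered.
const viaNew = new Base(1);
console.log(viaNew.target === Base);  // true

// A plain call is adapted as well, but here new.target is undefined.
const viaCall = Base(1);
console.log(viaCall.target);          // undefined
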
@@ -1416,6 +1416,7 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
// -- r0 : actual number of arguments
// -- r1 : function (passed through to callee)
// -- r2 : expected number of arguments
// -- r3 : new target (passed through to callee)
// -----------------------------------
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
@@ -1696,14 +1697,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// -- r0 : actual number of arguments
// -- r1 : function (passed through to callee)
// -- r2 : expected number of arguments
// -- r3 : new target (passed through to callee)
// -----------------------------------
Label stack_overflow;
ArgumentAdaptorStackCheck(masm, &stack_overflow);
Label invoke, dont_adapt_arguments;
Label invoke, dont_adapt_arguments, stack_overflow;
Label enough, too_few;
__ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
__ cmp(r0, r2);
__ b(lt, &too_few);
__ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
@@ -1712,12 +1711,13 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
{ // Enough parameters: actual >= expected
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
// Calculate copy start address into r0 and copy end address into r4.
// r0: actual number of arguments as a smi
// r1: function
// r2: expected number of arguments
// r3: code entry to call
// r3: new target (passed through to callee)
__ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
// adjust for return address and receiver
__ add(r0, r0, Operand(2 * kPointerSize));
@@ -1727,7 +1727,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r0: copy start address
// r1: function
// r2: expected number of arguments
// r3: code entry to call
// r3: new target (passed through to callee)
// r4: copy end address
Label copy;
@@ -1765,19 +1765,20 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
// Calculate copy start address into r0 and copy end address is fp.
// r0: actual number of arguments as a smi
// r1: function
// r2: expected number of arguments
// r3: code entry to call
// r3: new target (passed through to callee)
__ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
// r1: function
// r2: expected number of arguments
// r3: code entry to call
// r3: new target (passed through to callee)
Label copy;
__ bind(&copy);
// Adjust load for return address and receiver.
@@ -1790,7 +1791,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Fill the remaining expected arguments with undefined.
// r1: function
// r2: expected number of arguments
// r3: code entry to call
// r3: new target (passed through to callee)
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
// Adjust for frame.
@@ -1809,7 +1810,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ mov(r0, r2);
// r0 : expected number of arguments
// r1 : function (passed through to callee)
__ Call(r3);
// r3 : new target (passed through to callee)
__ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
__ Call(r4);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
@@ -1823,12 +1826,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Dont adapt arguments.
// -------------------------------------------
__ bind(&dont_adapt_arguments);
__ Jump(r3);
__ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
__ Jump(r4);
__ bind(&stack_overflow);
{
FrameScope frame(masm, StackFrame::MANUAL);
EnterArgumentsAdaptorFrame(masm);
__ CallRuntime(Runtime::kThrowStackOverflow, 0);
__ bkpt(0);
}
@@ -1409,6 +1409,7 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
// -- x0 : actual number of arguments
// -- x1 : function (passed through to callee)
// -- x2 : expected number of arguments
// -- x3 : new target (passed through to callee)
// -----------------------------------
// Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and
@@ -1759,20 +1760,17 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// -- x0 : actual number of arguments
// -- x1 : function (passed through to callee)
// -- x2 : expected number of arguments
// -- x3 : new target (passed through to callee)
// -----------------------------------
Label stack_overflow;
ArgumentAdaptorStackCheck(masm, &stack_overflow);
Register argc_actual = x0; // Excluding the receiver.
Register argc_expected = x2; // Excluding the receiver.
Register function = x1;
Register code_entry = x3;
Register code_entry = x10;
Label invoke, dont_adapt_arguments;
Label invoke, dont_adapt_arguments, stack_overflow;
Label enough, too_few;
__ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
__ Cmp(argc_actual, argc_expected);
__ B(lt, &too_few);
__ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
@@ -1780,6 +1778,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
{ // Enough parameters: actual >= expected
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
Register copy_start = x10;
Register copy_end = x11;
@@ -1850,6 +1849,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
__ Lsl(scratch2, argc_expected, kPointerSizeLog2);
__ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
@@ -1899,6 +1899,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Mov(argc_actual, argc_expected);
// x0 : expected number of arguments
// x1 : function (passed through to callee)
// x3 : new target (passed through to callee)
__ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
__ Call(code_entry);
// Store offset of return address for deoptimizer.
@@ -1910,12 +1912,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Call the entry point without adapting the arguments.
__ Bind(&dont_adapt_arguments);
__ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
__ Jump(code_entry);
__ Bind(&stack_overflow);
{
FrameScope frame(masm, StackFrame::MANUAL);
EnterArgumentsAdaptorFrame(masm);
__ CallRuntime(Runtime::kThrowStackOverflow, 0);
__ Unreachable();
}
@@ -1376,24 +1376,24 @@ static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
// ----------- S t a t e -------------
// -- eax : actual number of arguments
// -- ebx : expected number of arguments
// -- edi : function (passed through to callee)
// -- edx : new target (passed through to callee)
// -----------------------------------
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
ExternalReference real_stack_limit =
ExternalReference::address_of_real_stack_limit(masm->isolate());
__ mov(edx, Operand::StaticVariable(real_stack_limit));
__ mov(edi, Operand::StaticVariable(real_stack_limit));
// Make ecx the space we have left. The stack might already be overflowed
// here which will cause ecx to become negative.
__ mov(ecx, esp);
__ sub(ecx, edx);
// Make edx the space we need for the array when it is unrolled onto the
__ sub(ecx, edi);
// Make edi the space we need for the array when it is unrolled onto the
// stack.
__ mov(edx, ebx);
__ shl(edx, kPointerSizeLog2);
__ mov(edi, ebx);
__ shl(edi, kPointerSizeLog2);
// Check if the arguments will overflow the stack.
__ cmp(ecx, edx);
__ cmp(ecx, edi);
__ j(less_equal, stack_overflow); // Signed comparison.
}
@@ -1673,17 +1673,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : actual number of arguments
// -- ebx : expected number of arguments
// -- edx : new target (passed through to callee)
// -- edi : function (passed through to callee)
// -----------------------------------
Label invoke, dont_adapt_arguments;
Label invoke, dont_adapt_arguments, stack_overflow;
__ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
Label stack_overflow;
ArgumentsAdaptorStackCheck(masm, &stack_overflow);
Label enough, too_few;
__ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
__ cmp(eax, ebx);
__ j(less, &too_few);
__ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
@@ -1692,6 +1689,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
{ // Enough parameters: Actual >= expected.
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
ArgumentsAdaptorStackCheck(masm, &stack_overflow);
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -1733,6 +1731,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentsAdaptorStackCheck(masm, &stack_overflow);
// Remember expected arguments in ecx.
__ mov(ecx, ebx);
@@ -1771,8 +1770,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Restore function pointer.
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
// eax : expected number of arguments
// edx : new target (passed through to callee)
// edi : function (passed through to callee)
__ call(edx);
__ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
__ call(ecx);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
@@ -1785,12 +1786,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Dont adapt arguments.
// -------------------------------------------
__ bind(&dont_adapt_arguments);
__ jmp(edx);
__ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
__ jmp(ecx);
__ bind(&stack_overflow);
{
FrameScope frame(masm, StackFrame::MANUAL);
EnterArgumentsAdaptorFrame(masm);
__ CallRuntime(Runtime::kThrowStackOverflow, 0);
__ int3();
}
@@ -1432,6 +1432,7 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
// -- a0 : actual number of arguments
// -- a1 : function (passed through to callee)
// -- a2 : expected number of arguments
// -- a3 : new target (passed through to callee)
// -----------------------------------
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
@@ -1721,14 +1722,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// -- a0: actual arguments count
// -- a1: function (passed through to callee)
// -- a2: expected arguments count
// -- a3: new target (passed through to callee)
// -----------------------------------
Label stack_overflow;
ArgumentAdaptorStackCheck(masm, &stack_overflow);
Label invoke, dont_adapt_arguments;
Label invoke, dont_adapt_arguments, stack_overflow;
Label enough, too_few;
__ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
__ Branch(&dont_adapt_arguments, eq,
a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
// We use Uless as the number of argument should always be greater than 0.
@@ -1738,9 +1737,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a0: actual number of arguments as a smi
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
// Calculate copy start address into a0 and copy end address into t1.
__ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
@@ -1755,7 +1755,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a0: copy start address
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
// t1: copy end address
Label copy;
@@ -1792,12 +1792,13 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
// Calculate copy start address into a0 and copy end address into t3.
// a0: actual number of arguments as a smi
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
__ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(a0, fp, a0);
// Adjust for return address and receiver.
@@ -1809,7 +1810,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a0: copy start address
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
// t3: copy end address
Label copy;
__ bind(&copy);
@@ -1822,7 +1823,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Fill the remaining expected arguments with undefined.
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
__ sll(t2, a2, kPointerSizeLog2);
__ Subu(t1, fp, Operand(t2));
@@ -1842,7 +1843,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ mov(a0, a2);
// a0 : expected number of arguments
// a1 : function (passed through to callee)
__ Call(a3);
// a3 : new target (passed through to callee)
__ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
__ Call(t0);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
@@ -1856,12 +1859,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Don't adapt arguments.
// -------------------------------------------
__ bind(&dont_adapt_arguments);
__ Jump(a3);
__ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
__ Jump(t0);
__ bind(&stack_overflow);
{
FrameScope frame(masm, StackFrame::MANUAL);
EnterArgumentsAdaptorFrame(masm);
__ CallRuntime(Runtime::kThrowStackOverflow, 0);
__ break_(0xCC);
}
@@ -1429,6 +1429,7 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
// -- a0 : actual number of arguments
// -- a1 : function (passed through to callee)
// -- a2 : expected number of arguments
// -- a3 : new target (passed through to callee)
// -----------------------------------
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
@@ -1717,14 +1718,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// -- a0: actual arguments count
// -- a1: function (passed through to callee)
// -- a2: expected arguments count
// -- a3: new target (passed through to callee)
// -----------------------------------
Label stack_overflow;
ArgumentAdaptorStackCheck(masm, &stack_overflow);
Label invoke, dont_adapt_arguments;
Label invoke, dont_adapt_arguments, stack_overflow;
Label enough, too_few;
__ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
__ Branch(&dont_adapt_arguments, eq,
a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
// We use Uless as the number of argument should always be greater than 0.
@@ -1734,9 +1733,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a0: actual number of arguments as a smi
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
// Calculate copy start address into a0 and copy end address into a4.
__ SmiScale(a0, a0, kPointerSizeLog2);
@@ -1751,7 +1751,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a0: copy start address
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
// a4: copy end address
Label copy;
@@ -1788,12 +1788,13 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);
// Calculate copy start address into a0 and copy end address into a7.
// a0: actual number of arguments as a smi
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
__ SmiScale(a0, a0, kPointerSizeLog2);
__ Daddu(a0, fp, a0);
// Adjust for return address and receiver.
@@ -1805,7 +1806,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a0: copy start address
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
// a7: copy end address
Label copy;
__ bind(&copy);
@@ -1818,7 +1819,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Fill the remaining expected arguments with undefined.
// a1: function
// a2: expected number of arguments
// a3: code entry to call
// a3: new target (passed through to callee)
__ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
__ dsll(a6, a2, kPointerSizeLog2);
__ Dsubu(a4, fp, Operand(a6));
@@ -1838,7 +1839,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ mov(a0, a2);
// a0 : expected number of arguments
// a1 : function (passed through to callee)
__ Call(a3);
// a3: new target (passed through to callee)
__ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
__ Call(a4);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
@@ -1852,12 +1855,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Don't adapt arguments.
// -------------------------------------------
__ bind(&dont_adapt_arguments);
__ Jump(a3);
__ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
__ Jump(a4);
__ bind(&stack_overflow);
{
FrameScope frame(masm, StackFrame::MANUAL);
EnterArgumentsAdaptorFrame(masm);
__ CallRuntime(Runtime::kThrowStackOverflow, 0);
__ break_(0xCC);
}
@@ -1418,23 +1418,24 @@ static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
// ----------- S t a t e -------------
// -- rax : actual number of arguments
// -- rbx : expected number of arguments
// -- rdi: function (passed through to callee)
// -- rdx : new target (passed through to callee)
// -- rdi : function (passed through to callee)
// -----------------------------------
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
Label okay;
__ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
__ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
__ movp(rcx, rsp);
// Make rcx the space we have left. The stack might already be overflowed
// here which will cause rcx to become negative.
__ subp(rcx, rdx);
// Make rdx the space we need for the array when it is unrolled onto the
__ subp(rcx, r8);
// Make r8 the space we need for the array when it is unrolled onto the
// stack.
__ movp(rdx, rbx);
__ shlp(rdx, Immediate(kPointerSizeLog2));
__ movp(r8, rbx);
__ shlp(r8, Immediate(kPointerSizeLog2));
// Check if the arguments will overflow the stack.
__ cmpp(rcx, rdx);
__ cmpp(rcx, r8);
__ j(less_equal, stack_overflow); // Signed comparison.
}
@@ -1477,18 +1478,15 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : actual number of arguments
// -- rbx : expected number of arguments
// -- rdi: function (passed through to callee)
// -- rdx : new target (passed through to callee)
// -- rdi : function (passed through to callee)
// -----------------------------------
Label invoke, dont_adapt_arguments;
Label invoke, dont_adapt_arguments, stack_overflow;
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->arguments_adaptors(), 1);
Label stack_overflow;
ArgumentsAdaptorStackCheck(masm, &stack_overflow);
Label enough, too_few;
__ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ cmpp(rax, rbx);
__ j(less, &too_few);
__ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
@@ -1497,6 +1495,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
{ // Enough parameters: Actual >= expected.
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
ArgumentsAdaptorStackCheck(masm, &stack_overflow);
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -1551,6 +1550,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentsAdaptorStackCheck(masm, &stack_overflow);
// Copy receiver and all actual arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -1582,8 +1582,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&invoke);
__ movp(rax, rbx);
// rax : expected number of arguments
// rdi: function (passed through to callee)
__ call(rdx);
// rdx : new target (passed through to callee)
// rdi : function (passed through to callee)
__ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ call(rcx);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
@@ -1596,12 +1598,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Dont adapt arguments.
// -------------------------------------------
__ bind(&dont_adapt_arguments);
__ jmp(rdx);
__ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ jmp(rcx);
__ bind(&stack_overflow);
{
FrameScope frame(masm, StackFrame::MANUAL);
EnterArgumentsAdaptorFrame(masm);
__ CallRuntime(Runtime::kThrowStackOverflow, 0);
__ int3();
}