Commit 8a5cb3dc authored by Victor Gomes, committed by Commit Bot

[arm64] Remove arguments adaptor frame

- It also fixes padding issues in the deoptimizer

Change-Id: Icac62892657830d067b7c21ff45b43ba58e350d9
Bug: v8:10201
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2498694
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71090}
parent fbb3353c
@@ -112,8 +112,8 @@ declare_args() {
# Disable arguments adaptor frame (sets -dV8_NO_ARGUMENTS_ADAPTOR).
v8_disable_arguments_adaptor =
v8_current_cpu == "x86" || v8_current_cpu == "x64" ||
v8_current_cpu == "mipsel" || v8_current_cpu == "mips64el" ||
v8_current_cpu == "arm"
v8_current_cpu == "arm" || v8_current_cpu == "arm64" ||
v8_current_cpu == "mipsel" || v8_current_cpu == "mips64el"
# Sets -dOBJECT_PRINT.
v8_enable_object_print = ""
......
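Note: v8_disable_arguments_adaptor now defaults to true on arm and arm64 as well. Per the comment above, the GN arg only sets the V8_NO_ARGUMENTS_ADAPTOR define; a minimal sketch of how the rest of this CL consumes that define (illustrative only, not the actual build glue):

#ifdef V8_NO_ARGUMENTS_ADAPTOR
// New path: the callee copies and pads its own arguments; no adaptor frame.
#else
// Legacy path: argument-count mismatches go through ArgumentsAdaptorTrampoline.
#endif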
@@ -74,41 +74,6 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
namespace {
enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };
void LoadStackLimit(MacroAssembler* masm, Register destination,
StackLimitKind kind) {
DCHECK(masm->root_array_available());
Isolate* isolate = masm->isolate();
ExternalReference limit =
kind == StackLimitKind::kRealStackLimit
? ExternalReference::address_of_real_jslimit(isolate)
: ExternalReference::address_of_jslimit(isolate);
DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));
intptr_t offset =
TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
__ Ldr(destination, MemOperand(kRootRegister, offset));
}
void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Label* stack_overflow) {
UseScratchRegisterScope temps(masm);
Register scratch = temps.AcquireX();
// Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and
// preemption) here, so the "real stack limit" is checked.
LoadStackLimit(masm, scratch, StackLimitKind::kRealStackLimit);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ Sub(scratch, sp, scratch);
// Check if the arguments will overflow the stack.
__ Cmp(scratch, Operand(num_args, LSL, kSystemPointerSizeLog2));
__ B(le, stack_overflow);
}
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
@@ -122,7 +87,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
Label stack_overflow;
Generate_StackOverflowCheck(masm, x0, &stack_overflow);
__ StackOverflowCheck(x0, &stack_overflow);
// Enter a construct frame.
{
@@ -160,6 +125,11 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ Str(padreg, MemOperand(x2, 1 * kSystemPointerSize));
__ Bind(&already_aligned);
// TODO(victorgomes): When the arguments adaptor is completely removed, we
// should get the formal parameter count and copy the arguments in its
// correct position (including any undefined), instead of delaying this to
// InvokeFunction.
// Copy arguments to the expression stack.
{
Register count = x2;
@@ -340,7 +310,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Check if we have enough stack space to push all arguments.
Label enough_stack_space, stack_overflow;
Generate_StackOverflowCheck(masm, x10, &stack_overflow);
__ StackOverflowCheck(x10, &stack_overflow);
__ B(&enough_stack_space);
__ Bind(&stack_overflow);
@@ -352,6 +322,11 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ Bind(&enough_stack_space);
__ Claim(x10);
// TODO(victorgomes): When the arguments adaptor is completely removed, we
// should get the formal parameter count and copy the arguments in its
// correct position (including any undefined), instead of delaying this to
// InvokeFunction.
// Copy the arguments.
{
Register count = x2;
@@ -488,7 +463,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
LoadStackLimit(masm, x10, StackLimitKind::kRealStackLimit);
__ LoadStackLimit(x10, StackLimitKind::kRealStackLimit);
__ Cmp(sp, x10);
__ B(lo, &stack_overflow);
@@ -859,7 +834,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Check if we have enough stack space to push all arguments.
Label enough_stack_space, stack_overflow;
Generate_StackOverflowCheck(masm, slots_to_claim, &stack_overflow);
__ StackOverflowCheck(slots_to_claim, &stack_overflow);
__ B(&enough_stack_space);
__ Bind(&stack_overflow);
@@ -968,25 +943,42 @@ static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
OMIT_SMI_CHECK);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
Register args_size = scratch;
// Get the arguments + receiver count.
__ Ldr(args_size,
MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ Ldr(args_size.W(),
FieldMemOperand(args_size, BytecodeArray::kParameterSizeOffset));
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
Register scratch2) {
Register params_size = scratch1;
// Get the size of the formal parameters + receiver (in bytes).
__ Ldr(params_size,
MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ Ldr(params_size.W(),
FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
#ifdef V8_NO_ARGUMENTS_ADAPTOR
Register actual_params_size = scratch2;
// Compute the size of the actual parameters + receiver (in bytes).
__ Ldr(actual_params_size,
MemOperand(fp, StandardFrameConstants::kArgCOffset));
__ lsl(actual_params_size, actual_params_size, kSystemPointerSizeLog2);
__ Add(actual_params_size, actual_params_size, Operand(kSystemPointerSize));
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
__ Cmp(params_size, actual_params_size);
__ B(ge, &corrected_args_count);
__ Mov(params_size, actual_params_size);
__ Bind(&corrected_args_count);
#endif
// Leave the frame (also dropping the register file).
__ LeaveFrame(StackFrame::INTERPRETED);
// Drop receiver + arguments.
if (__ emit_debug_code()) {
__ Tst(args_size, kSystemPointerSize - 1);
__ Tst(params_size, kSystemPointerSize - 1);
__ Check(eq, AbortReason::kUnexpectedValue);
}
__ Lsr(args_size, args_size, kSystemPointerSizeLog2);
__ Lsr(params_size, params_size, kSystemPointerSizeLog2);
__ DropArguments(args_size);
__ DropArguments(params_size);
}
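Note: with V8_NO_ARGUMENTS_ADAPTOR, LeaveInterpreterFrame drops whichever area is larger, the declared parameter slots or the arguments the caller actually pushed. A host-side C++ sketch of that slot arithmetic (illustrative names, not V8 code):

#include <algorithm>
#include <cstdint>

// params_size_bytes: formal parameters + receiver, in bytes, as read from the
// BytecodeArray. actual_argc: arguments excluding the receiver, as read from
// StandardFrameConstants::kArgCOffset.
uint64_t BytesToDropOnLeave(uint64_t params_size_bytes, uint64_t actual_argc) {
  constexpr uint64_t kSystemPointerSize = 8;
  uint64_t actual_params_size =
      actual_argc * kSystemPointerSize + kSystemPointerSize;
  // If the caller over-applied, free the larger region (the Cmp/B(ge)/Mov above).
  return std::max(params_size_bytes, actual_params_size);
}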
// Tail-call |function_id| if |actual_marker| == |expected_marker|
@@ -1256,10 +1248,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Push actual argument count, bytecode array, Smi tagged bytecode array
// offset and an undefined (to properly align the stack pointer).
STATIC_ASSERT(TurboAssembler::kExtraSlotClaimedByPrologue == 1);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
__ SmiTag(x6, kInterpreterBytecodeOffsetRegister);
__ Push(kJavaScriptCallArgCountRegister, kInterpreterBytecodeArrayRegister,
x6, kInterpreterAccumulatorRegister);
__ Push(kJavaScriptCallArgCountRegister, kInterpreterBytecodeArrayRegister);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
__ Push(x6, kInterpreterAccumulatorRegister);
// Allocate the local and temporary register file on the stack.
Label stack_overflow;
@@ -1273,7 +1265,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
{
UseScratchRegisterScope temps(masm);
Register scratch = temps.AcquireX();
LoadStackLimit(masm, scratch, StackLimitKind::kRealStackLimit);
__ LoadStackLimit(scratch, StackLimitKind::kRealStackLimit);
__ Cmp(x10, scratch);
}
__ B(lo, &stack_overflow);
@@ -1304,7 +1296,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Perform interrupt stack check.
// TODO(solanes): Merge with the real stack limit check above.
Label stack_check_interrupt, after_stack_check_interrupt;
LoadStackLimit(masm, x10, StackLimitKind::kInterruptStackLimit);
__ LoadStackLimit(x10, StackLimitKind::kInterruptStackLimit);
__ Cmp(sp, x10);
__ B(lo, &stack_check_interrupt);
__ Bind(&after_stack_check_interrupt);
@@ -1346,7 +1338,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&do_return);
// The return value is in x0.
LeaveInterpreterFrame(masm, x2);
LeaveInterpreterFrame(masm, x2, x4);
__ Ret();
__ bind(&stack_check_interrupt);
@@ -1433,7 +1425,7 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm,
// Add a stack check before pushing arguments.
Label stack_overflow, done;
Generate_StackOverflowCheck(masm, slots_to_claim, &stack_overflow);
__ StackOverflowCheck(slots_to_claim, &stack_overflow);
__ B(&done);
__ Bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
@@ -2184,7 +2176,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
Register len = x4;
Label stack_overflow;
Generate_StackOverflowCheck(masm, len, &stack_overflow);
__ StackOverflowCheck(len, &stack_overflow);
// Skip argument setup if we don't need to push any varargs.
Label done;
@@ -2257,12 +2249,18 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ Bind(&new_target_constructor);
}
Register args_fp = x5;
Register len = x6;
#ifdef V8_NO_ARGUMENTS_ADAPTOR
// TODO(victorgomes): Remove this copy when all the arguments adaptor frame
// code is erased.
__ Mov(args_fp, fp);
__ Ldr(len, MemOperand(fp, StandardFrameConstants::kArgCOffset));
#else
// Check if we have an arguments adaptor frame below the function frame.
// args_fp will point to the frame that contains the actual arguments, which
// will be the current frame unless we have an arguments adaptor frame, in
// which case args_fp points to the arguments adaptor frame.
Register args_fp = x5;
Register len = x6;
{
Label arguments_adaptor, arguments_done;
Register scratch = x10;
@@ -2291,12 +2289,13 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
}
__ Bind(&arguments_done);
}
#endif
Label stack_done, stack_overflow;
__ Subs(len, len, start_index);
__ B(le, &stack_done);
// Check for stack overflow.
Generate_StackOverflowCheck(masm, x6, &stack_overflow);
__ StackOverflowCheck(len, &stack_overflow);
Generate_PrepareForCopyingVarargs(masm, argc, len);
@@ -2462,7 +2461,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
Label done;
LoadStackLimit(masm, x10, StackLimitKind::kRealStackLimit);
__ LoadStackLimit(x10, StackLimitKind::kRealStackLimit);
// Make x10 the space we have left. The stack might already be overflowed
// here which will cause x10 to become negative.
__ Sub(x10, sp, x10);
@@ -2826,7 +2825,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// receiver.
__ RecordComment("-- Stack check --");
__ Add(scratch1, argc_expected, 1);
Generate_StackOverflowCheck(masm, scratch1, &stack_overflow);
__ StackOverflowCheck(scratch1, &stack_overflow);
// Round up number of slots to be even, to maintain stack alignment.
__ RecordComment("-- Allocate callee frame slots --");
@@ -3821,6 +3820,12 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ Lsr(unwind_limit, unwind_limit, kSystemPointerSizeLog2);
__ Mov(x5, unwind_limit);
__ CopyDoubleWords(x3, x1, x5);
#ifdef V8_NO_ARGUMENTS_ADAPTOR
// Since {unwind_limit} is the frame size up to the parameter count, we might
// end up with a unaligned stack pointer. This is later recovered when
// setting the stack pointer to {caller_frame_top_offset}.
__ Bic(unwind_limit, unwind_limit, 1);
#endif
__ Drop(unwind_limit);
// Compute the output frame in the deoptimizer.
......
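Note: the new Bic clears bit 0 of {unwind_limit}, so an odd slot count (now that the unwound size only reaches up to the parameter count) cannot leave sp misaligned; sp is later restored from {caller_frame_top_offset} anyway. The same rounding in plain C++ (illustrative only):

#include <cstdint>

// Round an odd slot count down to even so that dropping slots * 8 bytes keeps
// the arm64 stack pointer 16-byte aligned. Equivalent to Bic(x, x, 1).
uint64_t AlignUnwindSlots(uint64_t unwind_limit_slots) {
  return unwind_limit_slots & ~uint64_t{1};
}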
@@ -2079,23 +2079,148 @@ void TurboAssembler::PrepareForTailCall(Register callee_args_count,
Mov(sp, dst_reg);
}
void MacroAssembler::InvokePrologue(Register expected_parameter_count,
Register actual_parameter_count,
Label* done, InvokeFlag flag) {
Label regular_invoke;
// Check whether the expected and actual arguments count match. The registers
// are set up according to contract with ArgumentsAdaptorTrampoline:
// x0: actual arguments count.
// x1: function (passed through to callee).
// x2: expected arguments count.
// The code below is made a lot easier because the calling code already sets
// up actual and expected registers according to the contract.
DCHECK_EQ(actual_parameter_count, x0);
DCHECK_EQ(expected_parameter_count, x2);
void MacroAssembler::LoadStackLimit(Register destination, StackLimitKind kind) {
DCHECK(root_array_available());
Isolate* isolate = this->isolate();
ExternalReference limit =
kind == StackLimitKind::kRealStackLimit
? ExternalReference::address_of_real_jslimit(isolate)
: ExternalReference::address_of_jslimit(isolate);
DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));
intptr_t offset =
TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
Ldr(destination, MemOperand(kRootRegister, offset));
}
void MacroAssembler::StackOverflowCheck(Register num_args,
Label* stack_overflow) {
UseScratchRegisterScope temps(this);
Register scratch = temps.AcquireX();
// Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and
// preemption) here, so the "real stack limit" is checked.
LoadStackLimit(scratch, StackLimitKind::kRealStackLimit);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
Sub(scratch, sp, scratch);
// Check if the arguments will overflow the stack.
Cmp(scratch, Operand(num_args, LSL, kSystemPointerSizeLog2));
B(le, stack_overflow);
}
void MacroAssembler::InvokePrologue(Register formal_parameter_count,
Register actual_argument_count, Label* done,
InvokeFlag flag) {
// x0: actual arguments count.
// x1: function (passed through to callee).
// x2: expected arguments count.
// x3: new target
Label regular_invoke;
DCHECK_EQ(actual_argument_count, x0);
DCHECK_EQ(formal_parameter_count, x2);
#ifdef V8_NO_ARGUMENTS_ADAPTOR
// If the formal parameter count is equal to the adaptor sentinel, no need
// to push undefined value as arguments.
Cmp(formal_parameter_count, Operand(kDontAdaptArgumentsSentinel));
B(eq, &regular_invoke);
// If overapplication or if the actual argument count is equal to the
// formal parameter count, no need to push extra undefined values.
Register extra_argument_count = x2;
Subs(extra_argument_count, formal_parameter_count, actual_argument_count);
B(le, &regular_invoke);
// The stack pointer in arm64 needs to be 16-byte aligned. We might need to
// (1) add an extra padding or (2) remove (re-use) the extra padding already
// in the stack. Let {slots_to_copy} be the number of slots (arguments) to
// move up in the stack and let {slots_to_claim} be the number of extra stack
// slots to claim.
Label even_extra_count, skip_move;
Register slots_to_copy = x4;
Register slots_to_claim = x5;
Add(slots_to_copy, actual_argument_count, 1); // Copy with receiver.
Mov(slots_to_claim, extra_argument_count);
Tbz(extra_argument_count, 0, &even_extra_count);
// Calculate {slots_to_claim} when {extra_argument_count} is odd.
// If {actual_argument_count} is even, we need one extra padding slot
// {slots_to_claim = extra_argument_count + 1}.
// If {actual_argument_count} is odd, we know that the
// original arguments will have a padding slot that we can reuse
// {slots_to_claim = extra_argument_count - 1}.
{
Register scratch = x11;
Add(slots_to_claim, extra_argument_count, 1);
And(scratch, actual_argument_count, 1);
Eor(scratch, scratch, 1);
Sub(slots_to_claim, slots_to_claim, Operand(scratch, LSL, 1));
}
Bind(&even_extra_count);
Cbz(slots_to_claim, &skip_move);
Label stack_overflow;
StackOverflowCheck(slots_to_claim, &stack_overflow);
Claim(slots_to_claim);
// Move the arguments already in the stack including the receiver.
{
Register src = x6;
Register dst = x7;
SlotAddress(src, slots_to_claim);
SlotAddress(dst, 0);
CopyDoubleWords(dst, src, slots_to_copy);
}
Bind(&skip_move);
Register actual_argument_with_receiver = x4;
Register pointer_next_value = x5;
Add(actual_argument_with_receiver, actual_argument_count,
1); // {slots_to_copy} was scratched.
// Copy extra arguments as undefined values.
{
Label loop;
Register undefined_value = x6;
Register count = x7;
LoadRoot(undefined_value, RootIndex::kUndefinedValue);
SlotAddress(pointer_next_value, actual_argument_with_receiver);
Mov(count, extra_argument_count);
Bind(&loop);
Str(undefined_value,
MemOperand(pointer_next_value, kSystemPointerSize, PostIndex));
Subs(count, count, 1);
Cbnz(count, &loop);
}
// Set padding if needed.
{
Label skip;
Register total_args_slots = x4;
Add(total_args_slots, actual_argument_with_receiver, extra_argument_count);
Tbz(total_args_slots, 0, &skip);
Str(padreg, MemOperand(pointer_next_value));
Bind(&skip);
}
B(&regular_invoke);
bind(&stack_overflow);
{
FrameScope frame(this,
has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
CallRuntime(Runtime::kThrowStackOverflow);
Unreachable();
}
#else
// Check whether the expected and actual arguments count match. The registers
// are set up according to contract with ArgumentsAdaptorTrampoline.
// If actual == expected perform a regular invocation.
Cmp(expected_parameter_count, actual_parameter_count);
Cmp(formal_parameter_count, actual_argument_count);
B(eq, &regular_invoke);
// The argument counts mismatch, generate a call to the argument adaptor.
@@ -2108,6 +2233,7 @@ void MacroAssembler::InvokePrologue(Register expected_parameter_count,
} else {
Jump(adaptor, RelocInfo::CODE_TARGET);
}
#endif
Bind(&regular_invoke);
}
......
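Note: the undefined-padding path in InvokePrologue above must keep sp 16-byte aligned, so the argument area always occupies an even number of 8-byte slots. A host-side model of the {slots_to_claim} computation (plain C++, illustrative only, not V8 code):

#include <cassert>
#include <cstdint>

// Model of the slots_to_claim arithmetic in InvokePrologue (not V8 code).
// actual/formal are argument counts excluding the receiver (x0/x2 above).
uint64_t SlotsToClaim(uint64_t actual, uint64_t formal) {
  assert(formal > actual);  // this path is only reached when under-applying
  uint64_t extra = formal - actual;
  if ((extra & 1) == 0) return extra;  // parity preserved, padding unchanged
  // extra is odd: one padding slot is either reused from the existing
  // argument area or newly added, depending on the parity of {actual}.
  // This mirrors the Add/And/Eor/Sub sequence above.
  return (extra + 1) - 2 * ((actual & 1) ^ 1);
}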
@@ -145,6 +145,10 @@ enum PreShiftImmMode {
kAnyShift // Allow any pre-shift.
};
// TODO(victorgomes): Move definition to macro-assembler.h, once all other
// platforms are updated.
enum class StackLimitKind { kInterruptStackLimit, kRealStackLimit };
class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
public:
using TurboAssemblerBase::TurboAssemblerBase;
@@ -1979,6 +1983,11 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
void DecrementCounter(StatsCounter* counter, int value, Register scratch1,
Register scratch2);
// ---------------------------------------------------------------------------
// Stack limit utilities
void LoadStackLimit(Register destination, StackLimitKind kind);
void StackOverflowCheck(Register num_args, Label* stack_overflow);
// ---------------------------------------------------------------------------
// Garbage collector support (GC).
......
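Note: the two helpers declared above centralize the pattern used throughout builtins-arm64.cc: load the real (non-interrupt) JS stack limit via the root register, then branch if the requested slots would not fit. A host-side model of the comparison (illustrative only):

#include <cstdint>

// Model of MacroAssembler::StackOverflowCheck's comparison (not V8 code):
// take the overflow path when num_args slots would not fit between sp and
// the real JS stack limit.
bool WouldOverflow(uint64_t sp, uint64_t real_jslimit, uint64_t num_args) {
  constexpr uint64_t kSystemPointerSize = 8;
  // Sub(scratch, sp, scratch): space left; may wrap if the stack is already
  // overflowed, which the signed "le" comparison treats as negative.
  int64_t space_left = static_cast<int64_t>(sp - real_jslimit);
  int64_t needed = static_cast<int64_t>(num_args * kSystemPointerSize);
  return space_left <= needed;  // B(le, stack_overflow)
}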
@@ -3093,11 +3093,10 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
auto call_descriptor = linkage()->GetIncomingDescriptor();
const int returns = RoundUp(frame()->GetReturnSlotCount(), 2);
if (returns != 0) {
__ Drop(returns);
}
@@ -3114,35 +3113,78 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
unwinding_info_writer_.MarkBlockWillExit();
// We might need x3 for scratch.
DCHECK_EQ(0u, call_descriptor->CalleeSavedRegisters() & x3.bit());
const int parameter_count =
static_cast<int>(call_descriptor->StackParameterCount());
Arm64OperandConverter g(this, nullptr);
int pop_count = static_cast<int>(call_descriptor->StackParameterCount());
// {aditional_pop_count} is only greater than zero if {parameter_count = 0}.
// Check RawMachineAssembler::PopAndReturn.
if (parameter_count != 0) {
if (additional_pop_count->IsImmediate()) {
DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0);
} else if (__ emit_debug_code()) {
__ cmp(g.ToRegister(additional_pop_count), Operand(0));
__ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue);
}
}
Register argc_reg = x3;
#ifdef V8_NO_ARGUMENTS_ADAPTOR
// Functions with JS linkage have at least one parameter (the receiver).
// If {parameter_count} == 0, it means it is a builtin with
// kDontAdaptArgumentsSentinel, which takes care of JS arguments popping
// itself.
const bool drop_jsargs = frame_access_state()->has_frame() &&
call_descriptor->IsJSFunctionCall() &&
parameter_count != 0;
#else
const bool drop_jsargs = false;
#endif
if (call_descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now unless they have an variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (additional_pop_count->IsImmediate() &&
g.ToConstant(additional_pop_count).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ B(&return_label_);
return;
} else {
__ Bind(&return_label_);
AssembleDeconstructFrame();
}
} else {
AssembleDeconstructFrame();
}
if (drop_jsargs) {
// Get the actual argument count.
__ Ldr(argc_reg, MemOperand(fp, StandardFrameConstants::kArgCOffset));
}
AssembleDeconstructFrame();
}
if (pop->IsImmediate()) {
pop_count += g.ToConstant(pop).ToInt32();
__ DropArguments(pop_count);
if (drop_jsargs) {
// We must pop all arguments from the stack (including the receiver). This
// number of arguments is given by max(1 + argc_reg, parameter_count).
Label argc_reg_has_final_count;
__ Add(argc_reg, argc_reg, 1); // Consider the receiver.
if (parameter_count > 1) {
__ Cmp(argc_reg, Operand(parameter_count));
__ B(&argc_reg_has_final_count, ge);
__ Mov(argc_reg, Operand(parameter_count));
__ Bind(&argc_reg_has_final_count);
}
__ DropArguments(argc_reg);
} else if (additional_pop_count->IsImmediate()) {
int additional_count = g.ToConstant(additional_pop_count).ToInt32();
__ DropArguments(parameter_count + additional_count);
} else if (parameter_count == 0) {
__ DropArguments(g.ToRegister(additional_pop_count));
} else {
Register pop_reg = g.ToRegister(pop);
__ Add(pop_reg, pop_reg, pop_count);
__ DropArguments(pop_reg);
// {additional_pop_count} is guaranteed to be zero if {parameter_count !=
// 0}. Check RawMachineAssembler::PopAndReturn.
__ DropArguments(parameter_count);
}
__ AssertSpAligned();
__ Ret();
}
......
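Note: with drop_jsargs, the return sequence pops max(argc + 1, parameter_count) slots, because a callee may have been invoked with more arguments than it declares. A model of that count (plain C++, illustrative; parameter_count here already includes the receiver, matching the comment above):

#include <algorithm>

// Model of the drop_jsargs popping above (not V8 code): pop whichever is
// larger, what the caller actually pushed (argc + receiver) or the callee's
// declared parameter area.
int SlotsToPopOnReturn(int actual_argc, int parameter_count) {
  return std::max(actual_argc + 1, parameter_count);
}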
@@ -962,9 +962,22 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
goto_catch_handler ? catch_handler_pc_offset_ : real_bytecode_offset;
const int parameters_count = InternalFormalParameterCountWithReceiver(shared);
#ifdef V8_NO_ARGUMENTS_ADAPTOR
// If this is the bottom most frame or the previous frame was the arguments
// adaptor fake frame, then we already have extra arguments in the stack
// (including any extra padding). Therefore we should not try to add any
// padding.
bool should_pad_arguments =
!is_bottommost && (translated_state_.frames()[frame_index - 1]).kind() !=
TranslatedFrame::kArgumentsAdaptor;
#else
bool should_pad_arguments = true;
#endif
const int locals_count = translated_frame->height();
InterpretedFrameInfo frame_info =
InterpretedFrameInfo::Precise(parameters_count, locals_count, is_topmost);
InterpretedFrameInfo frame_info = InterpretedFrameInfo::Precise(
parameters_count, locals_count, is_topmost, should_pad_arguments);
const uint32_t output_frame_size = frame_info.frame_size_in_bytes();
TranslatedFrame::iterator function_iterator = value_iterator++;
@@ -996,9 +1009,10 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
// Compute the incoming parameter translation.
ReadOnlyRoots roots(isolate());
if (ShouldPadArguments(parameters_count)) {
if (should_pad_arguments && ShouldPadArguments(parameters_count)) {
frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
}
// Note: parameters_count includes the receiver.
if (verbose_tracing_enabled() && is_bottommost &&
actual_argument_count_ > parameters_count - 1) {
@@ -1008,7 +1022,7 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
}
frame_writer.PushStackJSArguments(value_iterator, parameters_count);
DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(should_pad_arguments),
frame_writer.top_offset());
if (verbose_tracing_enabled()) {
PrintF(trace_scope()->file(), " -------------------------\n");
@@ -1250,9 +1264,13 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(
translated_frame->raw_shared_info().internal_formal_parameter_count();
const int extra_argument_count =
argument_count_without_receiver - formal_parameter_count;
// The number of pushed arguments is the maximum of the actual argument count
// and the formal parameter count + the receiver.
const bool should_pad_args = ShouldPadArguments(
std::max(argument_count_without_receiver, formal_parameter_count) + 1);
const int output_frame_size =
std::max(0, extra_argument_count * kSystemPointerSize);
std::max(0, extra_argument_count * kSystemPointerSize) +
(should_pad_args ? kSystemPointerSize : 0);
if (verbose_tracing_enabled()) {
PrintF(trace_scope_->file(),
" translating arguments adaptor => variable_size=%d\n",
@@ -1272,14 +1290,14 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(
output_frame->SetFp(output_[frame_index - 1]->GetFp());
output_[frame_index] = output_frame;
if (extra_argument_count > 0) {
FrameWriter frame_writer(this, output_frame, verbose_trace_scope());
ReadOnlyRoots roots(isolate());
if (ShouldPadArguments(extra_argument_count)) {
if (should_pad_args) {
frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
}
if (extra_argument_count > 0) {
// The receiver and arguments with index below the formal parameter
// count are in the fake adaptor frame, because they are used to create the
// arguments object. We should however not push them, since the interpreter
@@ -2015,7 +2033,9 @@ unsigned Deoptimizer::ComputeInputFrameSize() const {
// static
unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo shared) {
int parameter_slots = InternalFormalParameterCountWithReceiver(shared);
#ifndef V8_NO_ARGUMENTS_ADAPTOR
if (ShouldPadArguments(parameter_slots)) parameter_slots++;
#endif
return parameter_slots * kSystemPointerSize;
}
......
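Note: the deoptimizer now adds an arguments padding slot only when the frame being materialized owns its argument area; if the frame below (the bottommost caller or a fake adaptor frame) already laid the arguments out, the padding is already in place. A compact model of the two decisions above (hypothetical helper names, not V8 code):

#include <algorithm>

// A padding slot is needed whenever an argument area would otherwise occupy
// an odd number of slots on arm64 (what ShouldPadArguments checks).
bool NeedsPaddingSlot(int slot_count) { return (slot_count & 1) != 0; }

// Interpreted frame: skip padding when the frame below already provides the
// (possibly padded) arguments.
bool ShouldPadInterpretedFrame(bool is_bottommost, bool prev_is_adaptor) {
  return !is_bottommost && !prev_is_adaptor;
}

// Fake adaptor frame: it now holds max(actual, formal) arguments plus the
// receiver, so padding depends on that combined count.
bool ShouldPadAdaptorFrame(int actual_without_receiver, int formal) {
  return NeedsPaddingSlot(std::max(actual_without_receiver, formal) + 1);
}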
@@ -712,15 +712,23 @@ class FrameDescription {
return *GetFrameSlotPointer(offset);
}
unsigned GetLastArgumentSlotOffset() {
unsigned GetLastArgumentSlotOffset(bool pad_arguments = true) {
int parameter_slots = parameter_count();
if (ShouldPadArguments(parameter_slots)) parameter_slots++;
if (pad_arguments && ShouldPadArguments(parameter_slots)) parameter_slots++;
return GetFrameSize() - parameter_slots * kSystemPointerSize;
}
Address GetFramePointerAddress() {
int fp_offset =
GetLastArgumentSlotOffset() - StandardFrameConstants::kCallerSPOffset;
#ifdef V8_NO_ARGUMENTS_ADAPTOR
// We should not pad arguments in the bottom frame, since this
// already contain a padding if necessary and it might contain
// extra arguments (actual argument count > parameter count).
const bool pad_arguments_bottom_frame = false;
#else
const bool pad_arguments_bottom_frame = true;
#endif
int fp_offset = GetLastArgumentSlotOffset(pad_arguments_bottom_frame) -
StandardFrameConstants::kCallerSPOffset;
return reinterpret_cast<Address>(GetFrameSlotPointer(fp_offset));
}
......
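Note: GetFramePointerAddress now asks GetLastArgumentSlotOffset not to add padding for the bottom frame, because that frame already contains any padding and possibly extra arguments. A model of the offset computation (illustrative only, not V8 code):

#include <cstdint>

// Offset of the last argument slot from the top of a frame description.
uint32_t LastArgumentSlotOffset(uint32_t frame_size_bytes,
                                int parameter_slots, bool pad_arguments) {
  constexpr uint32_t kSystemPointerSize = 8;
  if (pad_arguments && (parameter_slots & 1)) parameter_slots++;  // arm64 pad
  return frame_size_bytes - parameter_slots * kSystemPointerSize;
}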
@@ -2246,7 +2246,7 @@ bool BuiltinContinuationModeIsWithCatch(BuiltinContinuationMode mode) {
InterpretedFrameInfo::InterpretedFrameInfo(int parameters_count_with_receiver,
int translation_height,
bool is_topmost,
bool is_topmost, bool pad_arguments,
FrameInfoKind frame_info_kind) {
const int locals_count = translation_height;
@@ -2267,7 +2267,7 @@ InterpretedFrameInfo::InterpretedFrameInfo(int parameters_count_with_receiver,
// the part described by InterpreterFrameConstants. This will include
// argument padding, when needed.
const int parameter_padding_slots =
ArgumentPaddingSlots(parameters_count_with_receiver);
pad_arguments ? ArgumentPaddingSlots(parameters_count_with_receiver) : 0;
const int fixed_frame_size =
InterpreterFrameConstants::kFixedFrameSize +
(parameters_count_with_receiver + parameter_padding_slots) *
......
@@ -1334,14 +1334,15 @@ enum class BuiltinContinuationMode {
class InterpretedFrameInfo {
public:
static InterpretedFrameInfo Precise(int parameters_count_with_receiver,
int translation_height, bool is_topmost) {
int translation_height, bool is_topmost,
bool pad_arguments) {
return {parameters_count_with_receiver, translation_height, is_topmost,
FrameInfoKind::kPrecise};
pad_arguments, FrameInfoKind::kPrecise};
}
static InterpretedFrameInfo Conservative(int parameters_count_with_receiver,
int locals_count) {
return {parameters_count_with_receiver, locals_count, false,
return {parameters_count_with_receiver, locals_count, false, true,
FrameInfoKind::kConservative};
}
@@ -1356,7 +1357,7 @@ class InterpretedFrameInfo {
private:
InterpretedFrameInfo(int parameters_count_with_receiver,
int translation_height, bool is_topmost,
FrameInfoKind frame_info_kind);
bool pad_arguments, FrameInfoKind frame_info_kind);
uint32_t register_stack_slot_count_;
uint32_t frame_size_in_bytes_without_fixed_;
......