Commit b4d63cf5 authored by Martyn Capewell, committed by Commit Bot

[arm64] Restore jssp from csp, remove jssp refs

Now that jssp and csp are equal, simplify some TF opcodes by restoring jssp
directly from csp on stack pointer switches. Also, remove some direct
references to jssp.

Bug: v8:6644
Change-Id: I20ee54fc0d536809a0aa72def43337f83cc91685
Reviewed-on: https://chromium-review.googlesource.com/857457
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Martyn Capewell <martyn.capewell@arm.com>
Cr-Commit-Position: refs/heads/master@{#50462}
parent 6ef05c78
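Note for readers of the diff below: the old kRestoreJSSP sequence reloaded jssp from a slot written at the switch to csp; with the two registers kept equal, the restore collapses to a single register move. A minimal standalone sketch of the invariant this relies on (plain C++, not V8 code; the pointer values are illustrative):

#include <cassert>
#include <cstdint>

int main() {
  // Model the two arm64 stack pointers as plain values.
  uint64_t csp = 0x7ffffff0;  // C stack pointer, always 16-byte aligned.
  uint64_t jssp = csp;        // JS stack pointer; this CL keeps jssp == csp.

  // Old restore: Ldr(jssp, MemOperand(csp)); Mov(csp, jssp); -- a memory
  // round-trip through the slot stored at the switch. New restore:
  jssp = csp;  // __ Mov(jssp, csp);
  assert(jssp == csp);
  return 0;
}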
@@ -30,7 +30,7 @@ namespace internal {
 void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
   __ Mov(x5, Operand(x0, LSL, kPointerSizeLog2));
-  __ Str(x1, MemOperand(jssp, x5));
+  __ Str(x1, MemOperand(__ StackPointer(), x5));
   __ Push(x1, x2);
   __ Add(x0, x0, Operand(3));
   __ TailCallRuntime(Runtime::kNewArray);
@@ -100,8 +100,8 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
 void MathPowStub::Generate(MacroAssembler* masm) {
   // Stack on entry:
-  // jssp[0]: Exponent (as a tagged value).
-  // jssp[1]: Base (as a tagged value).
+  // sp[0]: Exponent (as a tagged value).
+  // sp[1]: Base (as a tagged value).
   //
   // The (tagged) result will be returned in x0, as a heap number.
@@ -276,11 +276,11 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   // The stack on entry holds the arguments and the receiver, with the receiver
   // at the highest address:
   //
-  // jssp[argc-1]: receiver
-  // jssp[argc-2]: arg[argc-2]
-  // ...          ...
-  // jssp[1]:     arg[1]
-  // jssp[0]:     arg[0]
+  // sp[argc-1]: receiver
+  // sp[argc-2]: arg[argc-2]
+  // ...         ...
+  // sp[1]:      arg[1]
+  // sp[0]:      arg[0]
   //
   // The arguments are in reverse order, so that arg[argc-2] is actually the
   // first argument to the target function and arg[0] is the last.
@@ -533,7 +533,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
   __ Push(x13, x12, xzr, x10);
   // Set up fp.
-  __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset);
+  __ Sub(fp, __ StackPointer(), EntryFrameConstants::kCallerFPOffset);
   // Push the JS entry frame marker. Also set js_entry_sp if this is the
   // outermost JS call.
@@ -555,12 +555,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
   __ Push(x12, padreg);
   // The frame set up looks like this:
-  // jssp[0] : padding.
-  // jssp[1] : JS entry frame marker.
-  // jssp[2] : C entry FP.
-  // jssp[3] : stack frame marker.
-  // jssp[4] : stack frame marker.
-  // jssp[5] : bad frame pointer 0xFFF...FF  <- fp points here.
+  // sp[0] : padding.
+  // sp[1] : JS entry frame marker.
+  // sp[2] : C entry FP.
+  // sp[3] : stack frame marker.
+  // sp[4] : stack frame marker.
+  // sp[5] : bad frame pointer 0xFFF...FF  <- fp points here.
   // Jump to a faked try block that does the invoke, with a faked catch
   // block that sets the pending exception.
@@ -602,7 +602,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
   __ Push(padreg, x10);
   // Set this new handler as the current one.
-  __ Str(jssp, MemOperand(x11));
+  {
+    UseScratchRegisterScope temps(masm);
+    Register scratch = temps.AcquireX();
+    __ Mov(scratch, __ StackPointer());
+    __ Str(scratch, MemOperand(x11));
+  }
   // If an exception not caught by another handler occurs, this handler
   // returns control to the code after the B(&invoke) above, which
@@ -633,12 +638,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
   // x0 holds the result.
   // The stack pointer points to the top of the entry frame pushed on entry from
   // C++ (at the beginning of this stub):
-  // jssp[0] : padding.
-  // jssp[1] : JS entry frame marker.
-  // jssp[2] : C entry FP.
-  // jssp[3] : stack frame marker.
-  // jssp[4] : stack frame marker.
-  // jssp[5] : bad frame pointer 0xFFF...FF  <- fp points here.
+  // sp[0] : padding.
+  // sp[1] : JS entry frame marker.
+  // sp[2] : C entry FP.
+  // sp[3] : stack frame marker.
+  // sp[4] : stack frame marker.
+  // sp[5] : bad frame pointer 0xFFF...FF  <- fp points here.
   // Check if the current stack frame is marked as the outermost JS frame.
   Label non_outermost_js_2;
......
@@ -220,8 +220,12 @@ void Deoptimizer::TableEntryGenerator::Generate() {
   }
   __ Pop(x4, padreg);  // Restore deoptimizer object (class Deoptimizer).
-  __ Ldr(__ StackPointer(),
-         MemOperand(x4, Deoptimizer::caller_frame_top_offset()));
+  {
+    UseScratchRegisterScope temps(masm());
+    Register scratch = temps.AcquireX();
+    __ Ldr(scratch, MemOperand(x4, Deoptimizer::caller_frame_top_offset()));
+    __ Mov(__ StackPointer(), scratch);
+  }
   // Replace the current (input) frame with the output frames.
   Label outer_push_loop, inner_push_loop,
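A note on the scratch register introduced above: once __ StackPointer() can be csp (the real sp), a direct load is no longer encodable, because register 31 in a load's destination field selects xzr rather than sp, whereas MOV to sp is an alias of ADD #0 and is legal. A minimal sketch of the resulting data flow (plain C++, not V8 code; the address is illustrative):

#include <cstdint>

int main() {
  uint64_t caller_frame_top = 0x7ffff000;  // Value the Ldr above reads.
  uint64_t scratch = caller_frame_top;     // __ Ldr(scratch, MemOperand(x4, ...));
  uint64_t sp_model = scratch;             // __ Mov(__ StackPointer(), scratch);
  return sp_model == caller_frame_top ? 0 : 1;  // The two-step copy is exact.
}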
......
@@ -1885,39 +1885,18 @@ void TurboAssembler::CallCFunction(Register function, int num_of_reg_args,
   // so the return address in the link register stays correct.
   Call(function);
-  if (csp.Is(old_stack_pointer)) {
-    if (num_of_reg_args > kRegisterPassedArguments) {
-      // Drop the register passed arguments.
-      int claim_slots = RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
-      Drop(claim_slots);
-    }
-  } else {
-    DCHECK(jssp.Is(old_stack_pointer));
+  if (num_of_reg_args > kRegisterPassedArguments) {
+    // Drop the register passed arguments.
+    int claim_slots = RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
+    Drop(claim_slots);
+  }
+
+  if (jssp.Is(old_stack_pointer)) {
     if (emit_debug_code()) {
       UseScratchRegisterScope temps(this);
       Register temp = temps.AcquireX();
-      if (num_of_reg_args > kRegisterPassedArguments) {
-        // We don't need to drop stack arguments, as the stack pointer will be
-        // jssp when returning from this function. However, in debug builds, we
-        // can check that jssp is as expected.
-        int claim_slots =
-            RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
-        // Check jssp matches the previous value on the stack.
-        Ldr(temp, MemOperand(csp, claim_slots * kPointerSize));
-        Cmp(jssp, temp);
-        Check(eq, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
-      } else {
-        // Because the stack pointer must be aligned on a 16-byte boundary, the
-        // aligned csp can be up to 12 bytes below the jssp. This is the case
-        // where we only pushed one W register on top of an aligned jssp.
-        Sub(temp, csp, old_stack_pointer);
-        // We want temp <= 0 && temp >= -12.
-        Cmp(temp, 0);
-        Ccmp(temp, -12, NFlag, le);
-        Check(ge, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
-      }
+      Mov(temp, csp);
+      Cmp(old_stack_pointer, temp);
+      Check(eq, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
     }
     SetStackPointer(old_stack_pointer);
   }
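The rounding in the retained drop path keeps the stack pointer 16-byte aligned: arm64 passes the first eight integer arguments in x0-x7, and slots for spilled arguments are claimed in pairs. A standalone sketch of the arithmetic (assumed constants mirroring the arm64 port, not V8 headers):

#include <cassert>

constexpr int RoundUp(int value, int multiple) {
  return ((value + multiple - 1) / multiple) * multiple;
}

int main() {
  const int kRegisterPassedArguments = 8;  // x0-x7.
  const int kPointerSize = 8;              // Bytes per stack slot on arm64.
  int num_of_reg_args = 9;                 // One argument spills to the stack.
  int claim_slots = RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
  assert(claim_slots == 2);                        // Rounded up to a pair.
  assert((claim_slots * kPointerSize) % 16 == 0);  // Alignment preserved.
  return 0;
}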
@@ -2166,13 +2145,16 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
   Add(dst_reg, dst_reg, 15);
   Bic(dst_reg, dst_reg, 15);
+  DCHECK(jssp.Is(StackPointer()));
   Register src_reg = caller_args_count_reg;
   // Calculate the end of source area. +kPointerSize is for the receiver.
   if (callee_args_count.is_reg()) {
-    Add(src_reg, jssp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
+    Add(src_reg, StackPointer(),
+        Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
     Add(src_reg, src_reg, kPointerSize);
   } else {
-    Add(src_reg, jssp, (callee_args_count.immediate() + 1) * kPointerSize);
+    Add(src_reg, StackPointer(),
+        (callee_args_count.immediate() + 1) * kPointerSize);
   }

   // Round src_reg up to a multiple of 16 bytes, so we include any potential
@@ -2202,11 +2184,11 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
   Ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
   Str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
   bind(&entry);
-  Cmp(jssp, src_reg);
+  Cmp(StackPointer(), src_reg);
   B(ne, &loop);

   // Leave current frame.
-  Mov(jssp, dst_reg);
+  Mov(StackPointer(), dst_reg);
   SetStackPointer(jssp);
   AssertStackConsistency();
 }
@@ -2477,8 +2459,9 @@ void TurboAssembler::TruncateDoubleToIDelayed(Zone* zone, Register result,
 }

 void TurboAssembler::Prologue() {
+  DCHECK(jssp.Is(StackPointer()));
   Push(lr, fp, cp, x1);
-  Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
+  Add(fp, StackPointer(), StandardFrameConstants::kFixedFrameSizeFromFp);
 }

 void TurboAssembler::EnterFrame(StackFrame::Type type) {
@@ -2491,11 +2474,11 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
     Mov(type_reg, StackFrame::TypeToMarker(type));
     Mov(code_reg, Operand(CodeObject()));
     Push(lr, fp, type_reg, code_reg);
-    Add(fp, jssp, InternalFrameConstants::kFixedFrameSizeFromFp);
-    // jssp[4] : lr
-    // jssp[3] : fp
-    // jssp[1] : type
-    // jssp[0] : [code object]
+    Add(fp, StackPointer(), InternalFrameConstants::kFixedFrameSizeFromFp);
+    // sp[4] : lr
+    // sp[3] : fp
+    // sp[1] : type
+    // sp[0] : [code object]
   } else if (type == StackFrame::WASM_COMPILED) {
     DCHECK(csp.Is(StackPointer()));
     Mov(type_reg, StackFrame::TypeToMarker(type));
@@ -2517,11 +2500,12 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
     // The context pointer isn't part of the fixed frame, so add an extra slot
     // to account for it.
-    Add(fp, jssp, TypedFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
-    // jssp[3] : lr
-    // jssp[2] : fp
-    // jssp[1] : type
-    // jssp[0] : cp
+    Add(fp, StackPointer(),
+        TypedFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
+    // sp[3] : lr
+    // sp[2] : fp
+    // sp[1] : type
+    // sp[0] : cp
   }
 }
@@ -2535,7 +2519,7 @@ void TurboAssembler::LeaveFrame(StackFrame::Type type) {
     DCHECK(jssp.Is(StackPointer()));
     // Drop the execution stack down to the frame pointer and restore
     // the caller frame pointer and return address.
-    Mov(jssp, fp);
+    Mov(StackPointer(), fp);
     AssertStackConsistency();
     Pop(fp, lr);
   }
@@ -2582,7 +2566,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
   //  fp[-8]: STUB marker
   // fp[-16]: Space reserved for SPOffset.
   // fp[-24]: CodeObject()
-  // jssp -> fp[-32]: padding
+  //   sp -> fp[-32]: padding
   STATIC_ASSERT((2 * kPointerSize) == ExitFrameConstants::kCallerSPOffset);
   STATIC_ASSERT((1 * kPointerSize) == ExitFrameConstants::kCallerPCOffset);
   STATIC_ASSERT((0 * kPointerSize) == ExitFrameConstants::kCallerFPOffset);
@@ -2616,8 +2600,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
   // fp[-16]: Space reserved for SPOffset.
   // fp[-24]: CodeObject()
   // fp[-24 - fp_size]: Saved doubles (if save_doubles is true).
-  // jssp[8]: Extra space reserved for caller (if extra_space != 0).
-  // jssp -> jssp[0]: Space reserved for the return address.
+  // sp[8]: Extra space reserved for caller (if extra_space != 0).
+  // sp -> sp[0]: Space reserved for the return address.

   // Align and synchronize the system stack pointer with jssp.
   AlignAndSetCSPForFrame();

@@ -2630,7 +2614,6 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
   // fp[-24]: CodeObject()
   // fp[-24 - fp_size]: Saved doubles (if save_doubles is true).
   // csp[8]: Memory reserved for the caller if extra_space != 0.
-  //         Alignment padding, if necessary.
   // csp -> csp[0]: Space reserved for the return address.

   // ExitFrame::GetStateForFramePointer expects to find the return address at
......
@@ -552,7 +552,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Check the stack for overflow. We are not trying to catch interruptions
   // (i.e. debug break and preemption) here, so check the "real stack limit".
   Label stack_overflow;
-  __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
+  __ CompareRoot(__ StackPointer(), Heap::kRealStackLimitRootIndex);
   __ B(lo, &stack_overflow);

   // Get number of arguments for generator function.
@@ -579,8 +579,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   //  -- x10 : argument count
   //  -- cp : generator context
   //  -- lr : return address
-  //  -- jssp[arg count] : generator receiver
-  //  -- jssp[0 .. arg count - 1] : claimed for args
+  //  -- sp[arg count] : generator receiver
+  //  -- sp[0 .. arg count - 1] : claimed for args
   // -----------------------------------

   // Push holes for arguments to generator function. Since the parser forced
@@ -1010,9 +1010,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // Open a frame scope to indicate that there is a frame on the stack. The
   // MANUAL indicates that the scope shouldn't actually generate code to set up
   // the frame (that is done below).
+  DCHECK(jssp.Is(__ StackPointer()));
   FrameScope frame_scope(masm, StackFrame::MANUAL);
   __ Push(lr, fp, cp, closure);
-  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
+  __ Add(fp, __ StackPointer(), StandardFrameConstants::kFixedFrameSizeFromFp);

   // Get the bytecode array from the function object (or from the DebugInfo if
   // it is present) and load it into kInterpreterBytecodeArrayRegister.
@@ -1064,7 +1065,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // Do a stack check to ensure we don't go over the limit.
   Label ok;
   DCHECK(jssp.Is(__ StackPointer()));
-  __ Sub(x10, jssp, Operand(x11));
+  __ Sub(x10, __ StackPointer(), Operand(x11));
   __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
   __ B(hs, &ok);
   __ CallRuntime(Runtime::kThrowStackOverflow);
@@ -1649,8 +1650,9 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                  allocatable_register_count)) *
                          kPointerSize;
+  DCHECK(jssp.Is(__ StackPointer()));
   // Set up frame pointer.
-  __ Add(fp, jssp, frame_size);
+  __ Add(fp, __ StackPointer(), frame_size);

   if (with_result) {
     // Overwrite the hole inserted by the deoptimizer with the return value from
@@ -1786,9 +1788,9 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- x0 : argc
-  //  -- jssp[0] : argArray (if argc == 2)
-  //  -- jssp[8] : thisArg (if argc >= 1)
-  //  -- jssp[16] : receiver
+  //  -- sp[0] : argArray (if argc == 2)
+  //  -- sp[8] : thisArg (if argc >= 1)
+  //  -- sp[16] : receiver
   // -----------------------------------

   ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");
@@ -1840,7 +1842,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- x2 : argArray
   //  -- x1 : receiver
-  //  -- jssp[0] : thisArg
+  //  -- sp[0] : thisArg
   // -----------------------------------

   // 2. We don't need to check explicitly for callable receiver here,
void Builtins::Generate_ReflectApply(MacroAssembler* masm) { void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : argc // -- x0 : argc
// -- jssp[0] : argumentsList (if argc == 3) // -- sp[0] : argumentsList (if argc == 3)
// -- jssp[8] : thisArgument (if argc >= 2) // -- sp[8] : thisArgument (if argc >= 2)
// -- jssp[16] : target (if argc >= 1) // -- sp[16] : target (if argc >= 1)
// -- jssp[24] : receiver // -- sp[24] : receiver
// ----------------------------------- // -----------------------------------
ASM_LOCATION("Builtins::Generate_ReflectApply"); ASM_LOCATION("Builtins::Generate_ReflectApply");
...@@ -1988,7 +1990,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) { ...@@ -1988,7 +1990,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x2 : argumentsList // -- x2 : argumentsList
// -- x1 : target // -- x1 : target
// -- jssp[0] : thisArgument // -- sp[0] : thisArgument
// ----------------------------------- // -----------------------------------
// 2. We don't need to check explicitly for callable target here, // 2. We don't need to check explicitly for callable target here,
...@@ -2003,10 +2005,10 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) { ...@@ -2003,10 +2005,10 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : argc // -- x0 : argc
// -- jssp[0] : new.target (optional) // -- sp[0] : new.target (optional)
// -- jssp[8] : argumentsList // -- sp[8] : argumentsList
// -- jssp[16] : target // -- sp[16] : target
// -- jssp[24] : receiver // -- sp[24] : receiver
// ----------------------------------- // -----------------------------------
ASM_LOCATION("Builtins::Generate_ReflectConstruct"); ASM_LOCATION("Builtins::Generate_ReflectConstruct");
@@ -2070,7 +2072,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   //  -- x2 : argumentsList
   //  -- x1 : target
   //  -- x3 : new.target
-  //  -- jssp[0] : receiver (undefined)
+  //  -- sp[0] : receiver (undefined)
   // -----------------------------------

   // 2. We don't need to check explicitly for constructor target here,
@@ -2094,7 +2096,8 @@ void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ Push(x11, x1);    // x1: function
   __ SmiTag(x11, x0);  // x0: number of arguments.
   __ Push(x11, padreg);
-  __ Add(fp, jssp, ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp);
+  __ Add(fp, __ StackPointer(),
+         ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp);
 }

 void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
@@ -2104,7 +2107,7 @@ void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   // Get the number of arguments passed (as a smi), tear down the frame and
   // then drop the parameters and the receiver.
   __ Ldr(x10, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ Mov(jssp, fp);
+  __ Mov(__ StackPointer(), fp);
   __ Pop(fp, lr);

   // Drop actual parameters and receiver.
@@ -2857,7 +2860,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     __ Bic(scratch1, scratch1, 1);
     __ Claim(scratch1, kPointerSize);
-    __ Mov(copy_to, jssp);
+    __ Mov(copy_to, __ StackPointer());

     // Preparing the expected arguments is done in four steps, the order of
     // which is chosen so we can use LDP/STP and avoid conditional branches as
@@ -2914,14 +2917,15 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     __ RecordComment("-- Store padding --");
     __ Str(padreg, MemOperand(fp, -5 * kPointerSize));

-    // (4) Store receiver. Calculate target address from jssp to avoid checking
+    // (4) Store receiver. Calculate target address from the sp to avoid checking
     // for padding. Storing the receiver will overwrite either the extra slot
     // we copied with the actual arguments, if we did copy one, or the padding we
     // stored above.
     __ RecordComment("-- Store receiver --");
     __ Add(copy_from, fp, 2 * kPointerSize);
     __ Ldr(scratch1, MemOperand(copy_from, argc_actual, LSL, kPointerSizeLog2));
-    __ Str(scratch1, MemOperand(jssp, argc_expected, LSL, kPointerSizeLog2));
+    __ Str(scratch1,
+           MemOperand(__ StackPointer(), argc_expected, LSL, kPointerSizeLog2));

     // Arguments have been adapted. Now call the entry point.
     __ RecordComment("-- Call entry point --");
......
@@ -589,8 +589,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       // allocator.
       CallDescriptor::Flags flags(MiscField::decode(opcode));
       if (flags & CallDescriptor::kRestoreJSSP) {
-        __ Ldr(jssp, MemOperand(csp));
-        __ Mov(csp, jssp);
+        __ Mov(jssp, csp);
       }
       if (flags & CallDescriptor::kRestoreCSP) {
         __ Mov(csp, jssp);
@@ -623,8 +622,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       // allocator.
      CallDescriptor::Flags flags(MiscField::decode(opcode));
       if (flags & CallDescriptor::kRestoreJSSP) {
-        __ Ldr(jssp, MemOperand(csp));
-        __ Mov(csp, jssp);
+        __ Mov(jssp, csp);
       }
       if (flags & CallDescriptor::kRestoreCSP) {
         __ Mov(csp, jssp);
@@ -708,8 +706,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       // allocator.
       CallDescriptor::Flags flags(MiscField::decode(opcode));
       if (flags & CallDescriptor::kRestoreJSSP) {
-        __ Ldr(jssp, MemOperand(csp));
-        __ Mov(csp, jssp);
+        __ Mov(jssp, csp);
       }
       if (flags & CallDescriptor::kRestoreCSP) {
         __ Mov(csp, jssp);
@@ -1224,15 +1221,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
     case kArm64ClaimCSP: {
-      int count = RoundUp(i.InputInt32(0), 2);
+      int count = i.InputInt32(0);
+      DCHECK_EQ(count % 2, 0);
       Register prev = __ StackPointer();
       if (prev.Is(jssp)) {
         // TODO(titzer): make this a macro-assembler method.
-        // Align the CSP and store the previous JSSP on the stack. We do not
-        // need to modify the SP delta here, as we will continue to access the
-        // frame via JSSP.
-        UseScratchRegisterScope scope(tasm());
-        Register tmp = scope.AcquireX();
         // TODO(arm64): Storing JSSP on the stack is redundant when calling a C
         // function, as JSSP is callee-saved (we still need to do this when
@@ -1241,15 +1234,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         // (the latter does not restore CSP/JSSP).
         // TurboAssembler::CallCFunction() (safely) drops this extra slot
         // anyway.
-        int sp_alignment = __ ActivationFrameAlignment();
-        __ Sub(tmp, jssp, kPointerSize);
-        __ Bic(csp, tmp, sp_alignment - 1);
-        __ Str(jssp, MemOperand(csp));
+        __ SetStackPointer(csp);
+        __ Mov(csp, jssp);
         if (count > 0) {
-          __ SetStackPointer(csp);
           __ Claim(count);
-          __ SetStackPointer(prev);
         }
+        __ SetStackPointer(prev);
       } else {
         __ AssertCspAligned();
         if (count > 0) {
@@ -1261,14 +1251,15 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }
     case kArm64ClaimJSSP: {
       int count = i.InputInt32(0);
+      DCHECK_EQ(count % 2, 0);
       if (csp.Is(__ StackPointer())) {
         // No JSSP is set up. Compute it from the CSP.
         __ AssertCspAligned();
         if (count > 0) {
           int even = RoundUp(count, 2);
-          __ Sub(jssp, csp, count * kPointerSize);
           // We must also update CSP to maintain stack consistency:
           __ Sub(csp, csp, even * kPointerSize);  // Must always be aligned.
+          __ Mov(jssp, csp);
           __ AssertStackConsistency();
           frame_access_state()->IncreaseSPDelta(even);
         } else {
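The kArm64ClaimJSSP path now asserts an even slot count and derives jssp from csp only after the aligned csp adjustment, so both pointers stay 16-byte aligned. A standalone sketch of that invariant (assumed constants, not V8 headers):

#include <cassert>

constexpr int RoundUp(int value, int multiple) {
  return ((value + multiple - 1) / multiple) * multiple;
}

int main() {
  const int kPointerSize = 8;
  int count = 6;                 // Slot count; the new DCHECK requires it even.
  assert(count % 2 == 0);
  int even = RoundUp(count, 2);  // A no-op once count is known to be even.
  assert((even * kPointerSize) % 16 == 0);  // csp stays 16-byte aligned.
  return 0;
}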
......