Commit b4d63cf5 authored by Martyn Capewell, committed by Commit Bot

[arm64] Restore jssp from csp, remove jssp refs

As they're now equal, simplify some TF opcodes by restoring jssp from csp on
stack pointer switch. Also, remove some direct references to jssp.

Bug: v8:6644
Change-Id: I20ee54fc0d536809a0aa72def43337f83cc91685
Reviewed-on: https://chromium-review.googlesource.com/857457
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Martyn Capewell <martyn.capewell@arm.com>
Cr-Commit-Position: refs/heads/master@{#50462}
parent 6ef05c78
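
Because jssp and csp are now kept equal, restoring jssp after a switch to csp no longer needs a load from memory (see the three kRestoreJSSP hunks in the code generator below); a minimal before/after sketch of that simplification:

    // Before: jssp was spilled to the stack by kArm64ClaimCSP, so it was
    // reloaded and csp re-synchronized afterwards.
    __ Ldr(jssp, MemOperand(csp));
    __ Mov(csp, jssp);

    // After: the two registers are equal by construction, so one move
    // suffices.
    __ Mov(jssp, csp);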
......@@ -30,7 +30,7 @@ namespace internal {
void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ Mov(x5, Operand(x0, LSL, kPointerSizeLog2));
__ Str(x1, MemOperand(jssp, x5));
__ Str(x1, MemOperand(__ StackPointer(), x5));
__ Push(x1, x2);
__ Add(x0, x0, Operand(3));
__ TailCallRuntime(Runtime::kNewArray);
......@@ -100,8 +100,8 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
void MathPowStub::Generate(MacroAssembler* masm) {
// Stack on entry:
// jssp[0]: Exponent (as a tagged value).
// jssp[1]: Base (as a tagged value).
// sp[0]: Exponent (as a tagged value).
// sp[1]: Base (as a tagged value).
//
// The (tagged) result will be returned in x0, as a heap number.
......@@ -276,11 +276,11 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// The stack on entry holds the arguments and the receiver, with the receiver
// at the highest address:
//
// jssp[argc-1]: receiver
// jssp[argc-2]: arg[argc-2]
// sp[argc-1]: receiver
// sp[argc-2]: arg[argc-2]
// ... ...
// jssp[1]: arg[1]
// jssp[0]: arg[0]
// sp[1]: arg[1]
// sp[0]: arg[0]
//
// The arguments are in reverse order, so that arg[argc-2] is actually the
// first argument to the target function and arg[0] is the last.
......@@ -533,7 +533,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ Push(x13, x12, xzr, x10);
// Set up fp.
__ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset);
__ Sub(fp, __ StackPointer(), EntryFrameConstants::kCallerFPOffset);
// Push the JS entry frame marker. Also set js_entry_sp if this is the
// outermost JS call.
......@@ -555,12 +555,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ Push(x12, padreg);
// The frame set up looks like this:
// jssp[0] : padding.
// jssp[1] : JS entry frame marker.
// jssp[2] : C entry FP.
// jssp[3] : stack frame marker.
// jssp[4] : stack frame marker.
// jssp[5] : bad frame pointer 0xFFF...FF <- fp points here.
// sp[0] : padding.
// sp[1] : JS entry frame marker.
// sp[2] : C entry FP.
// sp[3] : stack frame marker.
// sp[4] : stack frame marker.
// sp[5] : bad frame pointer 0xFFF...FF <- fp points here.
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
......@@ -602,7 +602,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ Push(padreg, x10);
// Set this new handler as the current one.
__ Str(jssp, MemOperand(x11));
{
UseScratchRegisterScope temps(masm);
Register scratch = temps.AcquireX();
__ Mov(scratch, __ StackPointer());
__ Str(scratch, MemOperand(x11));
}
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the B(&invoke) above, which
......@@ -633,12 +638,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// x0 holds the result.
// The stack pointer points to the top of the entry frame pushed on entry from
// C++ (at the beginning of this stub):
// jssp[0] : padding.
// jssp[1] : JS entry frame marker.
// jssp[2] : C entry FP.
// jssp[3] : stack frame marker.
// jssp[4] : stack frame marker.
// jssp[5] : bad frame pointer 0xFFF...FF <- fp points here.
// sp[0] : padding.
// sp[1] : JS entry frame marker.
// sp[2] : C entry FP.
// sp[3] : stack frame marker.
// sp[4] : stack frame marker.
// sp[5] : bad frame pointer 0xFFF...FF <- fp points here.
// Check if the current stack frame is marked as the outermost JS frame.
Label non_outermost_js_2;
......
......@@ -220,8 +220,12 @@ void Deoptimizer::TableEntryGenerator::Generate() {
}
__ Pop(x4, padreg); // Restore deoptimizer object (class Deoptimizer).
__ Ldr(__ StackPointer(),
MemOperand(x4, Deoptimizer::caller_frame_top_offset()));
{
UseScratchRegisterScope temps(masm());
Register scratch = temps.AcquireX();
__ Ldr(scratch, MemOperand(x4, Deoptimizer::caller_frame_top_offset()));
__ Mov(__ StackPointer(), scratch);
}
// Replace the current (input) frame with the output frames.
Label outer_push_loop, inner_push_loop,
......
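
The JSEntryStub and Deoptimizer hunks above share one pattern: now that __ StackPointer() may be csp, it can no longer be named directly as the data register of a plain STR or LDR (register number 31 in that operand slot encodes xzr, not sp), so the value is staged through a scratch register with MOV, which can read and write sp. Consolidated from the two hunks:

    // Store the active stack pointer at the address in x11.
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.AcquireX();
    __ Mov(scratch, __ StackPointer());
    __ Str(scratch, MemOperand(x11));

    // Load a new stack pointer value (deoptimizer case): load into a
    // scratch register, then move it into sp.
    __ Ldr(scratch, MemOperand(x4, Deoptimizer::caller_frame_top_offset()));
    __ Mov(__ StackPointer(), scratch);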
......@@ -1885,39 +1885,18 @@ void TurboAssembler::CallCFunction(Register function, int num_of_reg_args,
// so the return address in the link register stays correct.
Call(function);
if (csp.Is(old_stack_pointer)) {
if (num_of_reg_args > kRegisterPassedArguments) {
// Drop the register passed arguments.
int claim_slots = RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
Drop(claim_slots);
}
} else {
DCHECK(jssp.Is(old_stack_pointer));
if (num_of_reg_args > kRegisterPassedArguments) {
// Drop the register passed arguments.
int claim_slots = RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
Drop(claim_slots);
}
if (jssp.Is(old_stack_pointer)) {
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
if (num_of_reg_args > kRegisterPassedArguments) {
// We don't need to drop stack arguments, as the stack pointer will be
// jssp when returning from this function. However, in debug builds, we
// can check that jssp is as expected.
int claim_slots =
RoundUp(num_of_reg_args - kRegisterPassedArguments, 2);
// Check jssp matches the previous value on the stack.
Ldr(temp, MemOperand(csp, claim_slots * kPointerSize));
Cmp(jssp, temp);
Check(eq, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
} else {
// Because the stack pointer must be aligned on a 16-byte boundary, the
// aligned csp can be up to 12 bytes below the jssp. This is the case
// where we only pushed one W register on top of an aligned jssp.
Sub(temp, csp, old_stack_pointer);
// We want temp <= 0 && temp >= -12.
Cmp(temp, 0);
Ccmp(temp, -12, NFlag, le);
Check(ge, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
}
Mov(temp, csp);
Cmp(old_stack_pointer, temp);
Check(eq, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
}
SetStackPointer(old_stack_pointer);
}
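
With jssp equal to csp on return from the C call, the old two-path check (an exact match when csp was the stack pointer, a 0-to-12-byte tolerance when jssp was) collapses to a single debug-only equality test; the rewritten path, consolidated from the hunk above:

    if (jssp.Is(old_stack_pointer)) {
      if (emit_debug_code()) {
        UseScratchRegisterScope temps(this);
        Register temp = temps.AcquireX();
        // jssp and csp must still agree on return from the call.
        Mov(temp, csp);
        Cmp(old_stack_pointer, temp);
        Check(eq, AbortReason::kTheStackWasCorruptedByMacroAssemblerCall);
      }
      SetStackPointer(old_stack_pointer);
    }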
......@@ -2166,13 +2145,16 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
Add(dst_reg, dst_reg, 15);
Bic(dst_reg, dst_reg, 15);
DCHECK(jssp.Is(StackPointer()));
Register src_reg = caller_args_count_reg;
// Calculate the end of source area. +kPointerSize is for the receiver.
if (callee_args_count.is_reg()) {
Add(src_reg, jssp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
Add(src_reg, StackPointer(),
Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
Add(src_reg, src_reg, kPointerSize);
} else {
Add(src_reg, jssp, (callee_args_count.immediate() + 1) * kPointerSize);
Add(src_reg, StackPointer(),
(callee_args_count.immediate() + 1) * kPointerSize);
}
// Round src_reg up to a multiple of 16 bytes, so we include any potential
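
The Add/Bic pair used for dst_reg above (and implied here for src_reg) is the standard branch-free round-up-to-16: add 15, then clear the low four bits. Expressed as plain C++ for reference:

    #include <cstdint>

    // (x + 15) & ~15 rounds x up to the next multiple of 16,
    // e.g. RoundUp16(0x28) == 0x30 and RoundUp16(0x30) == 0x30.
    uint64_t RoundUp16(uint64_t x) { return (x + 15) & ~uint64_t{15}; }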
......@@ -2202,11 +2184,11 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
Ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
Str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
bind(&entry);
Cmp(jssp, src_reg);
Cmp(StackPointer(), src_reg);
B(ne, &loop);
// Leave current frame.
Mov(jssp, dst_reg);
Mov(StackPointer(), dst_reg);
SetStackPointer(jssp);
AssertStackConsistency();
}
......@@ -2477,8 +2459,9 @@ void TurboAssembler::TruncateDoubleToIDelayed(Zone* zone, Register result,
}
void TurboAssembler::Prologue() {
DCHECK(jssp.Is(StackPointer()));
Push(lr, fp, cp, x1);
Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
Add(fp, StackPointer(), StandardFrameConstants::kFixedFrameSizeFromFp);
}
void TurboAssembler::EnterFrame(StackFrame::Type type) {
......@@ -2491,11 +2474,11 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
Mov(type_reg, StackFrame::TypeToMarker(type));
Mov(code_reg, Operand(CodeObject()));
Push(lr, fp, type_reg, code_reg);
Add(fp, jssp, InternalFrameConstants::kFixedFrameSizeFromFp);
// jssp[4] : lr
// jssp[3] : fp
// jssp[1] : type
// jssp[0] : [code object]
Add(fp, StackPointer(), InternalFrameConstants::kFixedFrameSizeFromFp);
// sp[4] : lr
// sp[3] : fp
// sp[1] : type
// sp[0] : [code object]
} else if (type == StackFrame::WASM_COMPILED) {
DCHECK(csp.Is(StackPointer()));
Mov(type_reg, StackFrame::TypeToMarker(type));
......@@ -2517,11 +2500,12 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
// The context pointer isn't part of the fixed frame, so add an extra slot
// to account for it.
Add(fp, jssp, TypedFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
// jssp[3] : lr
// jssp[2] : fp
// jssp[1] : type
// jssp[0] : cp
Add(fp, StackPointer(),
TypedFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
// sp[3] : lr
// sp[2] : fp
// sp[1] : type
// sp[0] : cp
}
}
......@@ -2535,7 +2519,7 @@ void TurboAssembler::LeaveFrame(StackFrame::Type type) {
DCHECK(jssp.Is(StackPointer()));
// Drop the execution stack down to the frame pointer and restore
// the caller frame pointer and return address.
Mov(jssp, fp);
Mov(StackPointer(), fp);
AssertStackConsistency();
Pop(fp, lr);
}
......@@ -2582,7 +2566,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
// fp[-8]: STUB marker
// fp[-16]: Space reserved for SPOffset.
// fp[-24]: CodeObject()
// jssp -> fp[-32]: padding
// sp -> fp[-32]: padding
STATIC_ASSERT((2 * kPointerSize) == ExitFrameConstants::kCallerSPOffset);
STATIC_ASSERT((1 * kPointerSize) == ExitFrameConstants::kCallerPCOffset);
STATIC_ASSERT((0 * kPointerSize) == ExitFrameConstants::kCallerFPOffset);
......@@ -2616,8 +2600,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
// fp[-16]: Space reserved for SPOffset.
// fp[-24]: CodeObject()
// fp[-24 - fp_size]: Saved doubles (if save_doubles is true).
// jssp[8]: Extra space reserved for caller (if extra_space != 0).
// jssp -> jssp[0]: Space reserved for the return address.
// sp[8]: Extra space reserved for caller (if extra_space != 0).
// sp -> sp[0]: Space reserved for the return address.
// Align and synchronize the system stack pointer with jssp.
AlignAndSetCSPForFrame();
......@@ -2630,7 +2614,6 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
// fp[-24]: CodeObject()
// fp[-24 - fp_size]: Saved doubles (if save_doubles is true).
// csp[8]: Memory reserved for the caller if extra_space != 0.
// Alignment padding, if necessary.
// csp -> csp[0]: Space reserved for the return address.
// ExitFrame::GetStateForFramePointer expects to find the return address at
......
......@@ -552,7 +552,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(__ StackPointer(), Heap::kRealStackLimitRootIndex);
__ B(lo, &stack_overflow);
// Get number of arguments for generator function.
......@@ -579,8 +579,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// -- x10 : argument count
// -- cp : generator context
// -- lr : return address
// -- jssp[arg count] : generator receiver
// -- jssp[0 .. arg count - 1] : claimed for args
// -- sp[arg count] : generator receiver
// -- sp[0 .. arg count - 1] : claimed for args
// -----------------------------------
// Push holes for arguments to generator function. Since the parser forced
......@@ -1010,9 +1010,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
// the frame (that is done below).
DCHECK(jssp.Is(__ StackPointer()));
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ Push(lr, fp, cp, closure);
__ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
__ Add(fp, __ StackPointer(), StandardFrameConstants::kFixedFrameSizeFromFp);
// Get the bytecode array from the function object (or from the DebugInfo if
// it is present) and load it into kInterpreterBytecodeArrayRegister.
......@@ -1064,7 +1065,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Do a stack check to ensure we don't go over the limit.
Label ok;
DCHECK(jssp.Is(__ StackPointer()));
__ Sub(x10, jssp, Operand(x11));
__ Sub(x10, __ StackPointer(), Operand(x11));
__ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
__ B(hs, &ok);
__ CallRuntime(Runtime::kThrowStackOverflow);
......@@ -1649,8 +1650,9 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
allocatable_register_count)) *
kPointerSize;
DCHECK(jssp.Is(__ StackPointer()));
// Set up frame pointer.
__ Add(fp, jssp, frame_size);
__ Add(fp, __ StackPointer(), frame_size);
if (with_result) {
// Overwrite the hole inserted by the deoptimizer with the return value from
......@@ -1786,9 +1788,9 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : argArray (if argc == 2)
// -- jssp[8] : thisArg (if argc >= 1)
// -- jssp[16] : receiver
// -- sp[0] : argArray (if argc == 2)
// -- sp[8] : thisArg (if argc >= 1)
// -- sp[16] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");
......@@ -1840,7 +1842,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x2 : argArray
// -- x1 : receiver
// -- jssp[0] : thisArg
// -- sp[0] : thisArg
// -----------------------------------
// 2. We don't need to check explicitly for callable receiver here,
......@@ -1926,10 +1928,10 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : argumentsList (if argc == 3)
// -- jssp[8] : thisArgument (if argc >= 2)
// -- jssp[16] : target (if argc >= 1)
// -- jssp[24] : receiver
// -- sp[0] : argumentsList (if argc == 3)
// -- sp[8] : thisArgument (if argc >= 2)
// -- sp[16] : target (if argc >= 1)
// -- sp[24] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_ReflectApply");
......@@ -1988,7 +1990,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x2 : argumentsList
// -- x1 : target
// -- jssp[0] : thisArgument
// -- sp[0] : thisArgument
// -----------------------------------
// 2. We don't need to check explicitly for callable target here,
......@@ -2003,10 +2005,10 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : new.target (optional)
// -- jssp[8] : argumentsList
// -- jssp[16] : target
// -- jssp[24] : receiver
// -- sp[0] : new.target (optional)
// -- sp[8] : argumentsList
// -- sp[16] : target
// -- sp[24] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_ReflectConstruct");
......@@ -2070,7 +2072,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// -- x2 : argumentsList
// -- x1 : target
// -- x3 : new.target
// -- jssp[0] : receiver (undefined)
// -- sp[0] : receiver (undefined)
// -----------------------------------
// 2. We don't need to check explicitly for constructor target here,
......@@ -2094,7 +2096,8 @@ void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ Push(x11, x1); // x1: function
__ SmiTag(x11, x0); // x0: number of arguments.
__ Push(x11, padreg);
__ Add(fp, jssp, ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp);
__ Add(fp, __ StackPointer(),
ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp);
}
void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
......@@ -2104,7 +2107,7 @@ void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// Get the number of arguments passed (as a smi), tear down the frame and
// then drop the parameters and the receiver.
__ Ldr(x10, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ Mov(jssp, fp);
__ Mov(__ StackPointer(), fp);
__ Pop(fp, lr);
// Drop actual parameters and receiver.
......@@ -2857,7 +2860,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Bic(scratch1, scratch1, 1);
__ Claim(scratch1, kPointerSize);
__ Mov(copy_to, jssp);
__ Mov(copy_to, __ StackPointer());
// Preparing the expected arguments is done in four steps, the order of
// which is chosen so we can use LDP/STP and avoid conditional branches as
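
The LDP/STP instructions mentioned here move two pointer-sized slots at once; a hypothetical copy step in the same macro-assembler style (register choices assumed, since the actual copy loop is elided from this hunk):

    // Copy two stack slots per iteration with a load/store pair.
    __ Ldp(x10, x11, MemOperand(copy_from, 2 * kPointerSize, PostIndex));
    __ Stp(x10, x11, MemOperand(copy_to, 2 * kPointerSize, PostIndex));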
......@@ -2914,14 +2917,15 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ RecordComment("-- Store padding --");
__ Str(padreg, MemOperand(fp, -5 * kPointerSize));
// (4) Store receiver. Calculate target address from jssp to avoid checking
// (4) Store receiver. Calculate target address from the sp to avoid checking
// for padding. Storing the receiver will overwrite either the extra slot
// we copied with the actual arguments, if we did copy one, or the padding we
// stored above.
__ RecordComment("-- Store receiver --");
__ Add(copy_from, fp, 2 * kPointerSize);
__ Ldr(scratch1, MemOperand(copy_from, argc_actual, LSL, kPointerSizeLog2));
__ Str(scratch1, MemOperand(jssp, argc_expected, LSL, kPointerSizeLog2));
__ Str(scratch1,
MemOperand(__ StackPointer(), argc_expected, LSL, kPointerSizeLog2));
// Arguments have been adapted. Now call the entry point.
__ RecordComment("-- Call entry point --");
......
......@@ -589,8 +589,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// allocator.
CallDescriptor::Flags flags(MiscField::decode(opcode));
if (flags & CallDescriptor::kRestoreJSSP) {
__ Ldr(jssp, MemOperand(csp));
__ Mov(csp, jssp);
__ Mov(jssp, csp);
}
if (flags & CallDescriptor::kRestoreCSP) {
__ Mov(csp, jssp);
......@@ -623,8 +622,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// allocator.
CallDescriptor::Flags flags(MiscField::decode(opcode));
if (flags & CallDescriptor::kRestoreJSSP) {
__ Ldr(jssp, MemOperand(csp));
__ Mov(csp, jssp);
__ Mov(jssp, csp);
}
if (flags & CallDescriptor::kRestoreCSP) {
__ Mov(csp, jssp);
......@@ -708,8 +706,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// allocator.
CallDescriptor::Flags flags(MiscField::decode(opcode));
if (flags & CallDescriptor::kRestoreJSSP) {
__ Ldr(jssp, MemOperand(csp));
__ Mov(csp, jssp);
__ Mov(jssp, csp);
}
if (flags & CallDescriptor::kRestoreCSP) {
__ Mov(csp, jssp);
......@@ -1224,15 +1221,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
break;
case kArm64ClaimCSP: {
int count = RoundUp(i.InputInt32(0), 2);
int count = i.InputInt32(0);
DCHECK_EQ(count % 2, 0);
Register prev = __ StackPointer();
if (prev.Is(jssp)) {
// TODO(titzer): make this a macro-assembler method.
// Align the CSP and store the previous JSSP on the stack. We do not
// need to modify the SP delta here, as we will continue to access the
// frame via JSSP.
UseScratchRegisterScope scope(tasm());
Register tmp = scope.AcquireX();
// TODO(arm64): Storing JSSP on the stack is redundant when calling a C
// function, as JSSP is callee-saved (we still need to do this when
......@@ -1241,15 +1234,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// (the latter does not restore CSP/JSSP).
// TurboAssembler::CallCFunction() (safely) drops this extra slot
// anyway.
int sp_alignment = __ ActivationFrameAlignment();
__ Sub(tmp, jssp, kPointerSize);
__ Bic(csp, tmp, sp_alignment - 1);
__ Str(jssp, MemOperand(csp));
__ SetStackPointer(csp);
__ Mov(csp, jssp);
if (count > 0) {
__ SetStackPointer(csp);
__ Claim(count);
__ SetStackPointer(prev);
}
__ SetStackPointer(prev);
} else {
__ AssertCspAligned();
if (count > 0) {
......@@ -1261,14 +1251,15 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
}
case kArm64ClaimJSSP: {
int count = i.InputInt32(0);
DCHECK_EQ(count % 2, 0);
if (csp.Is(__ StackPointer())) {
// No JSSP is set up. Compute it from the CSP.
__ AssertCspAligned();
if (count > 0) {
int even = RoundUp(count, 2);
__ Sub(jssp, csp, count * kPointerSize);
// We must also update CSP to maintain stack consistency:
__ Sub(csp, csp, even * kPointerSize); // Must always be aligned.
__ Mov(jssp, csp);
__ AssertStackConsistency();
frame_access_state()->IncreaseSPDelta(even);
} else {
......
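
With count now asserted even, claiming JSSP slots while csp is the active stack pointer reduces to one csp adjustment plus a re-synchronizing move; an assumed sketch of the resulting path (the tail of the hunk is elided above, so this is not the verbatim result):

    __ Sub(csp, csp, count * kPointerSize);
    __ Mov(jssp, csp);  // Keep jssp equal to csp.
    __ AssertStackConsistency();
    frame_access_state()->IncreaseSPDelta(count);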