Commit 01bd06f4 authored by Georgia Kouveli, committed by Commit Bot

[arm64] Rewrite Generate_InterpreterPushArgs and its callers.

This moves the handling of the receiver and the final spread argument
into Generate_InterpreterPushArgs and merges the stack allocation to a
single claim operation for the whole argument setup. When we start
padding arguments in TF, we will simply need to claim one extra slot
and store padreg to it in Generate_InterpreterPushArgs (hence the
remaining TODO).

This also replaces the single use of Generate_CheckStackOverflow with
Generate_StackOverflowCheck and removes the former. This change is also
done for arm, ia32 and x64.

Bug: v8:6644
Change-Id: I8d06dda96dbc4a6f219b73c711f894320c2f6cdf
Reviewed-on: https://chromium-review.googlesource.com/738031
Commit-Queue: Georgia Kouveli <georgia.kouveli@arm.com>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49005}
parent bc8c97c7
...@@ -1419,7 +1419,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { ...@@ -1419,7 +1419,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// We should either have undefined in the allocation_site register or a // We should either have undefined in the allocation_site register or a
// valid AllocationSite. // valid AllocationSite.
__ AssertUndefinedOrAllocationSite(allocation_site, x10); __ AssertUndefinedOrAllocationSite(allocation_site);
} }
// Enter the context of the Array function. // Enter the context of the Array function.
......
...@@ -1708,9 +1708,10 @@ void MacroAssembler::AssertGeneratorObject(Register object) { ...@@ -1708,9 +1708,10 @@ void MacroAssembler::AssertGeneratorObject(Register object) {
Check(eq, kOperandIsNotAGeneratorObject); Check(eq, kOperandIsNotAGeneratorObject);
} }
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object, void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
Register scratch) {
if (emit_debug_code()) { if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.AcquireX();
Label done_checking; Label done_checking;
AssertNotSmi(object); AssertNotSmi(object);
JumpIfRoot(object, Heap::kUndefinedValueRootIndex, &done_checking); JumpIfRoot(object, Heap::kUndefinedValueRootIndex, &done_checking);
......
...@@ -1807,7 +1807,7 @@ class MacroAssembler : public TurboAssembler { ...@@ -1807,7 +1807,7 @@ class MacroAssembler : public TurboAssembler {
// Abort execution if argument is not undefined or an AllocationSite, enabled // Abort execution if argument is not undefined or an AllocationSite, enabled
// via --debug-code. // via --debug-code.
void AssertUndefinedOrAllocationSite(Register object, Register scratch); void AssertUndefinedOrAllocationSite(Register object);
void JumpIfHeapNumber(Register object, Label* on_heap_number, void JumpIfHeapNumber(Register object, Label* on_heap_number,
SmiCheckType smi_check_type = DONT_DO_SMI_CHECK); SmiCheckType smi_check_type = DONT_DO_SMI_CHECK);
......
...@@ -585,24 +585,19 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { ...@@ -585,24 +585,19 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
__ CallRuntime(Runtime::kThrowConstructedNonConstructable); __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
} }
// Clobbers r2; preserves all other registers. static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) { Register scratch,
Label* stack_overflow) {
// Check the stack for overflow. We are not trying to catch // Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack // interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked. // limit" is checked.
Label okay; __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
__ LoadRoot(r2, Heap::kRealStackLimitRootIndex); // Make scratch the space we have left. The stack might already be overflowed
// Make r2 the space we have left. The stack might already be overflowed // here which will cause scratch to become negative.
// here which will cause r2 to become negative. __ sub(scratch, sp, scratch);
__ sub(r2, sp, r2);
// Check if the arguments will overflow the stack. // Check if the arguments will overflow the stack.
__ cmp(r2, Operand(argc, LSL, kPointerSizeLog2)); __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
__ b(gt, &okay); // Signed comparison. __ b(le, stack_overflow); // Signed comparison.
// Out of stack space.
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&okay);
} }
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
...@@ -633,7 +628,15 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, ...@@ -633,7 +628,15 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Check if we have enough stack space to push all arguments. // Check if we have enough stack space to push all arguments.
// Clobbers r2. // Clobbers r2.
Generate_CheckStackOverflow(masm, r3); Label enough_stack_space, stack_overflow;
Generate_StackOverflowCheck(masm, r3, r2, &stack_overflow);
__ b(&enough_stack_space);
__ bind(&stack_overflow);
__ CallRuntime(Runtime::kThrowStackOverflow);
// Unreachable code.
__ bkpt(0);
__ bind(&enough_stack_space);
// Remember new.target. // Remember new.target.
__ mov(r5, r0); __ mov(r5, r0);
...@@ -1036,21 +1039,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { ...@@ -1036,21 +1039,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ b(&bytecode_array_loaded); __ b(&bytecode_array_loaded);
} }
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Register scratch,
Label* stack_overflow) {
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ sub(scratch, sp, scratch);
// Check if the arguments will overflow the stack.
__ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
__ b(le, stack_overflow); // Signed comparison.
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm, static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register num_args, Register index, Register num_args, Register index,
Register limit, Register scratch) { Register limit, Register scratch) {
......
...@@ -585,27 +585,24 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { ...@@ -585,27 +585,24 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ B(&stepping_prepared); __ B(&stepping_prepared);
} }
// Clobbers x10, x15; preserves all other registers. static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) { Label* stack_overflow) {
DCHECK(masm->StackPointer().Is(jssp));
UseScratchRegisterScope temps(masm);
Register scratch = temps.AcquireX();
// Check the stack for overflow. // Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and // We are not trying to catch interruptions (e.g. debug break and
// preemption) here, so the "real stack limit" is checked. // preemption) here, so the "real stack limit" is checked.
Label enough_stack_space; Label enough_stack_space;
__ LoadRoot(x10, Heap::kRealStackLimitRootIndex); __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
// Make x10 the space we have left. The stack might already be overflowed // Make scratch the space we have left. The stack might already be overflowed
// here which will cause x10 to become negative. // here which will cause scratch to become negative.
// TODO(jbramley): Check that the stack usage here is safe. __ Sub(scratch, masm->StackPointer(), scratch);
__ Sub(x10, jssp, x10);
// Check if the arguments will overflow the stack. // Check if the arguments will overflow the stack.
__ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2)); __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
__ B(gt, &enough_stack_space); __ B(le, stack_overflow);
__ CallRuntime(Runtime::kThrowStackOverflow);
// We should never return from the APPLY_OVERFLOW builtin.
if (__ emit_debug_code()) {
__ Unreachable();
}
__ Bind(&enough_stack_space);
} }
// Input: // Input:
...@@ -643,7 +640,15 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, ...@@ -643,7 +640,15 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ Push(function, receiver); __ Push(function, receiver);
// Check if we have enough stack space to push all arguments. // Check if we have enough stack space to push all arguments.
Generate_CheckStackOverflow(masm, argc); Label enough_stack_space, stack_overflow;
Generate_StackOverflowCheck(masm, argc, &stack_overflow);
__ B(&enough_stack_space);
__ Bind(&stack_overflow);
__ CallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
__ Bind(&enough_stack_space);
// Copy arguments to the stack in a loop, in reverse order. // Copy arguments to the stack in a loop, in reverse order.
// x3: argc. // x3: argc.
...@@ -1068,44 +1073,70 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { ...@@ -1068,44 +1073,70 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ B(&bytecode_array_loaded); __ B(&bytecode_array_loaded);
} }
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Register scratch,
Label* stack_overflow) {
// Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and
// preemption) here, so the "real stack limit" is checked.
Label enough_stack_space;
__ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ Sub(scratch, jssp, scratch);
// Check if the arguments will overflow the stack.
__ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
__ B(le, stack_overflow);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm, static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register num_args, Register index, Register num_args,
Register last_arg, Register stack_addr, Register first_arg_index,
Register scratch) { Register spread_arg_out,
__ Mov(scratch, num_args); ConvertReceiverMode receiver_mode,
__ lsl(scratch, scratch, kPointerSizeLog2); InterpreterPushArgsMode mode) {
__ sub(last_arg, index, scratch); Register last_arg_addr = x10;
Register stack_addr = x11;
// Set stack pointer and where to stop. Register slots_to_claim = x12;
__ Mov(stack_addr, jssp); Register slots_to_copy = x13; // May include receiver, unlike num_args.
__ Claim(scratch, 1);
DCHECK(!AreAliased(num_args, first_arg_index, last_arg_addr, stack_addr,
// Push the arguments. slots_to_claim, slots_to_copy));
Label loop_header, loop_check; // spread_arg_out may alias with the first_arg_index input.
__ B(&loop_check); DCHECK(!AreAliased(spread_arg_out, last_arg_addr, stack_addr, slots_to_claim,
__ Bind(&loop_header); slots_to_copy));
// TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
__ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex)); // Add one slot for the receiver.
__ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex)); __ Add(slots_to_claim, num_args, 1);
__ Bind(&loop_check);
__ Cmp(index, last_arg); if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ B(gt, &loop_header); // Exclude final spread from slots to claim and the number of arguments.
__ Sub(slots_to_claim, slots_to_claim, 1);
__ Sub(num_args, num_args, 1);
}
// Add a stack check before pushing arguments.
Label stack_overflow, done;
Generate_StackOverflowCheck(masm, slots_to_claim, &stack_overflow);
__ B(&done);
__ Bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
__ Bind(&done);
// TODO(arm64): Claim one extra slot for padding and store padreg to the
// padding slot.
__ Claim(slots_to_claim);
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
// Store "undefined" as the receiver arg if we need to.
Register receiver = x14;
__ LoadRoot(receiver, Heap::kUndefinedValueRootIndex);
__ SlotAddress(stack_addr, num_args);
__ Str(receiver, MemOperand(stack_addr));
__ Mov(slots_to_copy, num_args);
} else {
// If we're not given an explicit receiver to store, we'll need to copy it
// together with the rest of the arguments.
__ Add(slots_to_copy, num_args, 1);
}
__ Sub(last_arg_addr, first_arg_index,
Operand(slots_to_copy, LSL, kPointerSizeLog2));
__ Add(last_arg_addr, last_arg_addr, kPointerSize);
// Load the final spread argument into spread_arg_out, if necessary.
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Ldr(spread_arg_out, MemOperand(last_arg_addr, -kPointerSize));
}
// Copy the rest of the arguments.
__ SlotAddress(stack_addr, 0);
__ CopyDoubleWords(stack_addr, last_arg_addr, slots_to_copy);
} }
// static // static
...@@ -1119,28 +1150,16 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl( ...@@ -1119,28 +1150,16 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
// they are to be pushed onto the stack. // they are to be pushed onto the stack.
// -- x1 : the target to call (can be any Object). // -- x1 : the target to call (can be any Object).
// ----------------------------------- // -----------------------------------
Label stack_overflow;
// Add one for the receiver.
__ Add(x3, x0, 1);
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, x3, x6, &stack_overflow);
// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
__ Push(x10);
__ Mov(x3, x0); // Argument count is correct.
}
// Push the arguments. x2, x4, x5, x6 will be modified. // Push the arguments. num_args may be updated according to mode.
Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6); // spread_arg_out will be updated to contain the last spread argument, when
// mode == InterpreterPushArgsMode::kWithFinalSpread.
if (mode == InterpreterPushArgsMode::kWithFinalSpread) { Register num_args = x0;
__ Pop(x2); // Pass the spread in a register Register first_arg_index = x2;
__ Sub(x0, x0, 1); // Subtract one for spread Register spread_arg_out =
} (mode == InterpreterPushArgsMode::kWithFinalSpread) ? x2 : no_reg;
Generate_InterpreterPushArgs(masm, num_args, first_arg_index, spread_arg_out,
receiver_mode, mode);
// Call the target. // Call the target.
if (mode == InterpreterPushArgsMode::kJSFunction) { if (mode == InterpreterPushArgsMode::kJSFunction) {
...@@ -1154,12 +1173,6 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl( ...@@ -1154,12 +1173,6 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
__ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny), __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
RelocInfo::CODE_TARGET); RelocInfo::CODE_TARGET);
} }
__ bind(&stack_overflow);
{
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
}
} }
// static // static
...@@ -1172,23 +1185,17 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl( ...@@ -1172,23 +1185,17 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// -- x2 : allocation site feedback if available, undefined otherwise // -- x2 : allocation site feedback if available, undefined otherwise
// -- x4 : address of the first argument // -- x4 : address of the first argument
// ----------------------------------- // -----------------------------------
Label stack_overflow; __ AssertUndefinedOrAllocationSite(x2);
// Push a slot for the receiver. // Push the arguments. num_args may be updated according to mode.
__ Push(xzr); // spread_arg_out will be updated to contain the last spread argument, when
// mode == InterpreterPushArgsMode::kWithFinalSpread.
// Add a stack check before pushing arguments. Register num_args = x0;
Generate_StackOverflowCheck(masm, x0, x7, &stack_overflow); Register first_arg_index = x4;
Register spread_arg_out =
// Push the arguments. x5, x4, x6, x7 will be modified. (mode == InterpreterPushArgsMode::kWithFinalSpread) ? x2 : no_reg;
Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7); Generate_InterpreterPushArgs(masm, num_args, first_arg_index, spread_arg_out,
ConvertReceiverMode::kNullOrUndefined, mode);
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Pop(x2); // Pass the spread in a register
__ Sub(x0, x0, 1); // Subtract one for spread
} else {
__ AssertUndefinedOrAllocationSite(x2, x6);
}
if (mode == InterpreterPushArgsMode::kJSFunction) { if (mode == InterpreterPushArgsMode::kJSFunction) {
__ AssertFunction(x1); __ AssertFunction(x1);
...@@ -1208,12 +1215,6 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl( ...@@ -1208,12 +1215,6 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Call the constructor with x0, x1, and x3 unmodified. // Call the constructor with x0, x1, and x3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET); __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
} }
__ bind(&stack_overflow);
{
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
}
} }
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
...@@ -2129,14 +2130,14 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm, ...@@ -2129,14 +2130,14 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ B(le, &stack_done); __ B(le, &stack_done);
{ {
// Check for stack overflow. // Check for stack overflow.
Generate_StackOverflowCheck(masm, x6, x2, &stack_overflow); Generate_StackOverflowCheck(masm, x6, &stack_overflow);
// Forward the arguments from the caller frame. // Forward the arguments from the caller frame.
{ {
Label loop; Label loop;
__ Add(x5, x5, kPointerSize); __ Add(x5, x5, kPointerSize);
__ Add(x0, x0, x6); __ Add(x0, x0, x6);
__ bind(&loop); __ Bind(&loop);
{ {
__ Ldr(x4, MemOperand(x5, x6, LSL, kPointerSizeLog2)); __ Ldr(x4, MemOperand(x5, x6, LSL, kPointerSizeLog2));
__ Push(x4); __ Push(x4);
...@@ -2625,7 +2626,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2625,7 +2626,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// number of actual arguments and the receiver. // number of actual arguments and the receiver.
__ RecordComment("-- Stack check --"); __ RecordComment("-- Stack check --");
__ Add(scratch1, argc_expected, 2); __ Add(scratch1, argc_expected, 2);
Generate_StackOverflowCheck(masm, scratch1, scratch2, &stack_overflow); Generate_StackOverflowCheck(masm, scratch1, &stack_overflow);
// Round up number of slots to be even, to maintain stack alignment. // Round up number of slots to be even, to maintain stack alignment.
__ RecordComment("-- Allocate callee frame slots --"); __ RecordComment("-- Allocate callee frame slots --");
......
...@@ -395,33 +395,30 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { ...@@ -395,33 +395,30 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
__ CallRuntime(Runtime::kThrowConstructedNonConstructable); __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
} }
// Clobbers ecx, edx, edi; preserves all other registers. static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
static void Generate_CheckStackOverflow(MacroAssembler* masm) { Register scratch1, Register scratch2,
// eax : the number of items to be pushed to the stack Label* stack_overflow,
// bool include_receiver = false) {
// Check the stack for overflow. We are not trying to catch // Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack // interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked. // limit" is checked.
Label okay;
ExternalReference real_stack_limit = ExternalReference real_stack_limit =
ExternalReference::address_of_real_stack_limit(masm->isolate()); ExternalReference::address_of_real_stack_limit(masm->isolate());
__ mov(edi, Operand::StaticVariable(real_stack_limit)); __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
// Make ecx the space we have left. The stack might already be overflowed // Make scratch2 the space we have left. The stack might already be overflowed
// here which will cause ecx to become negative. // here which will cause scratch2 to become negative.
__ mov(ecx, esp); __ mov(scratch2, esp);
__ sub(ecx, edi); __ sub(scratch2, scratch1);
// Make edx the space we need for the array when it is unrolled onto the // Make scratch1 the space we need for the array when it is unrolled onto the
// stack. // stack.
__ mov(edx, eax); __ mov(scratch1, num_args);
__ shl(edx, kPointerSizeLog2); if (include_receiver) {
__ add(scratch1, Immediate(1));
}
__ shl(scratch1, kPointerSizeLog2);
// Check if the arguments will overflow the stack. // Check if the arguments will overflow the stack.
__ cmp(ecx, edx); __ cmp(scratch2, scratch1);
__ j(greater, &okay); // Signed comparison. __ j(less_equal, stack_overflow); // Signed comparison.
// Out of stack space.
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&okay);
} }
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
...@@ -448,8 +445,17 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, ...@@ -448,8 +445,17 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset)); __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
// Check if we have enough stack space to push all arguments. // Check if we have enough stack space to push all arguments.
// Expects argument count in eax. Clobbers ecx, edx, edi. // Argument count in eax. Clobbers ecx and edx.
Generate_CheckStackOverflow(masm); Label enough_stack_space, stack_overflow;
Generate_StackOverflowCheck(masm, eax, ecx, edx, &stack_overflow);
__ jmp(&enough_stack_space);
__ bind(&stack_overflow);
__ CallRuntime(Runtime::kThrowStackOverflow);
// This should be unreachable.
__ int3();
__ bind(&enough_stack_space);
// Copy arguments to the stack in a loop. // Copy arguments to the stack in a loop.
Label loop, entry; Label loop, entry;
...@@ -961,32 +967,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { ...@@ -961,32 +967,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
} }
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Register scratch1, Register scratch2,
Label* stack_overflow,
bool include_receiver = false) {
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
ExternalReference real_stack_limit =
ExternalReference::address_of_real_stack_limit(masm->isolate());
__ mov(scratch1, Operand::StaticVariable(real_stack_limit));
// Make scratch2 the space we have left. The stack might already be overflowed
// here which will cause scratch2 to become negative.
__ mov(scratch2, esp);
__ sub(scratch2, scratch1);
// Make scratch1 the space we need for the array when it is unrolled onto the
// stack.
__ mov(scratch1, num_args);
if (include_receiver) {
__ add(scratch1, Immediate(1));
}
__ shl(scratch1, kPointerSizeLog2);
// Check if the arguments will overflow the stack.
__ cmp(scratch2, scratch1);
__ j(less_equal, stack_overflow); // Signed comparison.
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm, static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register array_limit, Register array_limit,
Register start_address) { Register start_address) {
......
...@@ -400,31 +400,23 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { ...@@ -400,31 +400,23 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
__ CallRuntime(Runtime::kThrowConstructedNonConstructable); __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
} }
// Clobbers rcx, r11, kScratchRegister; preserves all other registers. static void Generate_StackOverflowCheck(
static void Generate_CheckStackOverflow(MacroAssembler* masm) { MacroAssembler* masm, Register num_args, Register scratch,
// rax : the number of items to be pushed to the stack Label* stack_overflow,
// Label::Distance stack_overflow_distance = Label::kFar) {
// Check the stack for overflow. We are not trying to catch // Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack // interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked. // limit" is checked.
Label okay;
__ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
__ movp(rcx, rsp); __ movp(scratch, rsp);
// Make rcx the space we have left. The stack might already be overflowed // Make scratch the space we have left. The stack might already be overflowed
// here which will cause rcx to become negative. // here which will cause scratch to become negative.
__ subp(rcx, kScratchRegister); __ subp(scratch, kScratchRegister);
// Make r11 the space we need for the array when it is unrolled onto the __ sarp(scratch, Immediate(kPointerSizeLog2));
// stack.
__ movp(r11, rax);
__ shlq(r11, Immediate(kPointerSizeLog2));
// Check if the arguments will overflow the stack. // Check if the arguments will overflow the stack.
__ cmpp(rcx, r11); __ cmpp(scratch, num_args);
__ j(greater, &okay); // Signed comparison. // Signed comparison.
__ j(less_equal, stack_overflow, stack_overflow_distance);
// Out of stack space.
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&okay);
} }
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
...@@ -524,8 +516,17 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, ...@@ -524,8 +516,17 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// rdx : new.target // rdx : new.target
// Check if we have enough stack space to push all arguments. // Check if we have enough stack space to push all arguments.
// Expects argument count in rax. Clobbers rcx, r11. // Argument count in rax. Clobbers rcx.
Generate_CheckStackOverflow(masm); Label enough_stack_space, stack_overflow;
Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
__ jmp(&enough_stack_space);
__ bind(&stack_overflow);
__ CallRuntime(Runtime::kThrowStackOverflow);
// This should be unreachable.
__ int3();
__ bind(&enough_stack_space);
// Copy arguments to the stack in a loop. // Copy arguments to the stack in a loop.
// Register rbx points to array of pointers to handle locations. // Register rbx points to array of pointers to handle locations.
...@@ -1035,25 +1036,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { ...@@ -1035,25 +1036,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ jmp(&bytecode_array_loaded); __ jmp(&bytecode_array_loaded);
} }
static void Generate_StackOverflowCheck(
MacroAssembler* masm, Register num_args, Register scratch,
Label* stack_overflow,
Label::Distance stack_overflow_distance = Label::kFar) {
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
__ movp(scratch, rsp);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ subp(scratch, kScratchRegister);
__ sarp(scratch, Immediate(kPointerSizeLog2));
// Check if the arguments will overflow the stack.
__ cmpp(scratch, num_args);
// Signed comparison.
__ j(less_equal, stack_overflow, stack_overflow_distance);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm, static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register num_args, Register num_args,
Register start_address, Register start_address,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment