Commit 4adc7c7f authored by Liu yu, committed by Commit Bot

Revert "[mips] Remove arguments adaptor frame"

This reverts commit 7257dc93.

Reason for revert:
- New test fail on mips64el: https://logs.chromium.org/logs/v8/buildbucket/cr-buildbucket.appspot.com/8864502401132417824/+/steps/Check/0/logs/modules-import-large/0

Original change's description:
> [mips] Remove arguments adaptor frame
>
> Port: 958d8e9f
>
> Bug: v8:10201
>
> Change-Id: I27d29f2a1f1d5f659d558b5fd776b88474d9b140
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2513867
> Auto-Submit: Liu yu <liuyu@loongson.cn>
> Reviewed-by: Jakob Gruber <jgruber@chromium.org>
> Commit-Queue: Jakob Gruber <jgruber@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#70964}

TBR=jgruber@chromium.org,liuyu@loongson.cn

Change-Id: Iddca9684995409fb9a1f3340b3a1931b1b5b472b
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:10201
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2519189
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70980}
parent 8ca481ed
@@ -112,7 +112,6 @@ declare_args() {
   # Disable arguments adaptor frame (sets -dV8_NO_ARGUMENTS_ADAPTOR).
   v8_disable_arguments_adaptor =
       v8_current_cpu == "x86" || v8_current_cpu == "x64" ||
-      v8_current_cpu == "mipsel" || v8_current_cpu == "mips64el" ||
       v8_current_cpu == "arm"

   # Sets -dOBJECT_PRINT.
...
@@ -68,6 +68,23 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,

 namespace {

+enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };
+
+void LoadStackLimit(MacroAssembler* masm, Register destination,
+                    StackLimitKind kind) {
+  DCHECK(masm->root_array_available());
+  Isolate* isolate = masm->isolate();
+  ExternalReference limit =
+      kind == StackLimitKind::kRealStackLimit
+          ? ExternalReference::address_of_real_jslimit(isolate)
+          : ExternalReference::address_of_jslimit(isolate);
+  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));
+  intptr_t offset =
+      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
+  __ Lw(destination, MemOperand(kRootRegister, static_cast<int32_t>(offset)));
+}
+
 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : number of arguments
@@ -114,6 +131,22 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   __ Ret();
 }

+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
+  // Make scratch1 the space we have left. The stack might already be overflowed
+  // here which will cause scratch1 to become negative.
+  __ subu(scratch1, sp, scratch1);
+  // Check if the arguments will overflow the stack.
+  __ sll(scratch2, num_args, kPointerSizeLog2);
+  // Signed comparison.
+  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
+}
+
 }  // namespace

 // The construct stub for ES5 constructor functions and ES6 class constructors.
@@ -208,7 +241,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     __ SmiUntag(a0);

     Label enough_stack_space, stack_overflow;
-    __ StackOverflowCheck(a0, t0, t1, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
     __ Branch(&enough_stack_space);

     __ bind(&stack_overflow);
@@ -220,11 +253,6 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     __ bind(&enough_stack_space);

-    // TODO(victorgomes): When the arguments adaptor is completely removed, we
-    // should get the formal parameter count and copy the arguments in its
-    // correct position (including any undefined), instead of delaying this to
-    // InvokeFunction.
     // Copy arguments and receiver to the expression stack.
     __ PushArray(t2, a0, t0, t1);

     // We need two copies because we may have to return the original one
@@ -309,7 +337,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
   Label okay;
-  __ LoadStackLimit(scratch1, MacroAssembler::StackLimitKind::kRealStackLimit);
+  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
   // Make a2 the space we have left. The stack might already be overflowed
   // here which will cause a2 to become negative.
   __ Subu(scratch1, sp, scratch1);
@@ -654,8 +682,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Check the stack for overflow. We are not trying to catch interruptions
   // (i.e. debug break and preemption) here, so check the "real stack limit".
   Label stack_overflow;
-  __ LoadStackLimit(kScratchReg,
-                    MacroAssembler::StackLimitKind::kRealStackLimit);
+  LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
   __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

   // ----------- S t a t e -------------
@@ -756,35 +783,20 @@ static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                       OMIT_SMI_CHECK);
 }

-static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
-                                  Register scratch2) {
-  Register params_size = scratch1;
-  // Get the size of the formal parameters + receiver (in bytes).
-  __ lw(params_size,
-        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
-  __ lw(params_size,
-        FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  Register actual_params_size = scratch2;
-  // Compute the size of the actual parameters + receiver (in bytes).
-  __ Lw(actual_params_size,
-        MemOperand(fp, StandardFrameConstants::kArgCOffset));
-  __ sll(actual_params_size, actual_params_size, kPointerSizeLog2);
-  __ Addu(actual_params_size, actual_params_size, Operand(kSystemPointerSize));
-  // If actual is bigger than formal, then we should use it to free up the stack
-  // arguments.
-  __ slt(t2, params_size, actual_params_size);
-  __ movn(params_size, actual_params_size, t2);
-#endif
+static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
+  Register args_count = scratch;
+  // Get the arguments + receiver count.
+  __ lw(args_count,
+        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ lw(args_count,
+        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

   // Leave the frame (also dropping the register file).
   __ LeaveFrame(StackFrame::INTERPRETED);

   // Drop receiver + arguments.
-  __ Addu(sp, sp, params_size);
+  __ Addu(sp, sp, args_count);
 }

 // Tail-call |function_id| if |actual_marker| == |expected_marker|
@@ -1055,7 +1067,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     // Do a stack check to ensure we don't go over the limit.
     __ Subu(t1, sp, Operand(t0));
-    __ LoadStackLimit(a2, MacroAssembler::StackLimitKind::kRealStackLimit);
+    LoadStackLimit(masm, a2, StackLimitKind::kRealStackLimit);
     __ Branch(&stack_overflow, lo, t1, Operand(a2));

     // If ok, push undefined as the initial value for all register file entries.
@@ -1087,7 +1099,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // Perform interrupt stack check.
   // TODO(solanes): Merge with the real stack limit check above.
   Label stack_check_interrupt, after_stack_check_interrupt;
-  __ LoadStackLimit(a2, MacroAssembler::StackLimitKind::kInterruptStackLimit);
+  LoadStackLimit(masm, a2, StackLimitKind::kInterruptStackLimit);
   __ Branch(&stack_check_interrupt, lo, sp, Operand(a2));
   __ bind(&after_stack_check_interrupt);

@@ -1129,7 +1141,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   __ bind(&do_return);
   // The return value is in v0.
-  LeaveInterpreterFrame(masm, t0, t1);
+  LeaveInterpreterFrame(masm, t0);
   __ Jump(ra);

   __ bind(&stack_check_interrupt);
@@ -1223,7 +1235,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
   __ Addu(t0, a0, Operand(1));  // Add one for receiver.

-  __ StackOverflowCheck(t0, t4, t1, &stack_overflow);
+  Generate_StackOverflowCheck(masm, t0, t4, t1, &stack_overflow);

   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
     // Don't copy receiver.
@@ -1273,7 +1285,7 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
   // -----------------------------------
   Label stack_overflow;
   __ addiu(t2, a0, 1);
-  __ StackOverflowCheck(t2, t1, t0, &stack_overflow);
+  Generate_StackOverflowCheck(masm, t2, t1, t0, &stack_overflow);

   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
     // The spread argument should not be pushed.
@@ -1786,7 +1798,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
   // Check for stack overflow.
   Label stack_overflow;
-  __ StackOverflowCheck(t0, kScratchReg, t1, &stack_overflow);
+  Generate_StackOverflowCheck(masm, t0, kScratchReg, t1, &stack_overflow);

   // Move the arguments already in the stack,
   // including the receiver and the return address.
@@ -1865,13 +1877,6 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
     __ bind(&new_target_constructor);
   }

-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  // TODO(victorgomes): Remove this copy when all the arguments adaptor frame
-  // code is erased.
-  __ mov(t3, fp);
-  __ Lw(t2, MemOperand(fp, StandardFrameConstants::kArgCOffset));
-#else
   // Check if we have an arguments adaptor frame below the function frame.
   Label arguments_adaptor, arguments_done;
   __ lw(t3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
@@ -1893,14 +1898,13 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
     __ SmiUntag(t2);
   }
   __ bind(&arguments_done);
-#endif

   Label stack_done, stack_overflow;
   __ Subu(t2, t2, a2);
   __ Branch(&stack_done, le, t2, Operand(zero_reg));
   {
     // Check for stack overflow.
-    __ StackOverflowCheck(t2, t0, t1, &stack_overflow);
+    Generate_StackOverflowCheck(masm, t2, t0, t1, &stack_overflow);

     // Forward the arguments from the caller frame.
     // Point to the first argument to copy (skipping the receiver).
@@ -2091,8 +2095,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
     __ Subu(t1, sp, Operand(t1));
     // Check the stack for overflow. We are not trying to catch interruptions
     // (i.e. debug break and preemption) here, so check the "real stack limit".
-    __ LoadStackLimit(kScratchReg,
-                      MacroAssembler::StackLimitKind::kRealStackLimit);
+    LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
     __ Branch(&done, hs, t1, Operand(kScratchReg));
     {
       FrameScope scope(masm, StackFrame::MANUAL);
@@ -2233,8 +2236,7 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
     __ Subu(t1, sp, Operand(t1));
     // Check the stack for overflow. We are not trying to catch interruptions
     // (i.e. debug break and preemption) here, so check the "real stack limit".
-    __ LoadStackLimit(kScratchReg,
-                      MacroAssembler::StackLimitKind::kRealStackLimit);
+    LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
     __ Branch(&done, hs, t1, Operand(kScratchReg));
     {
       FrameScope scope(masm, StackFrame::MANUAL);
@@ -2354,7 +2356,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     // a3: new target (passed through to callee)
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    __ StackOverflowCheck(a2, t1, kScratchReg, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, t1, kScratchReg, &stack_overflow);

     // Calculate copy start address into a0 and copy end address into t1.
     __ Lsa(a0, fp, a2, kPointerSizeLog2);
@@ -2384,7 +2386,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   {  // Too few parameters: Actual < expected.
     __ bind(&too_few);
     EnterArgumentsAdaptorFrame(masm);
-    __ StackOverflowCheck(a2, t1, kScratchReg, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, t1, kScratchReg, &stack_overflow);

     // Fill the remaining expected arguments with undefined.
     __ LoadRoot(t0, RootIndex::kUndefinedValue);
...
@@ -67,6 +67,24 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,

 namespace {

+enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };
+
+void LoadStackLimit(MacroAssembler* masm, Register destination,
+                    StackLimitKind kind) {
+  DCHECK(masm->root_array_available());
+  Isolate* isolate = masm->isolate();
+  ExternalReference limit =
+      kind == StackLimitKind::kRealStackLimit
+          ? ExternalReference::address_of_real_jslimit(isolate)
+          : ExternalReference::address_of_jslimit(isolate);
+  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));
+  intptr_t offset =
+      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
+  CHECK(is_int32(offset));
+  __ Ld(destination, MemOperand(kRootRegister, static_cast<int32_t>(offset)));
+}
+
 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : number of arguments
@@ -115,6 +133,22 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   __ Ret();
 }

+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
+  // Make scratch1 the space we have left. The stack might already be overflowed
+  // here which will cause scratch1 to become negative.
+  __ dsubu(scratch1, sp, scratch1);
+  // Check if the arguments will overflow the stack.
+  __ dsll(scratch2, num_args, kPointerSizeLog2);
+  // Signed comparison.
+  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
+}
+
 }  // namespace

 // The construct stub for ES5 constructor functions and ES6 class constructors.
@@ -209,7 +243,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     __ SmiUntag(a0);

     Label enough_stack_space, stack_overflow;
-    __ StackOverflowCheck(a0, t0, t1, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
     __ Branch(&enough_stack_space);

     __ bind(&stack_overflow);
@@ -221,11 +255,6 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     __ bind(&enough_stack_space);

-    // TODO(victorgomes): When the arguments adaptor is completely removed, we
-    // should get the formal parameter count and copy the arguments in its
-    // correct position (including any undefined), instead of delaying this to
-    // InvokeFunction.
     // Copy arguments and receiver to the expression stack.
     __ PushArray(t2, a0, t0, t1);

     // We need two copies because we may have to return the original one
@@ -349,8 +378,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Check the stack for overflow. We are not trying to catch interruptions
   // (i.e. debug break and preemption) here, so check the "real stack limit".
   Label stack_overflow;
-  __ LoadStackLimit(kScratchReg,
-                    MacroAssembler::StackLimitKind::kRealStackLimit);
+  LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
   __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

   // ----------- S t a t e -------------
@@ -453,7 +481,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
   Label okay;
-  __ LoadStackLimit(scratch1, MacroAssembler::StackLimitKind::kRealStackLimit);
+  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
   // Make a2 the space we have left. The stack might already be overflowed
   // here which will cause r2 to become negative.
   __ dsubu(scratch1, sp, scratch1);
@@ -773,35 +801,19 @@ static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                       OMIT_SMI_CHECK);
 }

-static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
-                                  Register scratch2) {
-  Register params_size = scratch1;
-  // Get the size of the formal parameters + receiver (in bytes).
-  __ Ld(params_size,
-        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
-  __ Lw(params_size,
-        FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  Register actual_params_size = scratch2;
-  // Compute the size of the actual parameters + receiver (in bytes).
-  __ Ld(actual_params_size,
-        MemOperand(fp, StandardFrameConstants::kArgCOffset));
-  __ dsll(actual_params_size, actual_params_size, kPointerSizeLog2);
-  __ Daddu(actual_params_size, actual_params_size, Operand(kSystemPointerSize));
-  // If actual is bigger than formal, then we should use it to free up the stack
-  // arguments.
-  __ slt(t2, params_size, actual_params_size);
-  __ movn(params_size, actual_params_size, t2);
-#endif
+static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
+  Register args_count = scratch;
+  // Get the arguments + receiver count.
+  __ Ld(args_count,
+        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ Lw(t0, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

   // Leave the frame (also dropping the register file).
   __ LeaveFrame(StackFrame::INTERPRETED);

   // Drop receiver + arguments.
-  __ Daddu(sp, sp, params_size);
+  __ Daddu(sp, sp, args_count);
 }

 // Tail-call |function_id| if |actual_marker| == |expected_marker|
@@ -1073,7 +1085,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     // Do a stack check to ensure we don't go over the limit.
     __ Dsubu(a5, sp, Operand(a4));
-    __ LoadStackLimit(a2, MacroAssembler::StackLimitKind::kRealStackLimit);
+    LoadStackLimit(masm, a2, StackLimitKind::kRealStackLimit);
     __ Branch(&stack_overflow, lo, a5, Operand(a2));

     // If ok, push undefined as the initial value for all register file entries.
@@ -1105,7 +1117,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // Perform interrupt stack check.
   // TODO(solanes): Merge with the real stack limit check above.
   Label stack_check_interrupt, after_stack_check_interrupt;
-  __ LoadStackLimit(a5, MacroAssembler::StackLimitKind::kInterruptStackLimit);
+  LoadStackLimit(masm, a5, StackLimitKind::kInterruptStackLimit);
   __ Branch(&stack_check_interrupt, lo, sp, Operand(a5));
   __ bind(&after_stack_check_interrupt);

@@ -1148,7 +1160,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   __ bind(&do_return);
   // The return value is in v0.
-  LeaveInterpreterFrame(masm, t0, t1);
+  LeaveInterpreterFrame(masm, t0);
   __ Jump(ra);

   __ bind(&stack_check_interrupt);
@@ -1242,7 +1254,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
   __ Daddu(a3, a0, Operand(1));  // Add one for receiver.

-  __ StackOverflowCheck(a3, a4, t0, &stack_overflow);
+  Generate_StackOverflowCheck(masm, a3, a4, t0, &stack_overflow);

   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
     // Don't copy receiver.
@@ -1292,7 +1304,7 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
   // -----------------------------------
   Label stack_overflow;
   __ daddiu(a6, a0, 1);
-  __ StackOverflowCheck(a6, a5, t0, &stack_overflow);
+  Generate_StackOverflowCheck(masm, a6, a5, t0, &stack_overflow);

   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
     // The spread argument should not be pushed.
@@ -1847,7 +1859,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
   // Check for stack overflow.
   Label stack_overflow;
-  __ StackOverflowCheck(len, kScratchReg, a5, &stack_overflow);
+  Generate_StackOverflowCheck(masm, len, kScratchReg, a5, &stack_overflow);

   // Move the arguments already in the stack,
   // including the receiver and the return address.
@@ -1931,13 +1943,6 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
     __ bind(&new_target_constructor);
   }

-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  // TODO(victorgomes): Remove this copy when all the arguments adaptor frame
-  // code is erased.
-  __ mov(a6, fp);
-  __ Ld(a7, MemOperand(fp, StandardFrameConstants::kArgCOffset));
-#else
   // Check if we have an arguments adaptor frame below the function frame.
   Label arguments_adaptor, arguments_done;
   __ Ld(a6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
@@ -1959,14 +1964,13 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
           MemOperand(a6, ArgumentsAdaptorFrameConstants::kLengthOffset));
   }
   __ bind(&arguments_done);
-#endif

   Label stack_done, stack_overflow;
   __ Subu(a7, a7, a2);
   __ Branch(&stack_done, le, a7, Operand(zero_reg));
   {
     // Check for stack overflow.
-    __ StackOverflowCheck(a7, a4, a5, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a7, a4, a5, &stack_overflow);

     // Forward the arguments from the caller frame.
@@ -2157,8 +2161,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
     __ Dsubu(t0, sp, Operand(a5));
     // Check the stack for overflow. We are not trying to catch interruptions
     // (i.e. debug break and preemption) here, so check the "real stack limit".
-    __ LoadStackLimit(kScratchReg,
-                      MacroAssembler::StackLimitKind::kRealStackLimit);
+    LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
     __ Branch(&done, hs, t0, Operand(kScratchReg));
     {
       FrameScope scope(masm, StackFrame::MANUAL);
@@ -2297,8 +2300,7 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
     __ Dsubu(t0, sp, Operand(a5));
     // Check the stack for overflow. We are not trying to catch interruptions
     // (i.e. debug break and preemption) here, so check the "real stack limit".
-    __ LoadStackLimit(kScratchReg,
-                      MacroAssembler::StackLimitKind::kRealStackLimit);
+    LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
     __ Branch(&done, hs, t0, Operand(kScratchReg));
     {
       FrameScope scope(masm, StackFrame::MANUAL);
@@ -2419,7 +2421,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     // a3: new target (passed through to callee)
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    __ StackOverflowCheck(a2, a5, kScratchReg, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);

     // Calculate copy start address into a0 and copy end address into a4.
     __ dsll(a0, a2, kPointerSizeLog2);
@@ -2451,7 +2453,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   {  // Too few parameters: Actual < expected.
     __ bind(&too_few);
     EnterArgumentsAdaptorFrame(masm);
-    __ StackOverflowCheck(a2, a5, kScratchReg, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);

     // Fill the remaining expected arguments with undefined.
     __ LoadRoot(t0, RootIndex::kUndefinedValue);
...
@@ -4387,106 +4387,23 @@ void TurboAssembler::PrepareForTailCall(Register callee_args_count,
   mov(sp, dst_reg);
 }

-void MacroAssembler::LoadStackLimit(Register destination, StackLimitKind kind) {
-  DCHECK(root_array_available());
-  Isolate* isolate = this->isolate();
-  ExternalReference limit =
-      kind == StackLimitKind::kRealStackLimit
-          ? ExternalReference::address_of_real_jslimit(isolate)
-          : ExternalReference::address_of_jslimit(isolate);
-  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));
-  intptr_t offset =
-      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
-  CHECK(is_int32(offset));
-  Lw(destination, MemOperand(kRootRegister, static_cast<int32_t>(offset)));
-}
-
-void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch1,
-                                        Register scratch2,
-                                        Label* stack_overflow) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  LoadStackLimit(scratch1, StackLimitKind::kRealStackLimit);
-  // Make scratch1 the space we have left. The stack might already be overflowed
-  // here which will cause scratch1 to become negative.
-  subu(scratch1, sp, scratch1);
-  // Check if the arguments will overflow the stack.
-  sll(scratch2, num_args, kPointerSizeLog2);
-  // Signed comparison.
-  Branch(stack_overflow, le, scratch1, Operand(scratch2));
-}
-
 void MacroAssembler::InvokePrologue(Register expected_parameter_count,
                                     Register actual_parameter_count,
                                     Label* done, InvokeFlag flag) {
   Label regular_invoke;

+  // Check whether the expected and actual arguments count match. The
+  // registers are set up according to contract with
+  // ArgumentsAdaptorTrampoline:
   //  a0: actual arguments count
   //  a1: function (passed through to callee)
   //  a2: expected arguments count
+  // The code below is made a lot easier because the calling code already sets
+  // up actual and expected registers according to the contract.
   DCHECK_EQ(actual_parameter_count, a0);
   DCHECK_EQ(expected_parameter_count, a2);

-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  // If the expected parameter count is equal to the adaptor sentinel, no need
-  // to push undefined value as arguments.
-  Branch(&regular_invoke, eq, expected_parameter_count,
-         Operand(kDontAdaptArgumentsSentinel));
-  // If overapplication or if the actual argument count is equal to the
-  // formal parameter count, no need to push extra undefined values.
-  Subu(expected_parameter_count, expected_parameter_count,
-       actual_parameter_count);
-  Branch(&regular_invoke, le, expected_parameter_count, Operand(zero_reg));
-  Label stack_overflow;
-  StackOverflowCheck(expected_parameter_count, t0, t1, &stack_overflow);
-  // Underapplication. Move the arguments already in the stack, including the
-  // receiver and the return address.
-  {
-    Label copy;
-    Register src = t3, dest = t4;
-    mov(src, sp);
-    sll(t0, expected_parameter_count, kSystemPointerSizeLog2);
-    Subu(sp, sp, Operand(t0));
-    // Update stack pointer.
-    mov(dest, sp);
-    mov(t0, a0);
-    bind(&copy);
-    Lw(t1, MemOperand(src, 0));
-    Sw(t1, MemOperand(dest, 0));
-    Subu(t0, t0, Operand(1));
-    Addu(src, src, Operand(kSystemPointerSize));
-    Addu(dest, dest, Operand(kSystemPointerSize));
-    Branch(&copy, ge, t0, Operand(zero_reg));
-  }
-  // Fill remaining expected arguments with undefined values.
-  LoadRoot(t0, RootIndex::kUndefinedValue);
-  {
-    Label loop;
-    bind(&loop);
-    Sw(t0, MemOperand(t4, 0));
-    Subu(expected_parameter_count, expected_parameter_count, Operand(1));
-    Addu(t4, t4, Operand(kSystemPointerSize));
-    Branch(&loop, gt, expected_parameter_count, Operand(zero_reg));
-  }
-  b(&regular_invoke);
-  nop();
-  bind(&stack_overflow);
-  {
-    FrameScope frame(this, StackFrame::MANUAL);
-    CallRuntime(Runtime::kThrowStackOverflow);
-    break_(0xCC);
-  }
-#else
-  // Check whether the expected and actual arguments count match. The registers
-  // are set up according to contract with ArgumentsAdaptorTrampoline:
   Branch(&regular_invoke, eq, expected_parameter_count,
          Operand(actual_parameter_count));
@@ -4497,7 +4414,7 @@ void MacroAssembler::InvokePrologue(Register expected_parameter_count,
   } else {
     Jump(adaptor, RelocInfo::CODE_TARGET);
   }
-#endif
   bind(&regular_invoke);
 }
...
@@ -1096,14 +1096,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
                   Register scratch2);

   // -------------------------------------------------------------------------
-  // Stack limit utilities
-
-  enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };
-  void LoadStackLimit(Register destination, StackLimitKind kind);
-  void StackOverflowCheck(Register num_args, Register scratch1,
-                          Register scratch2, Label* stack_overflow);
-
-  // ---------------------------------------------------------------------------
   // Smi utilities.

   void SmiTag(Register reg) { Addu(reg, reg, reg); }
...
@@ -4732,107 +4732,23 @@ void TurboAssembler::PrepareForTailCall(Register callee_args_count,
   mov(sp, dst_reg);
 }

-void MacroAssembler::LoadStackLimit(Register destination, StackLimitKind kind) {
-  DCHECK(root_array_available());
-  Isolate* isolate = this->isolate();
-  ExternalReference limit =
-      kind == StackLimitKind::kRealStackLimit
-          ? ExternalReference::address_of_real_jslimit(isolate)
-          : ExternalReference::address_of_jslimit(isolate);
-  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));
-  intptr_t offset =
-      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
-  CHECK(is_int32(offset));
-  Ld(destination, MemOperand(kRootRegister, static_cast<int32_t>(offset)));
-}
-
-void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch1,
-                                        Register scratch2,
-                                        Label* stack_overflow) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  LoadStackLimit(scratch1, StackLimitKind::kRealStackLimit);
-  // Make scratch1 the space we have left. The stack might already be overflowed
-  // here which will cause scratch1 to become negative.
-  dsubu(scratch1, sp, scratch1);
-  // Check if the arguments will overflow the stack.
-  dsll(scratch2, num_args, kPointerSizeLog2);
-  // Signed comparison.
-  Branch(stack_overflow, le, scratch1, Operand(scratch2));
-}
-
 void MacroAssembler::InvokePrologue(Register expected_parameter_count,
                                     Register actual_parameter_count,
                                     Label* done, InvokeFlag flag) {
   Label regular_invoke;

+  // Check whether the expected and actual arguments count match. The registers
+  // are set up according to contract with ArgumentsAdaptorTrampoline:
   //  a0: actual arguments count
   //  a1: function (passed through to callee)
   //  a2: expected arguments count
+  // The code below is made a lot easier because the calling code already sets
+  // up actual and expected registers according to the contract.
   DCHECK_EQ(actual_parameter_count, a0);
   DCHECK_EQ(expected_parameter_count, a2);

-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  // If the expected parameter count is equal to the adaptor sentinel, no need
-  // to push undefined value as arguments.
-  Branch(&regular_invoke, eq, expected_parameter_count,
-         Operand(kDontAdaptArgumentsSentinel));
-  // If overapplication or if the actual argument count is equal to the
-  // formal parameter count, no need to push extra undefined values.
-  Dsubu(expected_parameter_count, expected_parameter_count,
-        actual_parameter_count);
-  Branch(&regular_invoke, le, expected_parameter_count, Operand(zero_reg));
-  Label stack_overflow;
-  StackOverflowCheck(expected_parameter_count, t0, t1, &stack_overflow);
-  // Underapplication. Move the arguments already in the stack, including the
-  // receiver and the return address.
-  {
-    Label copy;
-    Register src = a6, dest = a7;
-    mov(src, sp);
-    dsll(t0, expected_parameter_count, kSystemPointerSizeLog2);
-    Dsubu(sp, sp, Operand(t0));
-    // Update stack pointer.
-    mov(dest, sp);
-    mov(t0, a0);
-    bind(&copy);
-    Ld(t1, MemOperand(src, 0));
-    Sd(t1, MemOperand(dest, 0));
-    Dsubu(t0, t0, Operand(1));
-    Daddu(src, src, Operand(kSystemPointerSize));
-    Daddu(dest, dest, Operand(kSystemPointerSize));
-    Branch(&copy, ge, t0, Operand(zero_reg));
-  }
-  // Fill remaining expected arguments with undefined values.
-  LoadRoot(t0, RootIndex::kUndefinedValue);
-  {
-    Label loop;
-    bind(&loop);
-    Sd(t0, MemOperand(a7, 0));
-    Dsubu(expected_parameter_count, expected_parameter_count, Operand(1));
-    Daddu(a7, a7, Operand(kSystemPointerSize));
-    Branch(&loop, gt, expected_parameter_count, Operand(zero_reg));
-  }
-  b(&regular_invoke);
-  nop();
-  bind(&stack_overflow);
-  {
-    FrameScope frame(this, StackFrame::MANUAL);
-    CallRuntime(Runtime::kThrowStackOverflow);
-    break_(0xCC);
-  }
-#else
-  // Check whether the expected and actual arguments count match. The registers
-  // are set up according to contract with ArgumentsAdaptorTrampoline:
   Branch(&regular_invoke, eq, expected_parameter_count,
          Operand(actual_parameter_count));
@@ -4843,7 +4759,7 @@ void MacroAssembler::InvokePrologue(Register expected_parameter_count,
   } else {
     Jump(adaptor, RelocInfo::CODE_TARGET);
   }
-#endif
   bind(&regular_invoke);
 }
...
@@ -1140,14 +1140,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
                   Register scratch2);

   // -------------------------------------------------------------------------
-  // Stack limit utilities
-
-  enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };
-  void LoadStackLimit(Register destination, StackLimitKind kind);
-  void StackOverflowCheck(Register num_args, Register scratch1,
-                          Register scratch2, Label* stack_overflow);
-
-  // ---------------------------------------------------------------------------
   // Smi utilities.

   void SmiTag(Register dst, Register src) {
...
@@ -4031,8 +4031,9 @@ void CodeGenerator::AssembleConstructFrame() {
   }
 }

-void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
+void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
   auto call_descriptor = linkage()->GetIncomingDescriptor();
+  int pop_count = static_cast<int>(call_descriptor->StackParameterCount());

   const int returns = frame()->GetReturnSlotCount();
   if (returns != 0) {
@@ -4052,76 +4053,36 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
   }

   MipsOperandConverter g(this, nullptr);
-  const int parameter_count =
-      static_cast<int>(call_descriptor->StackParameterCount());
-  // {aditional_pop_count} is only greater than zero if {parameter_count = 0}.
-  // Check RawMachineAssembler::PopAndReturn.
-  if (parameter_count != 0) {
-    if (additional_pop_count->IsImmediate()) {
-      DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0);
-    } else if (__ emit_debug_code()) {
-      __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue,
-                g.ToRegister(additional_pop_count),
-                Operand(static_cast<int64_t>(0)));
-    }
-  }
-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  // Functions with JS linkage have at least one parameter (the receiver).
-  // If {parameter_count} == 0, it means it is a builtin with
-  // kDontAdaptArgumentsSentinel, which takes care of JS arguments popping
-  // itself.
-  const bool drop_jsargs = frame_access_state()->has_frame() &&
-                           call_descriptor->IsJSFunctionCall() &&
-                           parameter_count != 0;
-#else
-  const bool drop_jsargs = false;
-#endif
   if (call_descriptor->IsCFunctionCall()) {
     AssembleDeconstructFrame();
   } else if (frame_access_state()->has_frame()) {
     // Canonicalize JSFunction return sites for now unless they have an variable
     // number of stack slot pops.
-    if (additional_pop_count->IsImmediate() &&
-        g.ToConstant(additional_pop_count).ToInt32() == 0) {
+    if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
       if (return_label_.is_bound()) {
         __ Branch(&return_label_);
         return;
       } else {
         __ bind(&return_label_);
+        AssembleDeconstructFrame();
       }
-    }
-    if (drop_jsargs) {
-      // Get the actual argument count
-      __ Lw(t0, MemOperand(fp, StandardFrameConstants::kArgCOffset));
-    }
-    AssembleDeconstructFrame();
-  }
-  if (drop_jsargs) {
-    // We must pop all arguments from the stack (including the receiver). This
-    // number of arguments is given by max(1 + argc_reg, parameter_count).
-    __ Addu(t0, t0, Operand(1));  // Also pop the receiver.
-    if (parameter_count > 1) {
-      Label max_number;
-      __ Branch(&max_number, ge, t0, Operand(parameter_count));
-      __ li(t0, parameter_count);
-      __ bind(&max_number);
-    }
-    __ sll(t0, t0, kSystemPointerSizeLog2);
-    __ Addu(sp, sp, t0);
-  } else if (additional_pop_count->IsImmediate()) {
-    DCHECK_EQ(Constant::kInt32, g.ToConstant(additional_pop_count).type());
-    int additional_count = g.ToConstant(additional_pop_count).ToInt32();
-    __ Drop(parameter_count + additional_count);
+    } else {
+      AssembleDeconstructFrame();
+    }
+  }
+  if (pop->IsImmediate()) {
+    DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
+    pop_count += g.ToConstant(pop).ToInt32();
   } else {
-    Register pop_reg = g.ToRegister(additional_pop_count);
-    __ Drop(parameter_count);
+    Register pop_reg = g.ToRegister(pop);
     __ sll(pop_reg, pop_reg, kSystemPointerSizeLog2);
-    __ Addu(sp, sp, pop_reg);
+    __ Addu(sp, sp, Operand(pop_reg));
   }
+  if (pop_count != 0) {
+    __ DropAndRet(pop_count);
+  } else {
     __ Ret();
+  }
 }

 void CodeGenerator::FinishCode() {}
...
@@ -4314,7 +4314,7 @@ void CodeGenerator::AssembleConstructFrame() {
   }
 }

-void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
+void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
   auto call_descriptor = linkage()->GetIncomingDescriptor();

   const int returns = frame()->GetReturnSlotCount();
@@ -4335,76 +4335,36 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
   }

   MipsOperandConverter g(this, nullptr);
-  const int parameter_count =
-      static_cast<int>(call_descriptor->StackParameterCount());
-  // {aditional_pop_count} is only greater than zero if {parameter_count = 0}.
-  // Check RawMachineAssembler::PopAndReturn.
-  if (parameter_count != 0) {
-    if (additional_pop_count->IsImmediate()) {
-      DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0);
-    } else if (__ emit_debug_code()) {
-      __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue,
-                g.ToRegister(additional_pop_count),
-                Operand(static_cast<int64_t>(0)));
-    }
-  }
-#ifdef V8_NO_ARGUMENTS_ADAPTOR
-  // Functions with JS linkage have at least one parameter (the receiver).
-  // If {parameter_count} == 0, it means it is a builtin with
-  // kDontAdaptArgumentsSentinel, which takes care of JS arguments popping
-  // itself.
-  const bool drop_jsargs = frame_access_state()->has_frame() &&
-                           call_descriptor->IsJSFunctionCall() &&
-                           parameter_count != 0;
-#else
-  const bool drop_jsargs = false;
-#endif
   if (call_descriptor->IsCFunctionCall()) {
     AssembleDeconstructFrame();
   } else if (frame_access_state()->has_frame()) {
     // Canonicalize JSFunction return sites for now unless they have an variable
     // number of stack slot pops.
-    if (additional_pop_count->IsImmediate() &&
-        g.ToConstant(additional_pop_count).ToInt32() == 0) {
+    if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
      if (return_label_.is_bound()) {
        __ Branch(&return_label_);
        return;
      } else {
        __ bind(&return_label_);
+        AssembleDeconstructFrame();
      }
-    }
-    if (drop_jsargs) {
-      // Get the actual argument count
-      __ Ld(t0, MemOperand(fp, StandardFrameConstants::kArgCOffset));
-    }
-    AssembleDeconstructFrame();
-  }
-  if (drop_jsargs) {
-    // We must pop all arguments from the stack (including the receiver). This
-    // number of arguments is given by max(1 + argc_reg, parameter_count).
-    __ Daddu(t0, t0, Operand(1));  // Also pop the receiver.
-    if (parameter_count > 1) {
-      Label max_number;
-      __ Branch(&max_number, ge, t0, Operand(parameter_count));
-      __ li(t0, parameter_count);
-      __ bind(&max_number);
-    }
-    __ dsll(t0, t0, kSystemPointerSizeLog2);
-    __ Daddu(sp, sp, t0);
-  } else if (additional_pop_count->IsImmediate()) {
-    DCHECK_EQ(Constant::kInt32, g.ToConstant(additional_pop_count).type());
-    int additional_count = g.ToConstant(additional_pop_count).ToInt32();
-    __ Drop(parameter_count + additional_count);
+    } else {
+      AssembleDeconstructFrame();
+    }
+  }
+  int pop_count = static_cast<int>(call_descriptor->StackParameterCount());
+  if (pop->IsImmediate()) {
+    pop_count += g.ToConstant(pop).ToInt32();
   } else {
-    Register pop_reg = g.ToRegister(additional_pop_count);
-    __ Drop(parameter_count);
+    Register pop_reg = g.ToRegister(pop);
     __ dsll(pop_reg, pop_reg, kSystemPointerSizeLog2);
     __ Daddu(sp, sp, pop_reg);
   }
+  if (pop_count != 0) {
+    __ DropAndRet(pop_count);
+  } else {
     __ Ret();
+  }
 }

 void CodeGenerator::FinishCode() {}
...