Commit 7724a255 authored by predrag.rudic, committed by Commit bot

MIPS64: Port `Reland [arm64] Improve some new builtins.`

Port a1103a11

Original commit message:
 - Simplify the variable-length pop sequence on entry. (It now uses
   smaller code with no branches.)
 - Use conditional compare to merge branches where appropriate.
 - Make use of Ldrsw + UntagSmiFieldMemOperand to load smis more
   efficiently.
 - Only load 'undefined' and 'null' once per builtin.
 - A few other small improvements.

Review-Url: https://codereview.chromium.org/2507683003
Cr-Commit-Position: refs/heads/master@{#41113}
parent 5f06c1a1
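
Throughout the diff below, pairs of ld + SmiUntag are folded into a single lw
through UntagSmiMemOperand / UntagSmiFieldMemOperand; this is the mips64
counterpart of the "Ldrsw + UntagSmiFieldMemOperand" item in the commit
message. It works because a 64-bit smi keeps its 32-bit payload in the upper
half of the word, so a 32-bit load of that half already yields the untagged
value. A minimal standalone C++ sketch of the idea (assuming V8's default smi
encoding and a little-endian target such as mips64el; the helper names below
are illustrative, not the real MacroAssembler API):

  #include <cstdint>
  #include <cstring>

  constexpr int kSmiShift = 32;  // smi payload lives in the upper 32 bits

  uint64_t TagSmi(int32_t value) {
    return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
  }

  // Old pattern: 64-bit load of the whole slot, then untag (ld + SmiUntag).
  int32_t LoadThenUntag(const uint64_t* slot) {
    return static_cast<int32_t>(*slot >> kSmiShift);
  }

  // New pattern: 32-bit load of the upper half only; UntagSmiMemOperand in
  // effect just biases the offset by 4 bytes on little-endian targets.
  int32_t LoadUntagged(const uint64_t* slot) {
    int32_t hi;
    std::memcpy(&hi, reinterpret_cast<const char*>(slot) + 4, sizeof(hi));
    return hi;
  }

  int main() {  // round trip on a little-endian host, mirroring mips64el
    uint64_t slot = TagSmi(-42);
    return (LoadThenUntag(&slot) == -42 && LoadUntagged(&slot) == -42) ? 0 : 1;
  }
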
@@ -814,8 +814,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Restore the operand stack.
__ ld(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
__ ld(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
__ SmiUntag(a3);
__ lw(a3, UntagSmiFieldMemOperand(a0, FixedArray::kLengthOffset));
__ Daddu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ Dlsa(a3, a0, a3, kPointerSizeLog2);
{
@@ -837,8 +836,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
__ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
__ Daddu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
__ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
__ SmiUntag(a2);
__ lw(a2,
UntagSmiFieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
__ Daddu(a3, a3, Operand(a2));
__ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
__ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
@@ -1318,9 +1317,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
}
// Get the target bytecode offset from the frame.
__ ld(kInterpreterBytecodeOffsetRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
__ lw(
kInterpreterBytecodeOffsetRegister,
UntagSmiMemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
// Dispatch to the target bytecode.
__ Daddu(a1, kInterpreterBytecodeArrayRegister,
@@ -1686,8 +1685,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
}
// Get the full codegen state from the stack and untag it -> a6.
__ ld(a6, MemOperand(sp, 0 * kPointerSize));
__ SmiUntag(a6);
__ lw(a6, UntagSmiMemOperand(sp, 0 * kPointerSize));
// Switch on the state.
Label with_tos_register, unknown_state;
__ Branch(
@@ -1855,10 +1853,10 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
__ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
__ lw(a1,
UntagSmiMemOperand(a1, FixedArray::OffsetOfElementAt(
DeoptimizationInputData::kOsrPcOffsetIndex) -
kHeapObjectTag));
__ SmiUntag(a1);
// Compute the target address = code_obj + header_size + osr_offset
// <entry_addr> = <code_obj> + #header_size + <osr_offset>
@@ -1886,52 +1884,56 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- sp[8] : receiver
// -----------------------------------
Register argc = a0;
Register arg_array = a0;
Register receiver = a1;
Register this_arg = a2;
Register undefined_value = a3;
Register scratch = a4;
__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
// 1. Load receiver into a1, argArray into a0 (if present), remove all
// arguments from the stack (including the receiver), and push thisArg (if
// present) instead.
{
Label no_arg;
Register scratch = a4;
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ mov(a3, a2);
// Dlsa() cannot be used here because the scratch value is used later.
__ dsll(scratch, a0, kPointerSizeLog2);
__ Daddu(a0, sp, Operand(scratch));
__ ld(a1, MemOperand(a0)); // receiver
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a2, MemOperand(a0)); // thisArg
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a3, MemOperand(a0)); // argArray
__ bind(&no_arg);
__ Daddu(sp, sp, Operand(scratch));
__ sd(a2, MemOperand(sp));
__ mov(a0, a3);
// Claim (2 - argc) dummy arguments from the stack, to put the stack in a
// consistent state for a simple pop operation.
__ Dsubu(sp, sp, Operand(2 * kPointerSize));
__ Dlsa(sp, sp, argc, kPointerSizeLog2);
__ mov(scratch, argc);
__ Pop(this_arg, arg_array); // Overwrite argc
__ Movz(arg_array, undefined_value, scratch); // if argc == 0
__ Movz(this_arg, undefined_value, scratch); // if argc == 0
__ Dsubu(scratch, scratch, Operand(1));
__ Movz(arg_array, undefined_value, scratch); // if argc == 1
__ ld(receiver, MemOperand(sp));
__ sd(this_arg, MemOperand(sp));
}
// ----------- S t a t e -------------
// -- a0 : argArray
// -- a1 : receiver
// -- a3 : undefined root value
// -- sp[0] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(a1, &receiver_not_callable);
__ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
__ JumpIfSmi(receiver, &receiver_not_callable);
__ ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
__ And(a4, a4, Operand(1 << Map::kIsCallable));
__ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
__ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);
__ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments);
__ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));
// 4a. Apply the receiver to the given argArray (passing undefined for
// new.target).
__ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
DCHECK(undefined_value.is(a3));
__ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
// 4b. The argArray is either null or undefined, so we tail call without any
@@ -1939,13 +1941,14 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
__ bind(&no_arguments);
{
__ mov(a0, zero_reg);
DCHECK(receiver.is(a1));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ bind(&receiver_not_callable);
{
__ sd(a1, MemOperand(sp));
__ sd(receiver, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
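
The argument shuffle at the top of Generate_FunctionPrototypeApply above
replaces the old branchy scan: it claims (2 - argc) dummy slots so that two
words can always be popped, then uses Movz to substitute undefined for
whatever the caller did not pass. A rough standalone C++ model of that data
flow (word-sized pointer arithmetic; the function name is illustrative, and it
assumes Pop(a, b) takes b from the lower stack slot, as on mips64):

  #include <cstdint>

  // On entry sp points at the argc pushed arguments (last argument lowest),
  // with the receiver in the slot just above them.
  uint64_t* ApplyShuffle(uint64_t* sp, int argc, uint64_t undefined_value,
                         uint64_t* receiver, uint64_t* this_arg,
                         uint64_t* arg_array) {
    sp -= 2;                       // Dsubu(sp, sp, 2 * kPointerSize)
    sp += argc;                    // Dlsa(sp, sp, argc, kPointerSizeLog2)
    int scratch = argc;
    *arg_array = sp[0];            // Pop(this_arg, arg_array)
    *this_arg = sp[1];
    sp += 2;
    if (scratch == 0) {            // Movz(..., undefined_value, scratch)
      *arg_array = undefined_value;
      *this_arg = undefined_value;
    }
    if (scratch - 1 == 0) {        // Dsubu(scratch, scratch, 1); Movz(...)
      *arg_array = undefined_value;
    }
    *receiver = sp[0];             // ld(receiver, MemOperand(sp))
    sp[0] = *this_arg;             // sd(this_arg, MemOperand(sp))
    return sp;                     // sp[0] now holds thisArg
  }

The same claim-then-pop pattern, extended to three slots, appears in
Generate_ReflectApply and Generate_ReflectConstruct below.
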
@@ -1995,62 +1998,67 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc
// -- sp[0] : argumentsList
// -- sp[4] : thisArgument
// -- sp[8] : target
// -- sp[0] : argumentsList (if argc ==3)
// -- sp[4] : thisArgument (if argc >=2)
// -- sp[8] : target (if argc >=1)
// -- sp[12] : receiver
// -----------------------------------
Register argc = a0;
Register arguments_list = a0;
Register target = a1;
Register this_argument = a2;
Register undefined_value = a3;
Register scratch = a4;
__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
// 1. Load target into a1 (if present), argumentsList into a0 (if present),
// remove all arguments from the stack (including the receiver), and push
// thisArgument (if present) instead.
{
Label no_arg;
Register scratch = a4;
__ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
__ mov(a2, a1);
__ mov(a3, a1);
__ dsll(scratch, a0, kPointerSizeLog2);
__ mov(a0, scratch);
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(zero_reg));
__ Daddu(a0, sp, Operand(a0));
__ ld(a1, MemOperand(a0)); // target
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a2, MemOperand(a0)); // thisArgument
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a3, MemOperand(a0)); // argumentsList
__ bind(&no_arg);
__ Daddu(sp, sp, Operand(scratch));
__ sd(a2, MemOperand(sp));
__ mov(a0, a3);
// Claim (3 - argc) dummy arguments from the stack, to put the stack in a
// consistent state for a simple pop operation.
__ Dsubu(sp, sp, Operand(3 * kPointerSize));
__ Dlsa(sp, sp, argc, kPointerSizeLog2);
__ mov(scratch, argc);
__ Pop(target, this_argument, arguments_list);
__ Movz(arguments_list, undefined_value, scratch); // if argc == 0
__ Movz(this_argument, undefined_value, scratch); // if argc == 0
__ Movz(target, undefined_value, scratch); // if argc == 0
__ Dsubu(scratch, scratch, Operand(1));
__ Movz(arguments_list, undefined_value, scratch); // if argc == 1
__ Movz(this_argument, undefined_value, scratch); // if argc == 1
__ Dsubu(scratch, scratch, Operand(1));
__ Movz(arguments_list, undefined_value, scratch); // if argc == 2
__ sd(this_argument, MemOperand(sp, 0)); // Overwrite receiver
}
// ----------- S t a t e -------------
// -- a0 : argumentsList
// -- a1 : target
// -- a3 : undefined root value
// -- sp[0] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(a1, &target_not_callable);
__ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
__ JumpIfSmi(target, &target_not_callable);
__ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
__ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
__ And(a4, a4, Operand(1 << Map::kIsCallable));
__ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
// 3a. Apply the target to the given argumentsList (passing undefined for
// new.target).
__ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
DCHECK(undefined_value.is(a3));
__ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ bind(&target_not_callable);
{
__ sd(a1, MemOperand(sp));
__ sd(target, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
@@ -2058,59 +2066,61 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc
// -- sp[0] : new.target (optional)
// -- sp[4] : argumentsList
// -- sp[8] : target
// -- sp[0] : new.target (optional) (dummy value if argc <= 2)
// -- sp[4] : argumentsList (dummy value if argc <= 1)
// -- sp[8] : target (dummy value if argc == 0)
// -- sp[12] : receiver
// -----------------------------------
Register argc = a0;
Register arguments_list = a0;
Register target = a1;
Register new_target = a3;
Register undefined_value = a4;
Register scratch = a5;
// 1. Load target into a1 (if present), argumentsList into a0 (if present),
// new.target into a3 (if present, otherwise use target), remove all
// arguments from the stack (including the receiver), and push thisArgument
// (if present) instead.
{
Label no_arg;
Register scratch = a4;
__ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
__ mov(a2, a1);
// Dlsa() cannot be used here because the scratch value is used later.
__ dsll(scratch, a0, kPointerSizeLog2);
__ Daddu(a0, sp, Operand(scratch));
__ sd(a2, MemOperand(a0)); // receiver
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a1, MemOperand(a0)); // target
__ mov(a3, a1); // new.target defaults to target
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a2, MemOperand(a0)); // argumentsList
__ Dsubu(a0, a0, Operand(kPointerSize));
__ Branch(&no_arg, lt, a0, Operand(sp));
__ ld(a3, MemOperand(a0)); // new.target
__ bind(&no_arg);
__ Daddu(sp, sp, Operand(scratch));
__ mov(a0, a2);
// Claim (3 - argc) dummy arguments from the stack, to put the stack in a
// consistent state for a simple pop operation.
__ Dsubu(sp, sp, Operand(3 * kPointerSize));
__ Dlsa(sp, sp, argc, kPointerSizeLog2);
__ mov(scratch, argc);
__ Pop(target, arguments_list, new_target);
__ Movz(arguments_list, undefined_value, scratch); // if argc == 0
__ Movz(new_target, undefined_value, scratch); // if argc == 0
__ Movz(target, undefined_value, scratch); // if argc == 0
__ Dsubu(scratch, scratch, Operand(1));
__ Movz(arguments_list, undefined_value, scratch); // if argc == 1
__ Movz(new_target, target, scratch); // if argc == 1
__ Dsubu(scratch, scratch, Operand(1));
__ Movz(new_target, target, scratch); // if argc == 2
__ sd(undefined_value, MemOperand(sp, 0)); // Overwrite receiver
}
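Read together, the Movz chain above encodes the following defaulting rule
(summarized as pseudo-code; the slot names are illustrative):
// target         = (argc >= 1) ? <target slot>        : undefined
// arguments_list = (argc >= 2) ? <argumentsList slot>  : undefined
// new_target     = (argc >= 3) ? <new.target slot>     : target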
// ----------- S t a t e -------------
// -- a0 : argumentsList
// -- a3 : new.target
// -- a1 : target
// -- a3 : new.target
// -- sp[0] : receiver (undefined)
// -----------------------------------
// 2. Make sure the target is actually a constructor.
Label target_not_constructor;
__ JumpIfSmi(a1, &target_not_constructor);
__ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
__ JumpIfSmi(target, &target_not_constructor);
__ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
__ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
__ And(a4, a4, Operand(1 << Map::kIsConstructor));
__ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));
// 3. Make sure the target is actually a constructor.
Label new_target_not_constructor;
__ JumpIfSmi(a3, &new_target_not_constructor);
__ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset));
__ JumpIfSmi(new_target, &new_target_not_constructor);
__ ld(a4, FieldMemOperand(new_target, HeapObject::kMapOffset));
__ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
__ And(a4, a4, Operand(1 << Map::kIsConstructor));
__ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));
@@ -2121,14 +2131,14 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// 4b. The target is not a constructor, throw an appropriate TypeError.
__ bind(&target_not_constructor);
{
__ sd(a1, MemOperand(sp));
__ sd(target, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowCalledNonCallable);
}
// 4c. The new.target is not a constructor, throw an appropriate TypeError.
__ bind(&new_target_not_constructor);
{
__ sd(a3, MemOperand(sp));
__ sd(new_target, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowCalledNonCallable);
}
}
@@ -2167,22 +2177,34 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
// -- sp[0] : thisArgument
// -----------------------------------
Register arguments_list = a0;
Register target = a1;
Register new_target = a3;
Register args = a0;
Register len = a2;
// Create the list of arguments from the array-like argumentsList.
{
Label create_arguments, create_array, create_runtime, done_create;
__ JumpIfSmi(a0, &create_runtime);
__ JumpIfSmi(arguments_list, &create_runtime);
// Load the map of argumentsList into a2.
__ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
Register arguments_list_map = a2;
__ ld(arguments_list_map,
FieldMemOperand(arguments_list, HeapObject::kMapOffset));
// Load native context into a4.
__ ld(a4, NativeContextMemOperand());
Register native_context = a4;
__ ld(native_context, NativeContextMemOperand());
// Check if argumentsList is an (unmodified) arguments object.
__ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
__ Branch(&create_arguments, eq, a2, Operand(at));
__ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX));
__ Branch(&create_arguments, eq, a2, Operand(at));
__ ld(at, ContextMemOperand(native_context,
Context::SLOPPY_ARGUMENTS_MAP_INDEX));
__ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
__ ld(at, ContextMemOperand(native_context,
Context::STRICT_ARGUMENTS_MAP_INDEX));
__ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
// Check if argumentsList is a fast JSArray.
__ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
@@ -2193,23 +2215,23 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
__ bind(&create_runtime);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a1, a3, a0);
__ Push(target, new_target, arguments_list);
__ CallRuntime(Runtime::kCreateListFromArrayLike);
__ mov(a0, v0);
__ Pop(a1, a3);
__ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
__ SmiUntag(a2);
__ mov(arguments_list, v0);
__ Pop(target, new_target);
__ lw(len, UntagSmiFieldMemOperand(v0, FixedArray::kLengthOffset));
}
__ Branch(&done_create);
// Try to create the list from an arguments object.
__ bind(&create_arguments);
__ ld(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
__ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset));
__ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset));
__ Branch(&create_runtime, ne, a2, Operand(at));
__ SmiUntag(a2);
__ mov(a0, a4);
__ lw(len, UntagSmiFieldMemOperand(arguments_list,
JSArgumentsObject::kLengthOffset));
__ ld(a4, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
__ lw(at, UntagSmiFieldMemOperand(a4, FixedArray::kLengthOffset));
__ Branch(&create_runtime, ne, len, Operand(at));
__ mov(args, a4);
__ Branch(&done_create);
// Try to create the list from a JSArray object.
@@ -2217,13 +2239,12 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
__ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset));
__ DecodeField<Map::ElementsKindBits>(a2);
STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(FAST_ELEMENTS == 2);
__ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
__ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
__ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
__ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
__ SmiUntag(a2);
__ andi(a2, a2, uint16_t(~FAST_ELEMENTS)); // works if enum ElementsKind
// has less than 2^16 elements
__ Branch(&create_runtime, ne, a2, Operand(int64_t(0)));
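// Note that (kind & ~FAST_ELEMENTS) is zero only for FAST_SMI_ELEMENTS (0)
// and FAST_ELEMENTS (2), so holey and slower elements kinds still take the
// create_runtime path, as in the code this replaces.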
__ lw(a2, UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
__ ld(a0, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
__ bind(&done_create);
}
@@ -2238,7 +2259,7 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
// here which will cause ip to become negative.
__ Dsubu(a4, sp, a4);
// Check if the arguments will overflow the stack.
__ dsll(at, a2, kPointerSizeLog2);
__ dsll(at, len, kPointerSizeLog2);
__ Branch(&done, gt, a4, Operand(at)); // Signed comparison.
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&done);
@@ -2254,19 +2275,34 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
// Push arguments onto the stack (thisArgument is already on the stack).
{
__ mov(a4, zero_reg);
Label done, loop;
Register src = a4;
Register scratch = len;
__ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
__ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
__ mov(a0, len); // The 'len' argument for Call() or Construct().
__ dsll(scratch, len, kPointerSizeLog2);
__ Dsubu(scratch, sp, Operand(scratch));
__ bind(&loop);
__ Branch(&done, eq, a4, Operand(a2));
__ Dlsa(at, a0, a4, kPointerSizeLog2);
__ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
__ Push(at);
__ Daddu(a4, a4, Operand(1));
__ Branch(&loop);
__ ld(a5, MemOperand(src));
__ daddiu(src, src, kPointerSize);
__ Push(a5);
__ Branch(&loop, ne, scratch, Operand(sp));
__ bind(&done);
__ Move(a0, a4);
}
// ----------- S t a t e -------------
// -- a0 : argument count (len)
// -- a1 : target
// -- a3 : new.target (checked to be constructor or undefined)
// -- sp[0] : args[len-1]
// -- sp[8] : args[len-2]
// ... : ...
// -- sp[8*(len-2)] : args[1]
// -- sp[8*(len-1)] : args[0]
// ----------------------------------
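
The rewritten push loop above precomputes the final stack pointer and copies
the FixedArray elements first to last, so that args[len - 1] ends up at sp[0]
as described in the state comment. A standalone C++ sketch of the same data
flow (names illustrative; the stack grows downwards):

  #include <cstdint>

  // elements points at the FixedArray body holding len tagged values.
  uint64_t* PushArguments(uint64_t* sp, const uint64_t* elements, int len) {
    const uint64_t* src = elements;  // daddiu(src, args, header offset)
    uint64_t* end = sp - len;        // dsll + Dsubu: precomputed loop bound
    while (sp != end) {              // Branch(&loop, ne, scratch, Operand(sp))
      *--sp = *src++;                // ld(a5, MemOperand(src)); Push(a5)
    }
    return sp;                       // if len == 0 the loop never runs
  }
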
// Dispatch to Call or Construct depending on whether new.target is undefined.
{
Label construct;
@@ -2344,9 +2380,8 @@ void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
// Drop current frame and load arguments count from arguments adaptor frame.
__ mov(fp, scratch2);
__ ld(caller_args_count_reg,
MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiUntag(caller_args_count_reg);
__ lw(caller_args_count_reg,
UntagSmiMemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ Branch(&formal_parameter_count_loaded);
__ bind(&no_arguments_adaptor);
@@ -2503,8 +2538,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
// Load [[BoundArguments]] into a2 and length of that into a4.
__ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
__ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
__ SmiUntag(a4);
__ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
// ----------- S t a t e -------------
// -- a0 : the number of arguments (not including the receiver)
@@ -2551,8 +2585,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
// Copy [[BoundArguments]] to the stack (below the arguments).
{
Label loop, done_loop;
__ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
__ SmiUntag(a4);
__ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
__ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ bind(&loop);
__ Dsubu(a4, a4, Operand(1));
@@ -2665,8 +2698,7 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
// Load [[BoundArguments]] into a2 and length of that into a4.
__ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
__ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
__ SmiUntag(a4);
__ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
// ----------- S t a t e -------------
// -- a0 : the number of arguments (not including the receiver)
@@ -2714,8 +2746,7 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
// Copy [[BoundArguments]] to the stack (below the arguments).
{
Label loop, done_loop;
__ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
__ SmiUntag(a4);
__ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
__ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ bind(&loop);
__ Dsubu(a4, a4, Operand(1));
......