// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument in a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First arg = sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0], drop args[0] + receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
        t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
        t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : original constructor
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(a2, t0);
      __ push(a2);
    }

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(a0, a1, a3);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Fall back to runtime if the original constructor and function differ.
      __ Branch(&rt_call, ne, a1, Operand(a3));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, t5, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(t5, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, t5, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::Counter>(t2, t0);
        __ Branch(&allocate, lt, t2, Operand(Map::kSlackTrackingCounterEnd));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::Counter::kShift));
        __ Branch(USE_DELAY_SLOT, &allocate, ne, t2,
                  Operand(Map::kSlackTrackingCounterEnd));
        __ sw(t0, bit_field3);  // In delay slot.
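        // The store above executes in the branch delay slot, so the
        // decremented counter is written back whether or not the branch to
        // &allocate is taken.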

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(a1, a2);
        __ li(t2, Operand(Map::kSlackTrackingCounterEnd - 1));

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      Label rt_call_reload_new_target;
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call_reload_new_target, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3*kPointerSize));
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking, lt, t2,
                  Operand(Map::kSlackTrackingCounterEnd));

        // Allocate object with a slack.
        __ lbu(a0, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
        __ lbu(a2, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
        __ subu(a0, a0, a2);
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));   // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
              a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(a0, a0, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(a0));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Continue with JSObject being successfully allocated.
      // t4: JSObject
      __ jmp(&allocated);

      // Reload the original constructor and fall-through.
      __ bind(&rt_call_reload_new_target);
      __ lw(a3, MemOperand(sp, 0 * kPointerSize));
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    // a3: original constructor
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ lw(a2, MemOperand(sp, 3 * kPointerSize));
      __ push(a2);  // argument 1: allocation site
    }

    __ Push(a1, a3);  // arguments 2-3 / 1-2
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ mov(t4, v0);

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, 3 * kPointerSize));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    // Restore the parameters.
    __ Pop(a3);  // new.target
    __ Pop(a1);

    // Retrieve smi-tagged arguments count from the stack.
    __ lw(a0, MemOperand(sp));
    __ SmiUntag(a0);

    __ Push(a3, t4, t4);

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: new.target
    // sp[3]: number of arguments (smi-tagged)
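    // The loop below uses the smi-tagged count in a3 as its index: subtracting
    // 2 steps it down by one smi, and the shift by
    // (kPointerSizeLog2 - kSmiTagSize) scales the smi to a byte offset.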
    Label loop, entry;
    __ SmiTag(a3, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target (original constructor)
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  // a1: smi-tagged argument count, loaded from the frame above. Scale the smi
  // to a byte count, then drop the arguments and the receiver.
  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : original constructor
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // TODO(dslomov): support pretenuring
  CHECK(!FLAG_pretenuring_call_new);

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    __ mov(t0, a0);
    __ SmiTag(t0);
    __ push(t0);  // Smi-tagged arguments count.

    // Push new.target.
    __ push(a3);

    // receiver is the hole.
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ push(at);

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
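    // t0 holds the smi-tagged count; the shift by (kPointerSizeLog2 - 1)
    // scales the smi directly to a byte offset, and subtracting 2 steps the
    // index down by one argument.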
    Label loop, entry;
    __ Branch(&entry);
    __ bind(&loop);
    __ sll(at, t0, kPointerSizeLog2 - 1);
    __ Addu(at, a2, Operand(at));
    __ lw(at, MemOperand(at));
    __ push(at);
    __ bind(&entry);
    __ Subu(t0, t0, Operand(2));
    __ Branch(&loop, ge, t0, Operand(zero_reg));

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ li(a2, Operand(debug_step_in_fp));
    __ lw(a2, MemOperand(a2));
    __ Branch(&skip_step_in, eq, a2, Operand(zero_reg));

    __ Push(a0, a1, a1);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(a0, a1);

    __ bind(&skip_step_in);

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    ParameterCount actual(a0);
    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // v0: result
    // sp[0]: new.target
    // sp[1]: number of arguments (smi-tagged)
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    __ lw(a1, MemOperand(sp, kPointerSize));

    // Leave construct frame.
  }

  __ sll(at, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, Operand(at));
  __ Addu(sp, sp, Operand(kPointerSize));
  __ Jump(ra);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
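  // t3: number of bytes the arguments will occupy on the stack.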
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ lw(a1, MemOperand(fp, calleeOffset));
  if (argc_is_tagged == kArgcIsUntaggedInt) {
    __ SmiTag(argc);
  }
  __ Push(a1, argc);
  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kPointerSize;
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ LoadRoot(
      at, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ push(at);

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
  __ Jump(ra);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
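    // The compiler hints are stored as a smi, so bit positions are offset by
    // kSmiTagSize when tested on the tagged value.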
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ Branch(USE_DELAY_SLOT, &patch_receiver);
    __ li(t0, Operand(0, RelocInfo::NONE32));  // In delay slot.

    __ bind(&use_global_proxy);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Label entry, loop;
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  __ lw(key, MemOperand(fp, indexOffset));
  __ Branch(&entry);

  // Load the current argument from the arguments array.
  __ bind(&loop);
  __ lw(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  FeedbackVectorSpec spec(0, Code::KEYED_LOAD_IC);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ li(slot, Operand(Smi::FromInt(index)));
  __ li(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  __ push(v0);

  // Use inline caching to access the arguments.
  __ lw(key, MemOperand(fp, indexOffset));
  __ Addu(key, key, Operand(1 << kSmiTagSize));
  __ sw(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ lw(a1, MemOperand(fp, limitOffset));
  __ Branch(&loop, ne, key, Operand(a1));

  // On exit, the pushed arguments count is in a0, untagged
  __ mov(a0, key);
  __ SmiUntag(a0);
}


// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;
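  // Function.prototype.apply takes two formal parameters (thisArg, argArray)
  // and Reflect.apply takes three (target, thisArg, argumentsList); kStackSize
  // adds the receiver slot.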

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgumentsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    if (targetIsArgument) {
      __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    }

    // Returns the result in v0.
    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_proxy);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
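    // Pass zero as the expected argument count (a2) to the adaptor trampoline.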
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.
}


static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to the constructor.
    Label validate_arguments;
    __ lw(a0, MemOperand(fp, kNewTargetOffset));
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&validate_arguments, ne, a0, Operand(at));
    __ lw(a0, MemOperand(fp, kFunctionOffset));
    __ sw(a0, MemOperand(fp, kNewTargetOffset));

    // Validate arguments
    __ bind(&validate_arguments);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(a0);
    __ lw(a0, MemOperand(fp, kNewTargetOffset));  // get the new.target
    __ push(a0);
    // Returns argument count in v0.
    __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);

    // Returns result in v0.
    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ push(v0);  // limit
    __ mov(a1, zero_reg);  // initial index
    __ push(a1);
    // Push newTarget and callee functions
    __ lw(a0, MemOperand(fp, kNewTargetOffset));
    __ push(a0);
    __ lw(a0, MemOperand(fp, kFunctionOffset));
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ lw(a1, MemOperand(fp, kFunctionOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    __ Drop(1);

    // Leave internal frame.
  }
  __ jr(ra);
  __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
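  // Smi-tag the actual argument count, then push it together with the
  // function, an ARGUMENTS_ADAPTOR frame marker, the caller's fp and ra.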
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters from the stack.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
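  // Drop the arguments: scale the smi-tagged count in a1 to a byte offset.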
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless because the number of arguments is always greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, t2, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
                                 kSmiTagSize)));
    __ Branch(&no_strong_error, eq, t3, Operand(zero_reg));

    // What we really care about is the required number of arguments.
    __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset));
    __ SmiUntag(t2);
    __ Branch(&no_strong_error, ge, a0, Operand(t2));

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
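    // Loop until the copy cursor (a0) reaches the end address in t3.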
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
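    // Push undefined until sp reaches the fill limit computed in a2.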
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
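    // The stack overflow builtin throws, so execution should not return here.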
    __ break_(0xCC);
  }
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS