// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.



#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
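  // For example, a call with two JS arguments and NEEDS_CALLED_FUNCTION
  // results in a0 = 2 + 1 + 1 = 4.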
  __ Addu(a0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument in a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First arg = sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0], drop args[0] + receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
        t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
        t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
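  // The map, properties, elements and value stores above cover all four words.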

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore the function.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
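  // Compute the entry address from the (tagged) Code object and jump there.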
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_Runtime_NewObject(MacroAssembler* masm,
                                       bool create_memento,
                                       Register original_constructor,
                                       Label* count_incremented,
                                       Label* allocated) {
  if (create_memento) {
    // Get the cell or allocation site.
    __ lw(a2, MemOperand(sp, 2 * kPointerSize));
    __ push(a2);
  }

  __ push(a1);                    // argument for Runtime_NewObject
  __ push(original_constructor);  // original constructor
  if (create_memento) {
    __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
  } else {
    __ CallRuntime(Runtime::kNewObject, 2);
  }
  __ mov(t4, v0);

  // Runtime_NewObjectWithAllocationSite increments allocation count.
  // Skip the increment.
  if (create_memento) {
    __ jmp(count_incremented);
  } else {
    __ jmp(allocated);
  }
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : original constructor
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(a2, t0);
      __ push(a2);
    }

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());

    Label rt_call, allocated, normal_new, count_incremented;
    __ Branch(&normal_new, eq, a1, Operand(a3));

    // Original constructor and function are different.
    Generate_Runtime_NewObject(masm, create_memento, a3, &count_incremented,
                               &allocated);
    __ bind(&normal_new);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::Counter>(t2, t0);
        __ Branch(&allocate, lt, t2, Operand(Map::kSlackTrackingCounterEnd));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::Counter::kShift));
        __ Branch(USE_DELAY_SLOT, &allocate, ne, t2,
                  Operand(Map::kSlackTrackingCounterEnd));
        __ sw(t0, bit_field3);  // In delay slot.

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(a1, a2);
        // Slack tracking counter is Map::kSlackTrackingCounterEnd after runtime
        // call.
        __ li(t2, Map::kSlackTrackingCounterEnd);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (not including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3*kPointerSize));
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking, lt, t2,
                  Operand(Map::kSlackTrackingCounterEnd));

        // Allocate object with a slack.
        __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));   // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
              a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(a0, a0, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(a0));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The instance sizes field contains both pre-allocated property fields
      // and in-object properties.
      __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
      __ Addu(a3, a3, Operand(t6));
      __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
          a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (!is_api_function || create_memento) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ bind(&rt_call);
    Generate_Runtime_NewObject(masm, create_memento, a1, &count_incremented,
                               &allocated);

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ Push(t4, t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
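    // a3 is smi-tagged, so decrementing by 2 steps to the previous argument.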
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }
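  // a1 holds the smi-tagged argument count; shifting by kPointerSizeLog2 - 1
  // scales it to bytes (the smi tag supplies the remaining bit), and the extra
  // kPointerSize drops the receiver.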

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : original constructor
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // TODO(dslomov): support pretenuring
  CHECK(!FLAG_pretenuring_call_new);

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    __ mov(t0, a0);
    __ SmiTag(t0);
    __ push(t0);  // Smi-tagged arguments count.

    // Push new.target.
    __ push(a3);

    // receiver is the hole.
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ push(at);

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ Branch(&entry);
    __ bind(&loop);
    __ sll(at, t0, kPointerSizeLog2 - 1);
    __ Addu(at, a2, Operand(at));
    __ lw(at, MemOperand(at));
    __ push(at);
    __ bind(&entry);
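    // t0 is smi-tagged, so decrementing by 2 steps to the previous argument.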
    __ Subu(t0, t0, Operand(2));
    __ Branch(&loop, ge, t0, Operand(zero_reg));

    __ Addu(a0, a0, Operand(1));

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ li(a2, Operand(debug_step_in_fp));
    __ lw(a2, MemOperand(a2));
    __ Branch(&skip_step_in, eq, a2, Operand(zero_reg));

    __ Push(a0, a1, a1);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(a0, a1);

    __ bind(&skip_step_in);

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    ParameterCount actual(a0);
    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // v0: result
    // sp[0]: number of arguments (smi-tagged)
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    __ lw(a1, MemOperand(sp, 0));

    // Leave construct frame.
  }

  __ sll(at, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, Operand(at));
  __ Addu(sp, sp, Operand(kPointerSize));
  __ Jump(ra);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore the function.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}



static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Notify the runtime of the stub failure; no arguments are passed.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
  __ Jump(ra);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (compiler hints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ Branch(USE_DELAY_SLOT, &patch_receiver);
    __ li(t0, Operand(0, RelocInfo::NONE32));  // In delay slot.

    __ bind(&use_global_proxy);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset    =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset     = 2 * kPointerSize;
  const int kRecvOffset     = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;
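  // kIndexOffset and kLimitOffset name the two expression-stack slots pushed
  // below (current index and copy limit); the other offsets locate the args
  // array, receiver and function that the caller pushed above fp.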

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (compiler hints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_proxy);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Advance the index in the frame (kept as a smi) to the next element.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
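    // Set the expected argument count (a2) to zero for the proxy call.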
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already have overflowed
  // here, which would make t1 negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
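  // a2 holds the expected count as a raw integer (not a smi), so shifting
  // by kPointerSizeLog2 alone yields the size in bytes.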
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
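  // fp now points at the saved fp slot; below it lie the ARGUMENTS_ADAPTOR
  // marker, the function and the smi-tagged argument count, which
  // LeaveArgumentsAdaptorFrame reads back when tearing the frame down.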
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver from the stack.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------
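  // The adaptor bridges a mismatch between actual and expected argument
  // counts: surplus arguments are left in the caller's frame, missing ones
  // are filled with undefined, and the actual (smi-tagged) count is saved
  // in the adaptor frame so the frame can be torn down correctly.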

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);
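    // a0 points at the caller's receiver slot (the highest argument); the
    // copy loop works downwards from there until a0 reaches a2, copying the
    // receiver plus the expected number of arguments.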

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));
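    // a2 now marks the address of the lowest expected-argument slot in the
    // new frame; the fill loop below pushes undefined until sp reaches it.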

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
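  // The deoptimizer uses this offset when it materializes adaptor frames,
  // so rebuilt frames return to the instruction following the call above.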

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
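    // STACK_OVERFLOW throws, so the break below should never be reached.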
    __ break_(0xCC);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS