// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
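

// A rough pseudo-C sketch of the return-address shuffle used above to insert
// an extra argument below the return address (illustrative only):
//
//   ret_addr = pop();       // PopReturnAddressTo(kScratchRegister)
//   push(called_function);  // the extra argument (rdi)
//   push(ret_addr);         // PushReturnAddressFrom(kScratchRegister)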


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(rdi);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: original constructor
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    __ Push(rdi);
    __ Push(rdx);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ cmpp(rdx, rdi);
      __ j(not_equal, &rt_call);

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi
      DCHECK(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);
      if (!is_api_function) {
        Label allocate;
        // The code below relies on these assumptions.
        STATIC_ASSERT(Map::Counter::kShift + Map::Counter::kSize == 32);
        // Check if slack tracking is enabled.
        __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
        __ shrl(rsi, Immediate(Map::Counter::kShift));
        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(less, &allocate);
        // Decrease generous allocation count.
        __ subl(FieldOperand(rax, Map::kBitField3Offset),
                Immediate(1 << Map::Counter::kShift));

        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(not_equal, &allocate);

        __ Push(rax);
        __ Push(rdx);
        __ Push(rdi);

        __ Push(rdi);  // constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(rdi);
        __ Pop(rdx);
        __ Pop(rax);
        __ movl(rsi, Immediate(Map::kSlackTrackingCounterEnd - 1));

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shlp(rdi, Immediate(kPointerSizeLog2));
      if (create_memento) {
        __ addp(rdi, Immediate(AllocationMemento::kSize));
      }
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      Factory* factory = masm->isolate()->factory();
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object (including memento if create_memento)
      __ movp(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object (including memento if create_memento)
      // rsi: slack tracking counter (non-API function case)
      __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(less, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ movzxbp(
            rsi,
            FieldOperand(
                rax, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset));
        __ movzxbp(rax, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
        __ subp(rsi, rax);
        __ leap(rsi,
                Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpp(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);

        // Fill in memento fields if necessary.
        // rsi: points to the allocated but uninitialized memento.
        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
                factory->allocation_memento_map());
        // Get the cell or undefined.
        __ movp(rdx, Operand(rsp, 3 * kPointerSize));
        __ AssertUndefinedOrAllocationSite(rdx);
        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
      } else {
        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      // rbx: JSObject (untagged)
      __ orp(rbx, Immediate(kHeapObjectTag));

      // Continue with JSObject being successfully allocated
      // rbx: JSObject (tagged)
      __ jmp(&allocated);
    }

    // Allocate the new receiver object using the runtime call.
    // rdx: original constructor
    __ bind(&rt_call);
    int offset = kPointerSize;
    if (create_memento) {
      // Get the cell or allocation site.
      __ movp(rdi, Operand(rsp, kPointerSize * 3));
      __ Push(rdi);  // argument 1: allocation site
      offset += kPointerSize;
    }

    // Must restore rsi (context) and rdi (constructor) before calling runtime.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ movp(rdi, Operand(rsp, offset));
    __ Push(rdi);  // argument 2/1: constructor function
    __ Push(rdx);  // argument 3/2: original constructor
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ movp(rbx, rax);  // store result in rbx

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);

    if (create_memento) {
      __ movp(rcx, Operand(rsp, 3 * kPointerSize));
      __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
      __ j(equal, &count_incremented);
      // rcx is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ SmiAddConstant(
          FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
          Smi::FromInt(1));
      __ bind(&count_incremented);
    }

    // Restore the parameters.
    __ Pop(rdx);
    __ Pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movp(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push new.target onto the construct frame. This is stored just below the
    // receiver on the stack.
    __ Push(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rbx);
    __ Push(rbx);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame. The arguments
    // count is stored below the receiver and the new.target.
    __ bind(&exit);
    __ movp(rbx, Operand(rsp, 2 * kPointerSize));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}
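

// The result selection at the end of the construct stub above follows
// ECMA-262 section 13.2.2-7; a pseudo-JS sketch (illustrative only):
//
//   var result = ctor.apply(receiver, args);
//   return IsSpecObject(result) ? result : receiver;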


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: original constructor
  // -----------------------------------

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    // Preserve allocation site.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    __ SmiToInteger32(rax, rax);

    // Push new.target
    __ Push(rdx);

    // receiver is the hole.
    __ Push(masm->isolate()->factory()->the_hole_value());

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ Move(kScratchRegister, debug_step_in_fp);
    __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
    __ j(equal, &skip_step_in);

    __ Push(rax);
    __ Push(rdi);
    __ Push(rdi);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(rdi);
    __ Pop(rax);

    __ bind(&skip_step_in);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // Get arguments count, skipping over new.target.
    __ movp(rbx, Operand(rsp, kPointerSize));  // Get arguments count.
  }                                            // Leave construct frame.

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}


enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };


// Clobbers rcx, rdx, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset,
                                        IsTagged rax_is_tagged) {
  // rax   : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(rdx, rax);
    __ shlq(rdx, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ Push(Operand(rbp, calleeOffset));
  if (rax_is_tagged == kRaxIsUntaggedInt) {
    __ Integer32ToSmi(rax, rax);
  }
  __ Push(rax);
  __ CallRuntime(Runtime::kThrowStackOverflow, 0);

  __ bind(&okay);
}
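

// The check above computes, in effect (a sketch; "item_count" is rax, either
// smi-tagged or untagged depending on rax_is_tagged):
//
//   intptr_t space_left = rsp - real_stack_limit;      // may be negative
//   intptr_t space_needed = item_count << kPointerSizeLog2;
//   if (space_left <= space_needed) ThrowStackOverflow();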


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ Push(rdi);
    __ Push(rdx);
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kRegisterSize;
    // Expects argument count in rax. Clobbers rcx, rdx.
    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
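

// The trampoline above amounts to the following for both ABIs (a sketch;
// argv is an array of handles, hence the extra dereference):
//
//   for (int i = 0; i < argc; i++) push(*argv[i]);
//   is_construct ? construct(function, args) : call(function, args);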


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object and load the pointer to
  // the first entry into kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }
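
  // The loop above is, in effect (a sketch; rcx starts at the frame size in
  // bytes):
  //
  //   for (int i = 0; i < frame_size / kPointerSize; i++) push(undefined);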

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done using
  // explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.

  // Perform stack guard check.
  {
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ subp(
      kInterpreterRegisterFileRegister,
      Immediate(kPointerSize + StandardFrameConstants::kFixedFrameSizeFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addp(kInterpreterDispatchTableRegister,
          Immediate(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rbx);
}
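

// The dispatch sequence at the end amounts to (a sketch, untagged math):
//
//   byte bytecode = bytecode_array[bytecode_offset];
//   Code* handler = dispatch_table[bytecode];
//   call(handler + Code::kHeaderSize - kHeapObjectTag);  // skip the header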


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in rax.

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments and return.
  __ movl(rbx, FieldOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, rbx);
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(rdi);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// static
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);              // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
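

// Step 3 above shifts the frame one slot, in effect (a sketch; slot[i] is
// rsp + i * kPointerSize, so slot[1] is the last argument and slot[n + 1]
// the receiver):
//
//   for (int i = n + 1; i > 1; i--) slot[i] = slot[i - 1];  // receiver lost
//   drop the now-duplicate slot under the return address; rax = n - 1;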


static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movp(key, Operand(rbp, indexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movp(receiver, Operand(rbp, argumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
  FeedbackVectorSpec spec(0, 1, kinds);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ Move(slot, Smi::FromInt(index));
  __ Move(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call.  A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load.  In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ Push(rax);

  // Update the index on the stack and in register key.
  __ movp(key, Operand(rbp, indexOffset));
  __ SmiAddConstant(key, key, Smi::FromInt(1));
  __ movp(Operand(rbp, indexOffset), key);

  __ bind(&entry);
  __ cmpp(key, Operand(rbp, limitOffset));
  __ j(not_equal, &loop);

  // On exit, the pushed arguments count is in rax, untagged
  __ SmiToInteger64(rax, key);
}
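

// The copy loop above is, in pseudo-JS (a sketch; each element load actually
// goes through a KEYED_LOAD_IC, not a plain property access):
//
//   while (index !== limit) stack.push(arguments[index++]);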


// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : arguments
  // rsp[16] : receiver ("this")
  // rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : function arguments
    // rbp[24] : receiver
    // rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    if (targetIsArgument) {
      __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);

    // Push current index and limit, and receiver.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);                            // limit
    __ Push(Immediate(0));                   // index
    __ Push(Operand(rbp, kReceiverOffset));  // receiver

    // Loop over the arguments array, pushing each value to the stack
    Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
                                  kLimitOffset);

    // Call the callable.
    // TODO(bmeurer): This should be a tail call according to ES6.
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(kStackSize * kPointerSize);  // remove this, receiver, and arguments
}


// Used by ReflectConstruct
static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : original constructor (new.target)
  // rsp[16] : arguments
  // rsp[24] : constructor
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : original constructor (new.target)
    // rbp[24] : arguments
    // rbp[32] : constructor
    static const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    static const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ movp(rax, Operand(rbp, kNewTargetOffset));
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &validate_arguments, Label::kNear);
    __ movp(rax, Operand(rbp, kFunctionOffset));
    __ movp(Operand(rbp, kNewTargetOffset), rax);

    // Validate arguments
    __ bind(&validate_arguments);
    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    __ Push(Operand(rbp, kNewTargetOffset));
    __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
                     CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);  // limit
    __ Push(Immediate(0));  // index
    // Push the constructor function as callee.
    __ Push(Operand(rbp, kFunctionOffset));

    // Loop over the arguments array, pushing each value to the stack
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ movp(rcx, Operand(rbp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  // remove this, target, arguments and newTarget
  __ ret(kStackSize * kPointerSize);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  __ movp(rdx, rdi);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rbx and get rid of the rest (including the
  // receiver).
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 2. Make sure rbx is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(rbx, &convert, Label::kNear);
    __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rdx);
    __ j(below, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(rdi);
      __ Move(rax, rbx);
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
    }
    __ bind(&done_convert);
  }

  // 3. Allocate a JSValue wrapper for the string.
  {
    // ----------- S t a t e -------------
    //  -- rbx : the first argument
    //  -- rdi : constructor function
    // -----------------------------------

    Label allocate, done_allocate;
    __ Allocate(JSValue::kSize, rax, rcx, no_reg, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Initialize the JSValue in rax.
    __ LoadGlobalFunctionInitialMap(rdi, rcx);
    __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
    __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
    STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
    __ Ret();

    // Fallback to the runtime to allocate in new space.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rbx);
      __ Push(rdi);
      __ Push(Smi::FromInt(JSValue::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
      __ Pop(rdi);
      __ Pop(rbx);
    }
    __ jmp(&done_allocate);
  }
}


static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi: function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, rdx);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(rdx, rbx);
  __ shlp(rdx, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}
1431 1432


1433 1434
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
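  // SmiToIndex untags the Smi count into a register/scale pair for the
  // addressing mode below; the extra 1 * kPointerSize drops the receiver.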
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi: function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label stack_overflow;
  ArgumentsAdaptorStackCheck(masm, &stack_overflow);

  Label enough, too_few;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
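    // rax now points at the receiver slot in the caller's frame; the loop
    // below copies the receiver and the expected number of arguments
    // downward from there.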
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ movp(kScratchRegister,
            FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(kScratchRegister,
                          SharedFunctionInfo::kStrongModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
    __ j(equal, &no_strong_error, Label::kNear);

    // What we really care about is the required number of arguments.

    if (kPointerSize == kInt32Size) {
      __ movp(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ SmiToInteger32(kScratchRegister, kScratchRegister);
    } else {
      // See comment near kLengthOffset in src/objects.h
      __ movsxlq(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ shrq(kScratchRegister, Immediate(1));
    }

    __ cmpp(rax, kScratchRegister);
    __ j(greater_equal, &no_strong_error, Label::kNear);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
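      // kThrowStrongModeTooFewArguments throws, so execution never
      // returns here and the manual frame is not torn down.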
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
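    // rdi is reused as the copy cursor here; the function is reloaded
    // from the frame once the copy and fill loops are done.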
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdi: function (passed through to callee)
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
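  // rdx still holds the code entry loaded above and rax the actual
  // argument count, so the callee runs without an adaptor frame.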

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
    __ int3();
  }
}
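// For illustration only (JavaScript, not generated code): given
//   function f(a, b) { return b; }
// f(1) takes the "too few" path above, so b reads undefined, while
// f(1, 2, 3) takes the "enough" path and the extra argument is dropped
// when the adaptor frame is torn down.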


// static
void Builtins::Generate_CallFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  Label convert, convert_global_proxy, convert_to_object, done_convert;
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);
  // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
  // slot is "classConstructor".
  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
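  // (The STATIC_ASSERT above guarantees that the native and strict-mode
  // bits live in the same byte, so a single testb checks both.)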
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
           Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    __ movp(rcx, args.GetReceiverOperand());

    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rcx : the receiver
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    Label convert_receiver;
    __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
    __ j(above_equal, &done_convert);
    __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex, &convert_global_proxy,
                  Label::kNear);
    __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                     Label::kNear);
    __ bind(&convert_global_proxy);
    {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    }
    __ jmp(&convert_receiver);
    __ bind(&convert_to_object);
    {
      // Convert receiver using ToObject.
      // TODO(bmeurer): Inline the allocation here to avoid building the frame
      // in the fast case? (fall back to AllocateInNewSpace?)
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Push(rax);
      __ Push(rdi);
      __ movp(rax, rcx);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rcx, rax);
      __ Pop(rdi);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
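    // The stub call may clobber rdx, so reload the shared function info.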
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ bind(&convert_receiver);
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  __ LoadSharedFunctionInfoSpecialField(
      rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);
  __ InvokeCode(rdx, expected, actual, JUMP_FUNCTION, NullCallWrapper());
}
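// For illustration only (JavaScript, not generated code): for a sloppy
// mode function
//   function f() { return this; }
// f() sees an undefined receiver, which is patched to the global proxy
// above, and f.call(42) gets its primitive receiver boxed via ToObject;
// strict-mode and native functions receive "this" unchanged.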


// static
void Builtins::Generate_Call(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------

  Label non_smi, non_function;
  __ JumpIfSmi(rdi, &non_function);
  __ bind(&non_smi);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 1. Call to function proxy.
  // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
  __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kCallTrapOffset));
  __ AssertNotSmi(rdi);
  __ jmp(&non_smi);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
  // be awesome instead; i.e. a trivial improvement would be to call into the
  // runtime and just deal with the API function there instead of returning a
  // delegate from a runtime call that just jumps back to the runtime once
  // called. Or, bonus points, call directly into the C API function here, as
  // we do in some Crankshaft fast cases.
  StackArgumentsAccessor args(rsp, rax);
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  {
    // Determine the delegate for the target (if any).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    __ Push(rdi);
    __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
    __ movp(rdi, rax);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // The delegate is always a regular function.
  __ AssertFunction(rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ Pop(rdx);

  // Find the address of the last argument.
  __ movp(rcx, rax);
  __ addp(rcx, Immediate(1));  // Add one for receiver.
  __ shlp(rcx, Immediate(kPointerSizeLog2));
  __ negp(rcx);
  __ addp(rcx, rbx);
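  // rcx now points one slot below the last slot to push and serves as the
  // loop sentinel; rax + 1 slots (the arguments plus the receiver) are
  // copied.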

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(rbx, 0));
  __ subp(rbx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(rbx, rcx);
  __ j(greater, &loop_header, Label::kNear);

  // Call the target.
  __ Push(rdx);  // Re-push return address.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64