// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/counters.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ LoadAddress(rbx, ExternalReference(address, masm->isolate()));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}

namespace {

void AdaptorWithExitFrameType(MacroAssembler* masm,
                              Builtins::ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rbx                 : entry point
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // CEntryStub expects rax to contain the number of arguments including the
  // receiver and the extra arguments.
  __ addp(rax, Immediate(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  // Unconditionally insert argc, target and new target as extra arguments. They
  // will be used by stack frame iterators when constructing the stack trace.
  __ PopReturnAddressTo(kScratchRegister);
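  // The argument count is tagged as a Smi for pushing (so the stack frame
  // iterator can read it) and then immediately untagged again for CEntryStub.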
  __ Integer32ToSmi(rax, rax);
  __ PushRoot(Heap::kTheHoleValueRootIndex);  // Padding.
  __ Push(rax);
  __ SmiToInteger32(rax, rax);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to the C entry runtime stub directly here instead of using
  // JumpToExternalReference because rbx is loaded by Generate_adaptor.
  CEntryStub ces(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                 exit_frame_type == Builtins::BUILTIN_EXIT);
  __ jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
}  // namespace

void Builtins::Generate_AdaptorWithExitFrame(MacroAssembler* masm) {
  AdaptorWithExitFrameType(masm, EXIT);
}

void Builtins::Generate_AdaptorWithBuiltinExitFrame(MacroAssembler* masm) {
  AdaptorWithExitFrameType(masm, BUILTIN_EXIT);
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
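  // Loads the code object attached to the SharedFunctionInfo of the function
  // in rdi and tail-calls into it, skipping past the Code object header.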
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
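    // The runtime call returns the Code object to enter in rax; keep a copy in
    // rbx while the saved registers are restored below.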
    __ movp(rbx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
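  // Enter the returned code object, skipping past its header.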
  __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);
}

namespace {

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ Integer32ToSmi(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);

    // The receiver for the builtin/api call.
    __ PushRoot(Heap::kTheHoleValueRootIndex);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    // ----------- S t a t e -------------
    //  --                rax: number of arguments (untagged)
    //  --                rdi: constructor function
    //  --                rdx: new target
    //  --                rbx: pointer to last argument
    //  --                rcx: counter
    //  -- sp[0*kPointerSize]: the hole (receiver)
    //  -- sp[1*kPointerSize]: number of arguments (tagged)
    //  -- sp[2*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    // rax: number of arguments (untagged)
    // rdi: constructor function
    // rdx: new target
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);

  __ ret(0);
}

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Generate_JSConstructStubGeneric(MacroAssembler* masm,
                                     bool restrict_constructor_return) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments (untagged)
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ Integer32ToSmi(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);
    __ Push(rdi);
    __ Push(rdx);

    // ----------- S t a t e -------------
    //  --         sp[0*kPointerSize]: new target
    //  -- rdi and sp[1*kPointerSize]: constructor function
    //  --         sp[2*kPointerSize]: argument count
    //  --         sp[3*kPointerSize]: context
    // -----------------------------------

    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testl(FieldOperand(rbx, SharedFunctionInfo::kCompilerHintsOffset),
             Immediate(SharedFunctionInfo::kDerivedConstructorMask));
    __ j(not_zero, &not_create_implicit_receiver);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);

    // ----------- S t a t e -------------
    //  -- rax                          implicit receiver
    //  -- Slot 3 / sp[0*kPointerSize]  new target
    //  -- Slot 2 / sp[1*kPointerSize]  constructor function
    //  -- Slot 1 / sp[2*kPointerSize]  number of arguments (tagged)
    //  -- Slot 0 / sp[3*kPointerSize]  context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rax);
    __ Push(rax);

    // ----------- S t a t e -------------
    //  -- sp[0*kPointerSize]  implicit receiver
    //  -- sp[1*kPointerSize]  implicit receiver
    //  -- sp[2*kPointerSize]  constructor function
    //  -- sp[3*kPointerSize]  number of arguments (tagged)
    //  -- sp[4*kPointerSize]  context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ SmiToInteger32(rax,
                      Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    // ----------- S t a t e -------------
    //  --                        rax: number of arguments (untagged)
    //  --                        rdx: new target
    //  --                        rbx: pointer to last argument
    //  --                        rcx: counter (tagged)
    //  --         sp[0*kPointerSize]: implicit receiver
    //  --         sp[1*kPointerSize]: implicit receiver
    //  -- rdi and sp[2*kPointerSize]: constructor function
    //  --         sp[3*kPointerSize]: number of arguments (tagged)
    //  --         sp[4*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  -- rax                 constructor result
    //  -- sp[0*kPointerSize]  implicit receiver
    //  -- sp[1*kPointerSize]  constructor function
    //  -- sp[2*kPointerSize]  number of arguments
    //  -- sp[3*kPointerSize]  context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, other_result, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &use_receiver,
                  Label::kNear);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &other_result, Label::kNear);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &leave_frame, Label::kNear);

    // The result is now neither undefined nor an object.
    __ bind(&other_result);
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
    __ testl(FieldOperand(rbx, SharedFunctionInfo::kCompilerHintsOffset),
             Immediate(SharedFunctionInfo::kClassConstructorMask));

    if (restrict_constructor_return) {
      // Throw if constructor function is a class constructor
      __ j(Condition::zero, &use_receiver, Label::kNear);
    } else {
      __ j(not_zero, &use_receiver, Label::kNear);
      __ CallRuntime(
          Runtime::kIncrementUseCounterConstructorReturnNonUndefinedPrimitive);
      __ jmp(&use_receiver, Label::kNear);
    }

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0 * kPointerSize));
    __ JumpIfRoot(rax, Heap::kTheHoleValueRootIndex, &do_throw);

    __ bind(&leave_frame);
    // Restore the arguments count.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
}  // namespace

void Builtins::Generate_JSConstructStubGenericRestrictedReturn(
    MacroAssembler* masm) {
  return Generate_JSConstructStubGeneric(masm, true);
}
void Builtins::Generate_JSConstructStubGenericUnrestrictedReturn(
    MacroAssembler* masm) {
  return Generate_JSConstructStubGeneric(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm) {
  // rax   : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(r11, rax);
  __ shlq(r11, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
// Platform specific argument handling. After this, the stack contains
// an internal frame and the pushed function and receiver, and
// register rax and rbx holds the argument count and argument array,
// while rdi holds the function pointer, rsi the context, and rdx the
// new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(IsolateAddressId::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else   // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(IsolateAddressId::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  __ ret(0);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rbx);

  // Store input value into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);

  // Load suspended function and context.
  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  Operand debug_hook_operand = masm->ExternalOperand(debug_hook);
  __ cmpb(debug_hook_operand, Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rbx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movl(rcx,
          FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
    __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
    __ Assert(equal, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movsxlq(rax, FieldOperand(
                        rax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ movp(rdx, rbx);
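    // Enter the code object attached to the generator function, skipping past
    // the Code object header.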
    __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ Push(rdi);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);
}

// TODO(juliana): if we remove the code below then we don't need all
// the parameters.
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store the optimized code in the closure.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ movp(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}

// Tail-call |function_id| if |smi_entry| == |marker|
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ SmiCompare(smi_entry, Smi::FromEnum(marker));
  __ j(not_equal, &no_match, Label::kNear);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee if needed, and caller)
  //  -- rdx : new target (preserved for callee if needed, and caller)
  //  -- rdi : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                     scratch3));

  Label optimized_code_slot_is_cell, fallthrough;

  Register closure = rdi;
  Register optimized_code_entry = scratch1;

  __ movp(optimized_code_entry,
          FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak cell to a code
  // object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ SmiCompare(optimized_code_entry,
                  Smi::FromEnum(OptimizationMarker::kNone));
    __ j(equal, &fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ SmiCompare(optimized_code_entry,
                      Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
        __ Assert(equal, kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a WeakCell.
    __ bind(&optimized_code_slot_is_cell);

    __ movp(optimized_code_entry,
            FieldOperand(optimized_code_entry, WeakCell::kValueOffset));
    __ JumpIfSmi(optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ testl(
        FieldOperand(optimized_code_entry, Code::kKindSpecificFlags1Offset),
        Immediate(1 << Code::kMarkedForDeoptimizationBit));
    __ j(not_zero, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure into
    // the optimized functions list, then tail call the optimized code.
    // The feedback vector is no longer used, so re-use it as a scratch
    // register.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    __ addp(optimized_code_entry,
            Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(optimized_code_entry);

    // Optimized code slot contains deoptimized code, evict it and re-enter the
    // closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation.
static void AdvanceBytecodeOffset(MacroAssembler* masm, Register bytecode_array,
                                  Register bytecode_offset, Register bytecode,
                                  Register scratch1) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address(masm->isolate()));

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label load_size, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  __ cmpb(bytecode, Immediate(0x1));
  __ j(above, &load_size, Label::kNear);
  __ j(equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&load_size, Label::kNear);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&load_size, Label::kNear);

  // Load the size of the current bytecode.
  __ bind(&load_size);
  __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the incoming new target or generator object
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  Register closure = rdi;
  Register feedback_vector = rbx;

  // Load the feedback vector from the closure.
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  Label maybe_load_debug_bytecode_array, bytecode_array_loaded;
  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  __ JumpIfNotSmi(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
                  &maybe_load_debug_bytecode_array);
  __ bind(&bytecode_array_loaded);

  // Increment invocation count for the function.
  __ incl(
      FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rax, rsp);
    __ subp(rax, rcx);
    __ CompareRoot(rax, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in rdx.
  Label no_incoming_new_target_or_generator_register;
  __ movsxlq(
      rax,
      FieldOperand(kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ testl(rax, rax);
  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
  __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ call(rbx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Check if we should return.
  Label do_return;
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ cmpb(rbx, Immediate(static_cast<int>(interpreter::Bytecode::kReturn)));
  __ j(equal, &do_return, Label::kNear);

  // Advance to the next bytecode and dispatch.
  AdvanceBytecodeOffset(masm, kInterpreterBytecodeArrayRegister,
                        kInterpreterBytecodeOffsetRegister, rbx, rcx);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  // Load debug copy of the bytecode array if it exists.
  // kInterpreterBytecodeArrayRegister is already loaded with
  // SharedFunctionInfo::kFunctionDataOffset.
  __ bind(&maybe_load_debug_bytecode_array);
  __ movp(rcx, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ SmiToInteger32(kScratchRegister,
                    FieldOperand(rcx, DebugInfo::kFlagsOffset));
  __ testl(kScratchRegister, Immediate(DebugInfo::kHasBreakInfo));
  __ j(zero, &bytecode_array_loaded);
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rcx, DebugInfo::kDebugBytecodeArrayOffset));
  __ jmp(&bytecode_array_loaded);
}

static void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(scratch, rsp);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ subp(scratch, kScratchRegister);
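  // Convert the remaining space from bytes to stack slots so it can be
  // compared directly against num_args.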
  __ sarp(scratch, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args,
                                         Register start_address,
                                         Register scratch) {
  // Find the address of the last argument.
  __ Move(scratch, num_args);
  __ shlp(scratch, Immediate(kPointerSizeLog2));
  __ negp(scratch);
  __ addp(scratch, start_address);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ subp(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(start_address, scratch);
  __ j(greater, &loop_header, Label::kNear);
}

// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ Move(rcx, rax);
  __ addp(rcx, Immediate(1));  // Add one for receiver.

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ subp(rcx, Immediate(1));  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ subp(rax, Immediate(1));  // Subtract one for spread
  }

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(receiver_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback if available, undefined otherwise
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ subp(rax, Immediate(1));  // Subtract one for spread

    // Push return address in preparation for the tail-call.
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    __ PushReturnAddressFrom(kScratchRegister);
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  if (mode == InterpreterPushArgsMode::kJSFunction) {
    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ AssertFunction(rdi);

    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
    __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
    // Jump to the constructor function (rax, rbx, rdx passed on).
    __ jmp(rcx);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(rbx, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
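  // rbx now holds the address inside InterpreterEntryTrampoline that bytecode
  // handlers return to; push it as the return address for the dispatch below.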
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ jmp(rbx);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));

  // Advance to the next bytecode.
  AdvanceBytecodeOffset(masm, kInterpreterBytecodeArrayRegister,
                        kInterpreterBytecodeOffsetRegister, rbx, rcx);

  // Convert new bytecode offset to a Smi and save in the stackframe.
  __ Integer32ToSmi(rbx, kInterpreterBytecodeOffsetRegister);
  __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);

  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Register closure = rdi;

  // Get the feedback vector.
  Register feedback_vector = rbx;
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));

  // The feedback vector must be defined.
  if (FLAG_debug_code) {
    __ CompareRoot(feedback_vector, Heap::kUndefinedValueRootIndex);
    __ Assert(not_equal, BailoutReason::kExpectedFeedbackVector);
  }

  // Is there an optimization marker or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Otherwise, tail call the SFI code.
  GenerateTailCallToSharedCode(masm);
}

// TODO(jupvfranco): investigate whether there is any case where the CompileLazy
// builtin does not set the code field in the JS function. If there isn't then
// we do not need this builtin and can jump directly to CompileLazy.
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
  // Set the code slot inside the JSFunction to the trampoline to the
  // interpreter entry.
  __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ movq(FieldOperand(rdi, JSFunction::kCodeOffset), rcx);
  __ RecordWriteField(rdi, JSFunction::kCodeOffset, rcx, r15, kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // Jump to compile lazy.
  Generate_CompileLazy(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;

  Register closure = rdi;
1325
  Register feedback_vector = rbx;
1326 1327

  // Do we have a valid feedback vector?
1328 1329 1330 1331 1332
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex,
                &gotta_call_runtime);
1333

1334 1335
  // Is there an optimization marker or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);
1336

1337
  // We found no optimized code.
1338
  Register entry = rcx;
1339
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1340 1341

  // If SFI points to anything other than CompileLazy, install that.
1342
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
1343 1344
  __ Move(rbx, masm->CodeObject());
  __ cmpp(entry, rbx);
1345
  __ j(equal, &gotta_call_runtime);
1346 1347

  // Install the SFI's code entry.
1348 1349 1350 1351
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), entry);
  __ movp(r14, entry);  // Write barrier clobbers r14 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, r14, r15,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1352 1353 1354 1355
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
1356
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1357
}
1358

1359 1360 1361 1362 1363 1364 1365 1366 1367 1368 1369 1370
// Lazy deserialization design doc: http://goo.gl/dxkYDZ.
void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------

  Label deserialize_in_runtime;

  Register target = rdi;  // Must be preserved
  Register scratch0 = rbx;
  Register scratch1 = r12;

  CHECK(scratch0 != rax && scratch0 != rdx && scratch0 != rdi);
  CHECK(scratch1 != rax && scratch1 != rdx && scratch1 != rdi);
  CHECK(scratch0 != scratch1);

  // Load the builtin id for lazy deserialization from SharedFunctionInfo.

  __ AssertFunction(target);
  __ movp(scratch0,
          FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));

  __ movp(scratch1,
          FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
  __ AssertSmi(scratch1);

  // The builtin may already have been deserialized. If that is the case, it is
  // stored in the builtins table, and we can copy the correct code object to
  // both the shared function info and function without calling into runtime.
  //
  // Otherwise, we need to call into runtime to deserialize.

  {
    // Load the code object at builtins_table[builtin_id] into scratch1.

    __ SmiToInteger32(scratch1, scratch1);
    __ Move(scratch0, ExternalReference::builtins_address(masm->isolate()));
    __ movp(scratch1, Operand(scratch0, scratch1, times_pointer_size, 0));

    // Check if the loaded code object has already been deserialized. This is
    // the case iff it does not equal DeserializeLazy.

    __ Move(scratch0, masm->CodeObject());
    __ cmpp(scratch1, scratch0);
    __ j(equal, &deserialize_in_runtime);
  }

  {
    // If we've reached this spot, the target builtin has been deserialized and
    // we simply need to copy it over. First to the shared function info.

    Register target_builtin = scratch1;
    Register shared = scratch0;

    __ movp(shared,
            FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));

    CHECK(r14 != target && r14 != scratch0 && r14 != scratch1);
    CHECK(r15 != target && r15 != scratch0 && r15 != scratch1);

    __ movp(FieldOperand(shared, SharedFunctionInfo::kCodeOffset),
            target_builtin);
    __ movp(r14, target_builtin);  // Write barrier clobbers r14 below.
    __ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, r14, r15,
                        kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // And second to the target function.

    __ movp(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
    __ movp(r14, target_builtin);  // Write barrier clobbers r14 below.
    __ RecordWriteField(target, JSFunction::kCodeOffset, r14, r15,
                        kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // All copying is done. Jump to the deserialized code object.

    __ leap(target_builtin, FieldOperand(target_builtin, Code::kHeaderSize));
    __ jmp(target_builtin);
  }

  __ bind(&deserialize_in_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kDeserializeLazy);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ movp(rcx, rax);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
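    // For each possible argument count j (0..3) push the caller-provided
    // arguments and pad with undefined so the runtime call below always
    // receives exactly three values (stdlib, foreign, heap), regardless of
    // the actual argument count in rcx.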
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpp(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    __ Drop(2);
    __ Pop(rcx);
    __ SmiToInteger32(rcx, rcx);
    scope.GenerateLeaveFrame();

    __ PopReturnAddressTo(rbx);
    __ incp(rcx);
    __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // On failure, tail call back to regular JS by re-calling the function
  // which has been reset to the compile lazy builtin.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);
}

namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
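  // The deoptimizer has built a continuation frame on the stack that holds
  // the values of all allocatable registers and, when a result is expected,
  // a slot for the return value. Restore the registers from that frame and
  // then "return" into the continuation builtin.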
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value from
    // the LAZY deopt point.
    __ movq(Operand(rsp,
                    config->num_allocatable_general_registers() * kPointerSize +
                        BuiltinContinuationFrameConstants::kFixedFrameSize),
            rax);
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ popq(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiToInteger32(Register::from_code(code), Register::from_code(code));
    }
  }
  __ movq(
      rbp,
      Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
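  // The top of the remaining frame holds the Code object of the builtin to
  // continue in. Move it into the slot that becomes the return address, drop
  // the rest of the frame, adjust it to the code entry address, and return
  // to it.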
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
  __ popq(Operand(rsp, offsetToPC));
  __ Drop(offsetToPC / kPointerSize);
  __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ Ret();
}
}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }
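
  // Restore the accumulator: the deoptimizer left the accumulator value in
  // the stack slot just above the return address; load it back into rax and
  // drop that slot when returning.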
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize));
  __ ret(1 * kPointerSize);  // Remove rax.
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rbx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argArray
  //  -- rdi     : receiver
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rbx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);              // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argumentsList
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);                         // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argumentsList
  //  -- rdx     : new.target
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  __ movp(rdx, rdi);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
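  // After this prologue the adaptor frame looks like this (a rough sketch;
  // see ArgumentsAdaptorFrameConstants for the authoritative offsets):
  //   rbp[+8] : return address
  //   rbp[ 0] : saved rbp
  //   rbp[-8] : ARGUMENTS_ADAPTOR frame marker
  //   rbp[-16]: function (rdi)
  //   rbp[-24]: actual argument count as a Smi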
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ Integer32ToSmi(rdx, rdx);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
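  // Clear the context register; CEntryStub uses rsi to set the current
  // context on the isolate, and this runtime function does not need one.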
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ Integer32ToSmi(rdx, rdx);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(rcx);
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : message_id as Smi
  //  -- rsp[0] : return address
  // -----------------------------------
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

// static
void Builtins::Generate_AbortJS(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : message as String object
  //  -- rsp[0] : return address
  // -----------------------------------
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbortJS);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // The registers rcx and r8 will be modified. The register rbx is only read.
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
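    // rax is set to point at the receiver slot in the caller's frame; the
    // copy loop below walks downwards from there, and r8 starts at -1 so
    // that the receiver is copied along with the expected arguments.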
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    // The registers rcx and r8 will be modified. The register rbx is only read.
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdx : new target (passed through to callee)
  // rdi : function (passed through to callee)
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}

// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rax    : number of parameters on the stack (not including the receiver)
  //  -- rbx    : arguments list (a FixedArray)
  //  -- rcx    : len (number of elements to push from args)
  //  -- rdx    : new.target (for [[Construct]])
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFixedArray(rbx);

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(r8, rsp);
    // Make r8 the space we have left. The stack might already be overflowed
    // here which will cause r8 to become negative.
    __ subp(r8, kScratchRegister);
    __ sarp(r8, Immediate(kPointerSizeLog2));
    // Check if the arguments will overflow the stack.
    __ cmpp(r8, rcx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Push additional arguments onto the stack.
  {
    __ PopReturnAddressTo(r8);
    __ Set(r9, 0);
    Label done, push, loop;
    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ movp(r11,
            FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
    __ CompareRoot(r11, Heap::kTheHoleValueRootIndex);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(r11);
    __ incl(r9);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    __ addq(rax, r9);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (for [[Construct]] calls)
  //  -- rdi : the target to call (can be any Object)
  //  -- rcx : start index (to support rest parameters)
  // -----------------------------------

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsConstructor));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(rdx);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ movl(r8,
            FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movp(rbx, rbp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiToInteger32(
        r8, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
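  // r8 holds the caller's actual argument count; subtract the start index so
  // that r8 becomes the number of arguments left to forward. If none remain,
  // skip the copy loop.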
  __ subl(r8, rcx);
  __ j(less_equal, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addl(rax, r8);
      __ PopReturnAddressTo(rcx);
      __ bind(&loop);
      {
        StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
        __ Push(args.GetArgumentOperand(0));
        __ decl(r8);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(rcx);
    }
  }
  __ jmp(&stack_done, Label::kNear);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kCompilerHintsOffset),
           Immediate(SharedFunctionInfo::kClassConstructorMask));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kCompilerHintsOffset),
           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ Integer32ToSmi(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        __ Push(rsi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(rsi);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiToInteger32(rax, rax);
      }
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  __ movsxlq(
      rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into rcx and length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack.
    {
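      // The stack was just grown by |rbx| slots. Copy the return address and
      // the pushed arguments down into the new space; the gap this leaves
      // below the receiver is filled with the [[BoundArguments]] afterwards.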
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(rdi, &non_callable);
  __ bind(&non_smi);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, BUILTIN_CODE(masm->isolate(), CallBoundFunction),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  // Check if target is a proxy and call CallProxy external builtin
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs require
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(rdi, &non_constructor, Label::kNear);

  // Dispatch based on instance type.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, BUILTIN_CODE(masm->isolate(), ConstructFunction),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }
  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}

static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx,
                    Operand(rbx, FixedArray::OffsetOfElementAt(
                                     DeoptimizationData::kOsrPcOffsetIndex) -
                                     kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    constexpr Register gp_regs[]{rax, rbx, rcx, rdx, rsi, rdi};
    constexpr XMMRegister xmm_regs[]{xmm1, xmm2, xmm3, xmm4, xmm5, xmm6};

    for (auto reg : gp_regs) {
      __ Push(reg);
    }
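    // There is no push instruction for XMM registers, so reserve 16 bytes per
    // register and spill them manually.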
    __ subp(rsp, Immediate(16 * arraysize(xmm_regs)));
    for (int i = 0, e = arraysize(xmm_regs); i < e; ++i) {
      __ movdqu(Operand(rsp, 16 * i), xmm_regs[i]);
    }

    // Initialize rsi register with kZero, CEntryStub will use it to set the
    // current context on the isolate.
    __ Move(rsi, Smi::kZero);
    __ CallRuntime(Runtime::kWasmCompileLazy);
    // Store returned instruction start in r11.
    __ leap(r11, FieldOperand(rax, Code::kHeaderSize));

    // Restore registers.
    for (int i = arraysize(xmm_regs) - 1; i >= 0; --i) {
      __ movdqu(xmm_regs[i], Operand(rsp, 16 * i));
    }
    __ addp(rsp, Immediate(16 * arraysize(xmm_regs)));
    for (int i = arraysize(gp_regs) - 1; i >= 0; --i) {
      __ Pop(gp_regs[i]);
    }
  }
  // Now jump to the instructions of the returned code object.
  __ jmp(r11);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64