// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/api/api-arguments.h"
#include "src/base/bits-iterator.h"
#include "src/base/iterator.h"
#include "src/codegen/code-factory.h"
// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/register-configuration.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frame-constants.h"
#include "src/execution/frames.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"
#include "src/objects/cell.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-generator.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address) {
  __ Move(kJavaScriptCallExtraArg1Register,
          Immediate(ExternalReference::Create(address)));
  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
          RelocInfo::CODE_TARGET);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);
    // Function is also the parameter to the runtime call.
    __ push(edi);

    __ CallRuntime(function_id, 1);
    __ mov(ecx, eax);

    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
  }

  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ JumpCodeObject(ecx);
}

namespace {

enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };

void CompareStackLimit(MacroAssembler* masm, Register with,
                       StackLimitKind kind) {
  DCHECK(masm->root_array_available());
  Isolate* isolate = masm->isolate();
  // Address through the root register. No load is needed.
  ExternalReference limit =
      kind == StackLimitKind::kRealStackLimit
          ? ExternalReference::address_of_real_jslimit(isolate)
          : ExternalReference::address_of_jslimit(isolate);
  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));

  intptr_t offset =
      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
  __ cmp(with, Operand(kRootRegister, offset));
}

void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                 Register scratch, Label* stack_overflow,
                                 bool include_receiver = false) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_jslimit(masm->isolate());
  // Compute the space that is left as a negative number in scratch. If
  // we already overflowed, this will be a positive number.
  __ mov(scratch, __ ExternalReferenceAsOperand(real_stack_limit, scratch));
  __ sub(scratch, esp);
  // Add the size of the arguments.
  static_assert(kSystemPointerSize == 4,
                "The next instruction assumes kSystemPointerSize == 4");
  __ lea(scratch, Operand(scratch, num_args, times_system_pointer_size, 0));
  if (include_receiver) {
    __ add(scratch, Immediate(kSystemPointerSize));
  }
  // See if we overflowed, i.e. scratch is positive.
  __ cmp(scratch, Immediate(0));
  __ j(greater, stack_overflow);  // Signed comparison.
}
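
// (Worked example of the check above, with illustrative numbers only: given
// kSystemPointerSize == 4, a real stack limit of 0x1000, esp == 0x1100 and
// num_args == 70, scratch becomes 0x1000 - 0x1100 + 70 * 4 = 24 > 0, i.e.
// pushing the arguments would drop esp below the limit, so we branch to the
// bailout.)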

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- edi: constructor function
  //  -- edx: new target
  //  -- esi: context
  // -----------------------------------

  Label stack_overflow;

  Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(eax);
    __ push(esi);
    __ push(eax);
    __ SmiUntag(eax);

#ifdef V8_REVERSE_JSARGS
    // Set up pointer to first argument (skip receiver).
    __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset +
                                 kSystemPointerSize));
    // Copy arguments to the expression stack.
    __ PushArray(esi, eax, ecx);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);
#else
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);
    // Set up pointer to last argument. We are using esi as scratch register.
    __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
    // Copy arguments to the expression stack.
    __ PushArray(esi, eax, ecx);
#endif

    // Call the function.
    // eax: number of arguments (untagged)
    // edi: constructor function
    // edx: new target
    // Reload context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    __ InvokeFunction(edi, edx, eax, CALL_FUNCTION);

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, edx, times_half_system_pointer_size,
                      1 * kSystemPointerSize));  // 1 ~ receiver
  __ PushReturnAddressFrom(ecx);
  __ ret(0);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}
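
// (Note on the argument-popping idiom above: edx holds the Smi-tagged
// argument count, and on 32-bit targets a Smi is the value shifted left by
// kSmiTagSize == 1. Scaling the tagged value by times_half_system_pointer_size
// therefore drops exactly count * kSystemPointerSize bytes from the stack,
// while the extra kSystemPointerSize displacement drops the receiver slot.)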

}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments (untagged)
  //  -- edi: constructor function
  //  -- edx: new target
  //  -- esi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ mov(ecx, eax);
    __ SmiTag(ecx);
    __ Push(esi);
    __ Push(ecx);
    __ Push(edi);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ Push(edx);

    // ----------- S t a t e -------------
    //  --         sp[0*kSystemPointerSize]: new target
    //  --         sp[1*kSystemPointerSize]: padding
    //  -- edi and sp[2*kSystemPointerSize]: constructor function
    //  --         sp[3*kSystemPointerSize]: argument count
    //  --         sp[4*kSystemPointerSize]: context
    // -----------------------------------

    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kFlagsOffset));
    __ DecodeField<SharedFunctionInfo::FunctionKindBits>(eax);
    __ JumpIfIsInRange(eax, kDefaultDerivedConstructor, kDerivedConstructor,
                       ecx, &not_create_implicit_receiver, Label::kNear);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        eax);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use TheHoleValue as receiver for constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(eax, RootIndex::kTheHoleValue);

    // ----------- S t a t e -------------
    //  --                         eax: implicit receiver
    //  -- Slot 4 / sp[0*kSystemPointerSize]: new target
    //  -- Slot 3 / sp[1*kSystemPointerSize]: padding
    //  -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
    //  -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments (tagged)
    //  -- Slot 0 / sp[4*kSystemPointerSize]: context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(edx);

    // Push the allocated receiver to the stack.
    __ Push(eax);

#ifdef V8_REVERSE_JSARGS
    // We need two copies because we may have to return the original one
    // and the calling conventions dictate that the called function pops the
    // receiver. The second copy is pushed after the arguments; we save it in
    // xmm0 since eax needs to hold the number of arguments before invoking
    // the function.
    __ movd(xmm0, eax);

    // Set up pointer to first argument (skip receiver).
    __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset +
                                 kSystemPointerSize));
#else
    // We need two copies because we may have to return the original one
    // and the calling conventions dictate that the called function pops the
    // receiver.
    __ Push(eax);

    // Set up pointer to last argument.
    __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
#endif

    // Restore argument count.
    __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(eax);

    // Check if we have enough stack space to push all arguments.
    // Argument count in eax. Clobbers ecx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
    __ jmp(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the expression stack.
    __ PushArray(edi, eax, ecx);

#ifdef V8_REVERSE_JSARGS
    // Push implicit receiver.
    __ movd(ecx, xmm0);
    __ Push(ecx);
#endif

    // Restore and call the constructor function.
    __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
    __ InvokeFunction(edi, edx, eax, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  --                eax: constructor result
    //  -- sp[0*kSystemPointerSize]: implicit receiver
    //  -- sp[1*kSystemPointerSize]: padding
    //  -- sp[2*kSystemPointerSize]: constructor function
    //  -- sp[3*kSystemPointerSize]: number of arguments
    //  -- sp[4*kSystemPointerSize]: context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(eax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);

    // Otherwise we do a smi check and fall through to check if the return
    // value is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(eax, &use_receiver, Label::kNear);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(above_equal, &leave_frame, Label::kNear);
    __ jmp(&use_receiver, Label::kNear);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ mov(eax, Operand(esp, 0 * kSystemPointerSize));
    __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, edx, times_half_system_pointer_size,
                      1 * kSystemPointerSize));  // 1 ~ receiver
  __ push(ecx);
  __ ret(0);
}
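
// (Background for the FunctionKindBits range check above: derived-class
// constructors must not allocate an implicit receiver; they receive the hole
// and obtain the actual instance from the base constructor via super(). Only
// the non-derived constructor kinds take the FastNewObject path.)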

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(edi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

namespace {

// Called with the native C calling convention. The corresponding function
// signature is either:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** argv)>;
// or
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  {  // NOLINT. Scope block confuses linter.
    NoRootArrayScope uninitialized_root_register(masm);

    // Set up frame.
    __ push(ebp);
    __ mov(ebp, esp);

    // Push marker in two places.
    __ push(Immediate(StackFrame::TypeToMarker(type)));
    // Reserve a slot for the context. It is filled after the root register has
    // been set up.
    __ AllocateStackSpace(kSystemPointerSize);
    // Save callee-saved registers (C calling conventions).
    __ push(edi);
    __ push(esi);
    __ push(ebx);

    // Initialize the root register from the given root_register_value
    // argument. C calling convention: the first argument is passed on the
    // stack.
    __ mov(kRootRegister,
           Operand(ebp, EntryFrameConstants::kRootRegisterValueOffset));
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ push(__ ExternalReferenceAsOperand(c_entry_fp, edi));

  // Store the context address in the previously-reserved slot.
  ExternalReference context_address = ExternalReference::Create(
      IsolateAddressId::kContextAddress, masm->isolate());
  __ mov(edi, __ ExternalReferenceAsOperand(context_address, edi));
  static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
  __ mov(Operand(ebp, kOffsetToContextSlot), edi);

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ cmp(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), ebp);
  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception = ExternalReference::Create(
      IsolateAddressId::kPendingExceptionAddress, masm->isolate());
  __ mov(__ ExternalReferenceAsOperand(pending_exception, edi), eax);
  __ Move(eax, masm->isolate()->factory()->exception());
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler(edi);

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return.
  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler(edi);

  __ bind(&exit);

  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(edi);
  __ cmp(edi, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(__ ExternalReferenceAsOperand(c_entry_fp, edi));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kSystemPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
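
// (A note on js_entry_sp above: it caches the ebp of the outermost JS entry
// frame on this thread. A zero value means we are entering JS from C++ for
// the first time on this stack, which is what selects the
// OUTERMOST_JSENTRY_FRAME marker over INNER_JSENTRY_FRAME.)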

}  // namespace

void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    const Register scratch1 = edx;
    const Register scratch2 = edi;

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ mov(esi, __ ExternalReferenceAsOperand(context_address, scratch1));

    // Load the previous frame pointer (edx) to access C arguments.
    __ mov(scratch1, Operand(ebp, 0));

    // Push the function.
    __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));

#ifndef V8_REVERSE_JSARGS
    // And the receiver onto the stack.
    __ push(Operand(scratch1, EntryFrameConstants::kReceiverArgOffset));
#endif

    // Load the number of arguments and setup pointer to the arguments.
    __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
    __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));

    // Check if we have enough stack space to push all arguments.
    // Argument count in eax. Clobbers ecx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
    __ jmp(&enough_stack_space);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the stack in a loop.
#ifdef V8_REVERSE_JSARGS
    Label loop, entry;
    __ Move(ecx, eax);
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    // Push the parameter from argv.
    __ mov(scratch2, Operand(scratch1, ecx, times_system_pointer_size, 0));
    __ push(Operand(scratch2, 0));  // dereference handle
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);
#else
    Label loop, entry;
    __ Move(ecx, Immediate(0));
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    // Push the parameter from argv.
    __ mov(scratch2, Operand(scratch1, ecx, times_system_pointer_size, 0));
    __ push(Operand(scratch2, 0));  // dereference handle
    __ inc(ecx);
    __ bind(&entry);
    __ cmp(ecx, eax);
    __ j(not_equal, &loop);
#endif

    // Load the previous frame pointer to access C arguments.
    __ mov(scratch2, Operand(ebp, 0));

#ifdef V8_REVERSE_JSARGS
    // Push the receiver onto the stack.
    __ push(Operand(scratch2, EntryFrameConstants::kReceiverArgOffset));
#endif

    // Get the new.target and function from the frame.
    __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
    __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code invocation.
  }
  __ ret(0);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // This expects two C++ function parameters passed by Invoke() in
  // execution.cc: the root register value and the microtask queue. Load the
  // microtask queue argument from the caller's stack frame.
  __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(),
         Operand(ebp, EntryFrameConstants::kMicrotaskQueueArgOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);
  __ mov(sfi_data,
         FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
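
// (Context for the helper above: the SharedFunctionInfo's function-data slot
// holds either a BytecodeArray directly or an InterpreterData that wraps one,
// so the bytecode has to be unwrapped one extra level in the latter case.
// Callers still check the result type themselves, since the slot can also
// hold non-bytecode payloads.)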

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : the value to pass to the generator
  //  -- edx    : the JSGeneratorObject to resume
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(edx);

  // Store input value into generator object.
  __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
  __ RecordWriteField(edx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ cmpb(__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ cmp(edx, __ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  CompareStackLimit(masm, esp, StackLimitKind::kRealStackLimit);
  __ j(below, &stack_overflow);

  // Pop return address.
  __ PopReturnAddressTo(eax);

#ifndef V8_REVERSE_JSARGS
  // Push receiver.
  __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));
#endif

  // ----------- S t a t e -------------
  //  -- eax    : return address
  //  -- edx    : the JSGeneratorObject to resume
  //  -- edi    : generator function
  //  -- esi    : generator context
  //  -- esp[0] : generator receiver, if V8_REVERSE_JSARGS is not set
  // -----------------------------------

  {
    // Spill ebx, which is needed as a scratch register below.
    __ movd(xmm0, ebx);

    // Copy the function arguments from the generator object's register file.
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(ecx, FieldOperand(
                        ecx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(ebx,
           FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
#ifdef V8_REVERSE_JSARGS
    {
      Label done_loop, loop;
      __ mov(edi, ecx);

      __ bind(&loop);
      __ dec(edi);
      __ j(less, &done_loop);
      __ Push(
          FieldOperand(ebx, edi, times_tagged_size, FixedArray::kHeaderSize));
      __ jmp(&loop);

      __ bind(&done_loop);
    }

    // Push receiver.
    __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));
#else
    {
      Label done_loop, loop;
      __ Set(edi, 0);

      __ bind(&loop);
      __ cmp(edi, ecx);
      __ j(greater_equal, &done_loop);
      __ Push(
          FieldOperand(ebx, edi, times_tagged_size, FixedArray::kHeaderSize));
      __ add(edi, Immediate(1));
      __ jmp(&loop);

      __ bind(&done_loop);
    }
#endif

    // Restore registers.
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
    __ movd(ebx, xmm0);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
    __ Push(eax);
    GetSharedFunctionInfoBytecode(masm, ecx, eax);
    __ Pop(eax);
    __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
    __ Assert(equal, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(eax);
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(eax, FieldOperand(
                        eax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
    __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
    __ JumpCodeObject(ecx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edx);
    __ Push(edi);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(edx);
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(edx);
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}
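
// (How the resume above works: the suspended frame's state lives in the
// generator's parameters-and-registers FixedArray, so the formal parameters
// are pushed back onto the stack and the function is re-entered through its
// code entry, with the generator object smuggled in via new.target.)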

static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                                                Register optimized_code,
                                                Register closure,
                                                Register scratch1,
                                                Register scratch2) {
  // Store the optimized code in the closure.
  __ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
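
// (The copy into scratch1 above exists because RecordWriteField clobbers its
// value register, and the caller may still need optimized_code afterwards.
// OMIT_SMI_CHECK is safe here because the stored value is always a Code
// object, never a Smi.)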

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ mov(args_count,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(args_count,
         FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ pop(return_pc);
  __ add(esp, args_count);
  __ push(return_pc);
}
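
// (This relies on BytecodeArray's parameter size being recorded in bytes and
// including the receiver slot, so adding it to esp pops the receiver and all
// arguments in one step.)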

// Tail-call |function_id| if |smi_entry| == |marker|.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ cmp(smi_entry, Immediate(Smi::FromEnum(marker)));
  __ j(not_equal, &no_match, Label::kNear);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void TailCallOptimizedCodeSlot(MacroAssembler* masm,
                                      Register optimized_code_entry) {
  // ----------- S t a t e -------------
  //  -- edx : new target (preserved for callee if needed, and caller)
  //  -- edi : target function (preserved for callee if needed, and caller)
  // -----------------------------------
  DCHECK(!AreAliased(edx, edi, optimized_code_entry));

  Register closure = edi;

  // Preserve edx (the new target), which is clobbered as a scratch register
  // when installing the optimized code below.
  __ push(edx);

  // Check if the optimized code is marked for deopt. If it is, bailout to a
  // given label.
  Label found_deoptimized_code;
  __ mov(eax,
         FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
  __ test(FieldOperand(eax, CodeDataContainer::kKindSpecificFlagsOffset),
          Immediate(1 << Code::kMarkedForDeoptimizationBit));
  __ j(not_zero, &found_deoptimized_code);

  // Optimized code is good, get it into the closure and link the closure
  // into the optimized functions list, then tail call the optimized code.
  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure, edx,
                                      eax);
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ LoadCodeObjectEntry(ecx, optimized_code_entry);
  __ pop(edx);
  __ jmp(ecx);

  // Optimized code slot contains deoptimized code, evict it and re-enter
  // the closure's code.
  __ bind(&found_deoptimized_code);
  __ pop(edx);
  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
}

static void MaybeOptimizeCode(MacroAssembler* masm,
                              Register optimization_marker) {
  // ----------- S t a t e -------------
  //  -- edx : new target (preserved for callee if needed, and caller)
  //  -- edi : target function (preserved for callee if needed, and caller)
  //  -- optimization_marker : a Smi containing a non-zero optimization marker.
  // -----------------------------------
  DCHECK(!AreAliased(edx, edi, optimization_marker));

  // TODO(v8:8394): The logging of first execution will break if
  // feedback vectors are not allocated. We need to find a different way of
  // logging these events if required.
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kLogFirstExecution,
                                Runtime::kFunctionFirstExecution);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimized,
                                Runtime::kCompileOptimized_NotConcurrent);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimizedConcurrent,
                                Runtime::kCompileOptimized_Concurrent);

  {
    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
    // that an interrupt will eventually update the slot with optimized code.
    if (FLAG_debug_code) {
      __ cmp(
          optimization_marker,
          Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
      __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
    }
  }
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode. Will not advance
// the bytecode offset if the current bytecode is a JumpLoop, instead just
// re-executing the JumpLoop to jump to the correct bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register scratch1, Register scratch2,
                                          Register scratch3, Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register bytecode = scratch2;

  // The bytecode offset value will be increased by one in wide and extra wide
  // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
  // will restore the original bytecode. In order to simplify the code, we have
  // a backup of it.
  Register original_bytecode_offset = scratch3;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode, original_bytecode_offset));
  __ Move(bytecode_size_table,
          Immediate(ExternalReference::bytecode_size_table_address()));

  // Load the current bytecode.
  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ Move(original_bytecode_offset, bytecode_offset);

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmp(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
  // The code to load the next bytecode is common to both wide and extra wide.
  // We can hoist them up here. inc has to happen before test since it
  // modifies the ZF flag.
  __ inc(bytecode_offset);
  __ test(bytecode, Immediate(0x1));
  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ add(bytecode_size_table,
         Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Update table to the extra wide scaled table.
  __ add(bytecode_size_table,
         Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                            \
  __ cmp(bytecode,                                                     \
         Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // If this is a JumpLoop, re-execute it to perform the jump to the beginning
  // of the loop.
  Label end, not_jump_loop;
  __ cmp(bytecode,
         Immediate(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ j(not_equal, &not_jump_loop, Label::kNear);
  // If this is a wide or extra wide JumpLoop, we need to restore the original
  // bytecode_offset since we might have increased it to skip the wide /
  // extra-wide prefix bytecode.
  __ Move(bytecode_offset, original_bytecode_offset);
  __ jmp(&end, Label::kNear);

  __ bind(&not_jump_loop);
  // Otherwise, load the size of the current bytecode and advance the offset.
  __ add(bytecode_offset,
         Operand(bytecode_size_table, bytecode, times_int_size, 0));

  __ bind(&end);
}
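
// (Worked example of the prefix handling above: for the sequence
// [kWide, kLdaSmi, ...] the offset is first bumped past the prefix, bit 0 of
// the prefix distinguishes Wide (even) from ExtraWide (odd), and the size
// table pointer is advanced by one or two table lengths so that the final
// lookup reads the operand-scaled size of kLdaSmi.)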

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called
//   o edx: the incoming new target or generator object
//   o esi: our context
//   o ebp: the caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = edi;

  // The bytecode array could have been flushed from the shared function info,
  // if so, call into CompileLazy.
  Label compile_lazy;
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, ecx, eax);
  __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, eax);
  __ j(not_equal, &compile_lazy);

  Register feedback_vector = ecx;
  Label push_stack_frame;
  // Load feedback vector and check if it is valid. If valid, check for
  // optimized code and update invocation count. Otherwise, setup the stack
  // frame.
  __ mov(feedback_vector,
         FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ mov(eax, FieldOperand(feedback_vector, HeapObject::kMapOffset));
  __ CmpInstanceType(eax, FEEDBACK_VECTOR_TYPE);
  __ j(not_equal, &push_stack_frame);

  // Read off the optimized code slot in the feedback vector.
  // Load the optimized code from the feedback vector and re-use the register.
  Register optimized_code_entry = ecx;
  __ mov(optimized_code_entry,
         FieldOperand(feedback_vector,
                      FeedbackVector::kOptimizedCodeWeakOrSmiOffset));

  // Check if the optimized code slot is not empty.
  Label optimized_code_slot_not_empty;
  __ cmp(optimized_code_entry,
         Immediate(Smi::FromEnum(OptimizationMarker::kNone)));
  __ j(not_equal, &optimized_code_slot_not_empty);

  Label not_optimized;
  __ bind(&not_optimized);

  // Load the feedback vector and increment the invocation count.
  __ mov(feedback_vector,
         FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset are
  // 8-bit fields next to each other, so we can clear both with a single 16-bit
  // write. These static asserts guard that assumption.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                BytecodeArray::kOsrNestingLevelOffset + kCharSize);
  STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
  __ mov_w(FieldOperand(kInterpreterBytecodeArrayRegister,
                        BytecodeArray::kOsrNestingLevelOffset),
           Immediate(0));

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));

  // Allocate the local and temporary register file on the stack.
  Label stack_overflow;
  {
    // Load frame size from the BytecodeArray object.
    Register frame_size = ecx;
    __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
                                    BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    __ mov(eax, esp);
    __ sub(eax, frame_size);
    CompareStackLimit(masm, eax, StackLimitKind::kRealStackLimit);
    __ j(below, &stack_overflow);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(kInterpreterAccumulatorRegister);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(frame_size, Immediate(kSystemPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in edx.
  Label no_incoming_new_target_or_generator_register;
  __ mov(ecx, FieldOperand(
                  kInterpreterBytecodeArrayRegister,
                  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ test(ecx, ecx);
  __ j(zero, &no_incoming_new_target_or_generator_register);
  __ mov(Operand(ebp, ecx, times_system_pointer_size, 0), edx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Perform interrupt stack check.
  // TODO(solanes): Merge with the real stack limit check above.
  Label stack_check_interrupt, after_stack_check_interrupt;
  CompareStackLimit(masm, esp, StackLimitKind::kInterruptStackLimit);
  __ j(below, &stack_check_interrupt);
  __ bind(&after_stack_check_interrupt);

  // The accumulator is already loaded with undefined.

  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));
  __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(kJavaScriptCallCodeStartRegister,
         Operand(kInterpreterDispatchTableRegister, ecx,
                 times_system_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ Push(eax);
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx,
                                kInterpreterDispatchTableRegister, eax,
                                &do_return);
  __ Pop(eax);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  __ Pop(eax);
  // The return value is in eax.
  LeaveInterpreterFrame(masm, edx, ecx);
  __ ret(0);

  __ bind(&stack_check_interrupt);
  // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
  // for the call to the StackGuard.
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
         Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
                                kFunctionEntryBytecodeOffset)));
  __ CallRuntime(Runtime::kStackGuard);

  // After the call, restore the bytecode array, bytecode offset and
  // accumulator registers again. Also, restore the bytecode offset in the
  // stack to its previous value.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // It's ok to clobber kInterpreterBytecodeOffsetRegister since we are setting
  // it again after continuing.
  __ SmiTag(kInterpreterBytecodeOffsetRegister);
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
         kInterpreterBytecodeOffsetRegister);

  __ jmp(&after_stack_check_interrupt);

  __ bind(&optimized_code_slot_not_empty);
  Label maybe_has_optimized_code;
  // Check if optimized code marker is actually a weak reference to the
  // optimized code as opposed to an optimization marker.
  __ JumpIfNotSmi(optimized_code_entry, &maybe_has_optimized_code);
  MaybeOptimizeCode(masm, optimized_code_entry);
  // Fall through if there's no runnable optimized code.
  __ jmp(&not_optimized);

  __ bind(&maybe_has_optimized_code);
  // Load code entry from the weak reference, if it was cleared, resume
  // execution of unoptimized code.
  __ LoadWeakValue(optimized_code_entry, &not_optimized);
  TailCallOptimizedCodeSlot(masm, optimized_code_entry);

  __ bind(&compile_lazy);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  __ int3();  // Should not return.
}
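
// (A note on kFunctionEntryBytecodeOffset above: the offset stored for the
// StackGuard call is a sentinel that does not correspond to a real bytecode,
// letting the stack-walking machinery recognize an interrupt taken at
// function entry rather than at an actual bytecode boundary.)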

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit,
                                         Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
#ifdef V8_REVERSE_JSARGS
  __ Push(Operand(array_limit, 0));
  __ bind(&loop_check);
  __ add(array_limit, Immediate(kSystemPointerSize));
  __ cmp(array_limit, start_address);
  __ j(below_equal, &loop_header, Label::kNear);
#else
  __ Push(Operand(start_address, 0));
  __ sub(start_address, Immediate(kSystemPointerSize));
  __ bind(&loop_check);
  __ cmp(start_address, array_limit);
  __ j(above, &loop_header, Label::kNear);
#endif
}
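
// (Push order above: with V8_REVERSE_JSARGS the loop walks upward from
// array_limit, so arguments end up on the stack in the reverse order of the
// non-reversed configuration, which walks downward from start_address over
// the same memory range.)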

1255
// static
1256 1257
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1258
    InterpreterPushArgsMode mode) {
1259
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1260 1261
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
1262
  //  -- ecx : the address of the first argument to be pushed. Subsequent
1263 1264 1265
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
1266
  // -----------------------------------
1267

1268
  const Register scratch = edx;
1269 1270
  const Register argv = ecx;

1271
  Label stack_overflow;
1272 1273 1274 1275 1276 1277 1278 1279

#ifdef V8_REVERSE_JSARGS
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread argument should not be pushed.
    __ dec(eax);
  }
#endif

1280 1281 1282 1283 1284
  // Add a stack check before pushing the arguments.
  Generate_StackOverflowCheck(masm, eax, scratch, &stack_overflow, true);

  __ movd(xmm0, eax);  // Spill number of arguments.

1285
  // Compute the expected number of arguments.
1286
  __ mov(scratch, eax);
1287

1288
  // Pop return address to allow tail-call after pushing arguments.
1289
  __ PopReturnAddressTo(eax);
1290

1291 1292 1293 1294 1295 1296 1297 1298 1299 1300 1301 1302 1303 1304 1305 1306 1307 1308 1309 1310 1311 1312 1313 1314 1315 1316 1317
#ifdef V8_REVERSE_JSARGS
  if (receiver_mode != ConvertReceiverMode::kNullOrUndefined) {
    __ add(scratch, Immediate(1));  // Add one for receiver.
  }

  // Find the address of the last argument.
  __ shl(scratch, kSystemPointerSizeLog2);
  __ neg(scratch);
  __ add(scratch, argv);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm1, scratch);
    Generate_InterpreterPushArgs(masm, scratch, argv);
    // Pass the spread in the register ecx.
    __ movd(ecx, xmm1);
    __ mov(ecx, Operand(ecx, 0));
  } else {
    Generate_InterpreterPushArgs(masm, scratch, argv);
  }

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
  }
#else
  __ add(scratch, Immediate(1));  // Add one for receiver.

1318 1319
  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1320
    __ PushRoot(RootIndex::kUndefinedValue);
1321
    __ sub(scratch, Immediate(1));  // Subtract one for receiver.
  }

  // Find the address of the last argument.
  __ shl(scratch, kSystemPointerSizeLog2);
  __ neg(scratch);
  __ add(scratch, argv);
  Generate_InterpreterPushArgs(masm, scratch, argv);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(ecx);                // Pass the spread in a register
  }
#endif

  __ PushReturnAddressFrom(eax);
  __ movd(eax, xmm0);  // Restore number of arguments.

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
#ifndef V8_REVERSE_JSARGS
    __ sub(eax, Immediate(1));  // Subtract one for spread
#endif
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}

namespace {

// This function modifies start_addr and only reads the contents of the
// num_args register. scratch1 and scratch2 are used as temporary registers.
void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
    MacroAssembler* masm, Register num_args, Register start_addr,
    Register scratch1, Register scratch2, int num_slots_to_move,
    Label* stack_overflow) {
  // We have to move the return address and the temporary registers above it
  // before we can copy the arguments onto the stack. To achieve this:
  // Step 1: Grow the stack by num_args + 1 slots (the extra slot is for the
  //         receiver).
  // Step 2: Move the return address and the values around it to the top of
  //         the stack.
  // Step 3: Copy the arguments into the correct locations.
  //  current stack    =====>    required stack layout
  // |             |            | return addr   | (2) <-- esp (1)
  // |             |            | addtl. slot   |
  // |             |            | arg N         | (3)
  // |             |            | ....          |
  // |             |            | arg 1         |
  // | return addr | <-- esp    | arg 0         |
  // | addtl. slot |            | receiver slot |

  // Check for stack overflow before we increment the stack pointer.
  Generate_StackOverflowCheck(masm, num_args, scratch1, stack_overflow, true);

  // Step 1 - Update the stack pointer.

  __ lea(scratch1,
         Operand(num_args, times_system_pointer_size, kSystemPointerSize));
  __ AllocateStackSpace(scratch1);

  // Step 2: Move the return address and the slots around it to the correct
  // locations. Move from top to bottom, otherwise we may overwrite entries
  // when num_args is 0 or 1, i.e. when the source and destination ranges
  // overlap. Since there is always at least one extra slot for the receiver,
  // no further overlap checks are needed.
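  // Note that the C++ loop below is unrolled at code-generation time: it
  // emits one mov pair per slot, since num_slots_to_move is a small
  // compile-time constant.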
  for (int i = 0; i < num_slots_to_move + 1; i++) {
    __ mov(scratch1, Operand(esp, num_args, times_system_pointer_size,
                             (i + 1) * kSystemPointerSize));
    __ mov(Operand(esp, i * kSystemPointerSize), scratch1);
  }

  // Step 3: Copy the arguments into the correct locations.
  // The slot meant for the receiver currently holds a copy of the return
  // address. Reset it so that we do not misinterpret the return address as an
  // object.
#ifdef V8_REVERSE_JSARGS
  __ mov(Operand(esp, (num_slots_to_move + 1) * kSystemPointerSize),
         Immediate(0));
  __ mov(scratch1, Immediate(0));

  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_system_pointer_size,
                 (num_slots_to_move + 1) * kSystemPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kSystemPointerSize));
  __ bind(&loop_check);
  __ inc(scratch1);
  __ cmp(scratch1, eax);
  __ j(less_equal, &loop_header, Label::kNear);

#else
  __ mov(Operand(esp, num_args, times_system_pointer_size,
                 (num_slots_to_move + 1) * kSystemPointerSize),
         Immediate(0));
  __ mov(scratch1, num_args);

  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_system_pointer_size,
                 num_slots_to_move * kSystemPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kSystemPointerSize));
  __ sub(scratch1, Immediate(1));
  __ bind(&loop_check);
  __ cmp(scratch1, Immediate(0));
  __ j(greater, &loop_header, Label::kNear);
#endif
}

}  // anonymous namespace

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- eax     : the number of arguments (not including the receiver)
  //  -- ecx     : the address of the first argument to be pushed. Subsequent
  //               arguments should be consecutive above this, in the same order
  //               as they are to be pushed onto the stack.
  //  -- esp[0]  : return address
  //  -- esp[4]  : allocation site feedback (if available or undefined)
  //  -- esp[8]  : the new target
  //  -- esp[12] : the constructor
  // -----------------------------------
  Label stack_overflow;

#ifdef V8_REVERSE_JSARGS
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread argument should not be pushed.
    __ dec(eax);
  }
#endif

  // Push the arguments and move the return address and the stack spill slots
  // to the top of the stack. The eax register is read-only; the ecx register
  // will be modified. edx and edi are used as scratch registers.
  Generate_InterpreterPushZeroAndArgsAndReturnAddress(
      masm, eax, ecx, edx, edi,
      InterpreterPushArgsThenConstructDescriptor::kStackArgumentsCount,
      &stack_overflow);

  // Call the appropriate constructor. eax and ecx already contain intended
  // values, remaining registers still need to be initialized from the stack.

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller context at
    // this point).

    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Pop(kJavaScriptCallExtraArg1Register);
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(eax);

    __ AssertFunction(kJavaScriptCallTargetRegister);
    __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register, eax);

    __ movd(eax, xmm0);  // Reload number of arguments.
    __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
#ifdef V8_REVERSE_JSARGS
    // Pass the spread in the register ecx: replace the address held in ecx
    // with the spread value it points to.
    __ mov(ecx, Operand(ecx, 0));
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);  // Reload number of arguments.
#else
    __ Pop(ecx);  // Pop the spread (i.e. the first argument), overwriting ecx.
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);         // Reload number of arguments.
    __ sub(eax, Immediate(1));  // The actual argc thus decrements by one.
#endif
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    __ PopReturnAddressTo(ecx);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(ecx);

    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ int3();
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  static constexpr Register scratch = ecx;

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
  __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
  __ mov(scratch,
         FieldOperand(scratch, SharedFunctionInfo::kFunctionDataOffset));
  __ Push(eax);
  __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  __ mov(scratch,
         FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
  __ add(scratch, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  __ mov(scratch,
         __ ExternalReferenceAsOperand(
             ExternalReference::
                 address_of_interpreter_entry_trampoline_instruction_start(
                     masm->isolate()),
             scratch));

  __ bind(&trampoline_loaded);
  __ Pop(eax);
  __ add(scratch, Immediate(interpreter_entry_return_pc_offset.value()));
  __ push(scratch);

  // Initialize the dispatch table register.
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     scratch);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

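  // In debug builds, verify that the bytecode offset points past the
  // BytecodeArray header; a smaller offset would indicate a corrupted frame.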
  if (FLAG_debug_code) {
    Label okay;
    __ cmp(kInterpreterBytecodeOffsetRegister,
           Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
    __ j(greater_equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Dispatch to the target bytecode.
  __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
                              kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(kJavaScriptCallCodeStartRegister,
         Operand(kInterpreterDispatchTableRegister, scratch,
                 times_system_pointer_size, 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  Label enter_bytecode, function_entry_bytecode;
  __ cmp(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag +
                   kFunctionEntryBytecodeOffset));
  __ j(equal, &function_entry_bytecode);

  // Advance to the next bytecode.
  Label if_return;
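  // Preserve eax (the interpreter accumulator) across the helper call below,
  // which receives it as one of its scratch registers.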
  __ Push(eax);
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx, esi,
                                eax, &if_return);
  __ Pop(eax);

  __ bind(&enter_bytecode);
  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ mov(ecx, kInterpreterBytecodeOffsetRegister);
  __ SmiTag(ecx);
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);

  Generate_InterpreterEnterBytecode(masm);

  __ bind(&function_entry_bytecode);
  // If the code deoptimizes during the implicit function entry stack interrupt
  // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
  // not a valid bytecode offset. Detect this case and advance to the first
  // actual bytecode.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ jmp(&enter_bytecode);

  // We should never take the if_return path.
  __ bind(&if_return);
  // No need to pop eax here since we will be aborting anyway.
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
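  // On entry the stack holds a builtin continuation frame materialized by the
  // deoptimizer: the saved allocatable registers on top, with the builtin
  // index (a Smi) stored below them.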
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
#ifdef V8_REVERSE_JSARGS
    if (java_script_builtin) {
      // xmm0 is not included in the allocatable registers.
      __ movd(xmm0, eax);
    } else {
      // Overwrite the hole inserted by the deoptimizer with the return value
      // from the LAZY deopt point.
      __ mov(
          Operand(esp, config->num_allocatable_general_registers() *
                               kSystemPointerSize +
                           BuiltinContinuationFrameConstants::kFixedFrameSize),
          eax);
    }
#else
    // Overwrite the hole inserted by the deoptimizer with the return value from
    // the LAZY deopt point.
    __ mov(Operand(esp, config->num_allocatable_general_registers() *
                                kSystemPointerSize +
                            BuiltinContinuationFrameConstants::kFixedFrameSize),
           eax);
#endif
  }

  // Replace the builtin index Smi on the stack with the start address of the
  // builtin loaded from the builtins table. The ret below will return to this
  // address.
  int offset_to_builtin_index = allocatable_register_count * kSystemPointerSize;
  __ mov(eax, Operand(esp, offset_to_builtin_index));
  __ LoadEntryFromBuiltinIndex(eax);
  __ mov(Operand(esp, offset_to_builtin_index), eax);

  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
#ifdef V8_REVERSE_JSARGS
  if (with_result && java_script_builtin) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point. eax contains the argument count; the return
    // value from the LAZY deopt point is always the last argument.
    __ movd(Operand(esp, eax, times_system_pointer_size,
                    BuiltinContinuationFrameConstants::kFixedFrameSize),
            xmm0);
  }
#endif
  __ mov(
      ebp,
      Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
      kSystemPointerSize;
  __ pop(Operand(esp, offsetToPC));
  __ Drop(offsetToPC / kSystemPointerSize);
  __ ret(0);
}
}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
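  // The return value (the interpreter accumulator) sits just above the return
  // address; reload it into eax and drop that slot when returning.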
  __ mov(eax, Operand(esp, 1 * kSystemPointerSize));
  __ ret(1 * kSystemPointerSize);  // Remove eax.
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  // The order of args depends on V8_REVERSE_JSARGS
  //  -- args[0] : receiver
  //  -- args[1] : thisArg
  //  -- args[2] : argArray
  // -----------------------------------

  // 1. Load receiver into xmm0, argArray into edx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(eax);
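    // The accessor computes argument operands from the count held in eax;
    // args[0] is the receiver slot (see the state comment above).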
    // Spill receiver to allow the usage of edi as a scratch register.
    __ movd(xmm0, args[0]);

    __ LoadRoot(edx, RootIndex::kUndefinedValue);
    __ mov(edi, edx);
    __ test(eax, eax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ mov(edi, args[1]);
      __ cmp(eax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ mov(edx, args[2]);
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(ecx);
    __ lea(esp,
           Operand(esp, eax, times_system_pointer_size, kSystemPointerSize));
    __ Push(edi);
    __ PushReturnAddressFrom(ecx);

    // Restore receiver to edi.
    __ movd(edi, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- edx    : argArray
  //  -- edi    : receiver
  //  -- esp[0] : return address
  //  -- esp[4] : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
  __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(eax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  // esp[0]           : Return address
  // esp[8]           : Argument n
  // esp[16]          : Argument n-1
  //  ...
  // esp[8 * n]       : Argument 1
  // esp[8 * (n + 1)] : Receiver (callable to call)
  // NOTE: The order of args is reversed if V8_REVERSE_JSARGS is set.
  // eax contains the number of arguments, n, not counting the receiver.

#ifdef V8_REVERSE_JSARGS
  // 1. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(eax);
    __ mov(edi, args.GetReceiverOperand());
  }

  // 2. Save the return address and drop the callable.
  __ PopReturnAddressTo(edx);
  __ Pop(ecx);

  // 3. Make sure we have at least one argument.
  {
    Label done;
    __ test(eax, eax);
    __ j(not_zero, &done, Label::kNear);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ inc(eax);
    __ bind(&done);
  }

  // 4. Push back the return address one slot down on the stack (overwriting the
  // original callable), making the original first argument the new receiver.
  __ PushReturnAddressFrom(edx);
  __ dec(eax);  // One fewer argument (first argument is new receiver).
#else
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ test(eax, eax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(edx);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(edx);
    __ inc(eax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(eax);
    __ mov(edi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ mov(ecx, eax);
    __ bind(&loop);
    __ mov(edx, Operand(esp, ecx, times_system_pointer_size, 0));
    __ mov(Operand(esp, ecx, times_system_pointer_size, kSystemPointerSize),
           edx);
    __ dec(ecx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(edx);            // Discard copy of return address.
    __ dec(eax);  // One fewer argument (first argument is new receiver).
  }
#endif

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  // The order of args depends on V8_REVERSE_JSARGS
  //  -- args[0] : receiver
  //  -- args[1] : target
  //  -- args[2] : thisArgument
  //  -- args[3] : argumentsList
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into edx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(eax);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ mov(edx, edi);
    __ mov(ecx, edi);
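    // The cmp against 1 is reused by two branches: below means argc == 0 and
    // equal means argc == 1; the later cmp against 3 then separates argc == 2
    // from argc >= 3.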
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, args[1]);  // target
    __ j(equal, &done, Label::kNear);
    __ mov(ecx, args[2]);  // thisArgument
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(edx, args[3]);  // argumentsList
    __ bind(&done);

    // Spill argumentsList to use edx as a scratch register.
    __ movd(xmm0, edx);

    __ PopReturnAddressTo(edx);
    __ lea(esp,
           Operand(esp, eax, times_system_pointer_size, kSystemPointerSize));
    __ Push(ecx);
    __ PushReturnAddressFrom(edx);

    // Restore argumentsList.
    __ movd(edx, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- edx    : argumentsList
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  // The order of args depends on V8_REVERSE_JSARGS
  //  -- args[0] : receiver
  //  -- args[1] : target
  //  -- args[2] : argumentsList
  //  -- args[3] : new.target (optional)
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into ecx (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(eax);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ mov(edx, edi);
    __ mov(ecx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, args[1]);  // target
    __ mov(edx, edi);
    __ j(equal, &done, Label::kNear);
    __ mov(ecx, args[2]);  // argumentsList
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(edx, args[3]);  // new.target
    __ bind(&done);

    // Spill argumentsList to use ecx as a scratch register.
    __ movd(xmm0, ecx);

    __ PopReturnAddressTo(ecx);
    __ lea(esp,
           Operand(esp, eax, times_system_pointer_size, kSystemPointerSize));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(ecx);

    // Restore argumentsList.
    __ movd(ecx, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- ecx    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  STATIC_ASSERT(kSmiTagSize == 1);
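  // eax + eax is eax << 1, which Smi-tags the argument count (32-bit Smis are
  // the value shifted left by one, with a zero tag).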
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);

  __ Push(Immediate(0));  // Padding.
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, edi, times_half_system_pointer_size,
                      1 * kSystemPointerSize));  // 1 ~ receiver
  __ PushReturnAddressFrom(ecx);
}

// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- esi    : context for the Call / Construct builtin
  //  -- eax    : number of parameters on the stack (not including the receiver)
  //  -- ecx    : len (number of elements to push from args)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[4] : arguments list (a FixedArray)
  //  -- esp[0] : return address.
  // -----------------------------------

  // We need to preserve eax, edx, edi and esi.
  __ movd(xmm0, edx);
  __ movd(xmm1, edi);
  __ movd(xmm2, eax);
  __ movd(xmm3, esi);  // Spill the context.

  const Register kArgumentsList = esi;
  const Register kArgumentsLength = ecx;

  __ PopReturnAddressTo(edx);
  __ pop(kArgumentsList);
  __ PushReturnAddressFrom(edx);

  if (masm->emit_debug_code()) {
    // Allow kArgumentsList to be a FixedArray, or a FixedDoubleArray if
    // kArgumentsLength == 0.
    Label ok, fail;
    __ AssertNotSmi(kArgumentsList);
    __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
    __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
    __ j(equal, &ok);
    __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
    __ j(not_equal, &fail);
    __ cmp(kArgumentsLength, 0);
    __ j(equal, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, kArgumentsLength, edx, &stack_overflow);

#ifdef V8_REVERSE_JSARGS
  __ movd(xmm4, kArgumentsList);  // Spill the arguments list.

  // Move the arguments already in the stack,
  // including the receiver and the return address.
  {
    Label copy, check;
    Register src = edx, current = edi, tmp = esi;
    // Update stack pointer.
    __ mov(src, esp);
    __ lea(tmp, Operand(kArgumentsLength, times_system_pointer_size, 0));
    __ AllocateStackSpace(tmp);
    // Include return address and receiver.
    __ add(eax, Immediate(2));
    __ mov(current, Immediate(0));
    __ jmp(&check);
    // Loop.
    __ bind(&copy);
    __ mov(tmp, Operand(src, current, times_system_pointer_size, 0));
    __ mov(Operand(esp, current, times_system_pointer_size, 0), tmp);
    __ inc(current);
    __ bind(&check);
    __ cmp(current, eax);
    __ j(less, &copy);
    __ lea(edx, Operand(esp, eax, times_system_pointer_size, 0));
  }

  __ movd(kArgumentsList, xmm4);  // Recover arguments list.

  // Push additional arguments onto the stack.
  {
    __ Move(eax, Immediate(0));
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(eax, kArgumentsLength);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ mov(edi, FieldOperand(kArgumentsList, eax, times_tagged_size,
                             FixedArray::kHeaderSize));
    __ CompareRoot(edi, RootIndex::kTheHoleValue);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ mov(Operand(edx, 0), edi);
    __ add(edx, Immediate(kSystemPointerSize));
    __ inc(eax);
    __ jmp(&loop);
    __ bind(&done);
  }
#else   // !V8_REVERSE_JSARGS
  // Push additional arguments onto the stack.
  {
    __ PopReturnAddressTo(edx);
    __ Move(eax, Immediate(0));
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(eax, kArgumentsLength);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ mov(edi, FieldOperand(kArgumentsList, eax, times_tagged_size,
                             FixedArray::kHeaderSize));
    __ CompareRoot(edi, RootIndex::kTheHoleValue);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ Push(edi);
    __ inc(eax);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(edx);
  }
#endif  // !V8_REVERSE_JSARGS

  // Restore eax, edi and edx.
  __ movd(esi, xmm3);  // Restore the context.
  __ movd(eax, xmm2);
  __ movd(edi, xmm1);
  __ movd(edx, xmm0);

  // Compute the actual parameter count.
  __ add(eax, kArgumentsLength);

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ movd(esi, xmm3);  // Restore the context.
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}

// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object)
  //  -- esi : context for the Call / Construct builtin
  //  -- edx : the new target (for [[Construct]] calls)
  //  -- ecx : start index (to support rest parameters)
  // -----------------------------------

  __ movd(xmm0, esi);  // Spill the context.

  Register scratch = esi;

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
    __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
    __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
              Immediate(Map::Bits1::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(edx);
      __ movd(esi, xmm0);  // Restore the context.
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  __ movd(xmm1, edx);  // Preserve new.target (in case of [[Construct]]).

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ mov(scratch, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ mov(edx, Operand(ebp, StandardFrameConstants::kFunctionOffset));
    __ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(edx, FieldOperand(
                        edx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(scratch, ebp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Just load the length from the ArgumentsAdaptorFrame.
    __ mov(edx,
           Operand(scratch, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(edx);
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ sub(edx, ecx);
  __ j(less_equal, &stack_done);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments already in the stack (not including
    //           the receiver)
    //  -- ecx : start index (to support rest parameters)
    //  -- edx : number of arguments to copy, i.e. arguments count - start index
    //  -- edi : the target to call (can be any Object)
    //  -- esi : points to the caller stack frame
    //  -- xmm0 : context for the Call / Construct builtin
    //  -- xmm1 : the new target (for [[Construct]] calls)
    // -----------------------------------

    // Forward the arguments from the caller frame.
#ifdef V8_REVERSE_JSARGS
    __ movd(xmm2, edi);  // Preserve the target to call.
    Generate_StackOverflowCheck(masm, edx, edi, &stack_overflow);
    __ movd(xmm3, ebx);  // Preserve root register.

    Register scratch = ebx;

    // Point to the first argument to copy (skipping receiver).
    __ lea(ecx, Operand(ecx, times_system_pointer_size,
                        CommonFrameConstants::kFixedFrameSizeAboveFp +
                            kSystemPointerSize));
    __ add(esi, ecx);

    // Move the arguments already in the stack,
    // including the receiver and the return address.
    {
      Label copy, check;
      Register src = ecx, current = edi;
      // Update stack pointer.
      __ mov(src, esp);
      __ lea(scratch, Operand(edx, times_system_pointer_size, 0));
      __ AllocateStackSpace(scratch);
      // Include return address and receiver.
      __ add(eax, Immediate(2));
      __ Set(current, 0);
      __ jmp(&check);
      // Loop.
      __ bind(&copy);
      __ mov(scratch, Operand(src, current, times_system_pointer_size, 0));
      __ mov(Operand(esp, current, times_system_pointer_size, 0), scratch);
      __ inc(current);
      __ bind(&check);
      __ cmp(current, eax);
      __ j(less, &copy);
      __ lea(ecx, Operand(esp, eax, times_system_pointer_size, 0));
    }

    // Update total number of arguments.
    __ sub(eax, Immediate(2));
    __ add(eax, edx);

    // Copy the additional caller arguments onto the stack.
    // TODO(victorgomes): Consider using forward order as potentially more cache
    // friendly.
    {
      Register src = esi, dest = ecx, num = edx;
      Label loop;
      __ bind(&loop);
      __ dec(num);
      __ mov(scratch, Operand(src, num, times_system_pointer_size, 0));
      __ mov(Operand(dest, num, times_system_pointer_size, 0), scratch);
      __ j(not_zero, &loop);
    }

    __ movd(ebx, xmm3);  // Restore root register.
    __ movd(edi, xmm2);  // Restore the target to call.
#else
    Generate_StackOverflowCheck(masm, edx, ecx, &stack_overflow);
    Label loop;
    __ add(eax, edx);
    __ PopReturnAddressTo(ecx);
    __ bind(&loop);
    {
      __ dec(edx);
      __ Push(Operand(scratch, edx, times_system_pointer_size,
                      kFPOnStackSize + kPCOnStackSize));
      __ j(not_zero, &loop);
    }
    __ PushReturnAddressFrom(ecx);
#endif
  }
  __ bind(&stack_done);

  __ movd(edx, xmm1);  // Restore new.target (in case of [[Construct]]).
  __ movd(esi, xmm0);  // Restore the context.
2356 2357

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
#ifdef V8_REVERSE_JSARGS
  __ movd(edi, xmm2);  // Restore the target to call.
#endif
  __ movd(esi, xmm0);  // Restore the context.
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(eax);
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                    SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      __ mov(ecx, args.GetReceiverOperand());
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);  // Clobbers ecx.
      __ j(above_equal, &done_convert);
      // Reload the receiver (it was clobbered by CmpObjectType).
      __ mov(ecx, args.GetReceiverOperand());
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
                      Label::kNear);
        __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        __ Push(esi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(esi);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ mov(args.GetReceiverOperand(), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  __ movzx_w(
      ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ InvokeFunctionCode(edi, no_reg, ecx, eax, JUMP_FUNCTION);
  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : new.target (only in case of [[Construct]])
  //  -- edi : target (checked to be a JSBoundFunction)
  // -----------------------------------
  __ movd(xmm0, edx);  // Spill edx.

  // Load [[BoundArguments]] into ecx and length of that into edx.
  Label no_bound_arguments;
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
  __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);
  __ test(edx, edx);
  __ j(zero, &no_bound_arguments);
#ifdef V8_REVERSE_JSARGS
  {
    // ----------- S t a t e -------------
    //  -- eax  : the number of arguments (not including the receiver)
    //  -- xmm0 : new.target (only in case of [[Construct]])
    //  -- edi  : target (checked to be a JSBoundFunction)
    //  -- ecx  : the [[BoundArguments]] (implemented as FixedArray)
    //  -- edx  : the number of [[BoundArguments]]
    // -----------------------------------

    // Check the stack for overflow.
    {
      Label done, stack_overflow;
      Generate_StackOverflowCheck(masm, edx, ecx, &stack_overflow);
      __ jmp(&done);
      __ bind(&stack_overflow);
      {
        FrameScope frame(masm, StackFrame::MANUAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ int3();
      }
      __ bind(&done);
    }

    // Spill context.
    __ movd(xmm3, esi);

    // Save the return address and the receiver into registers.
    __ pop(esi);
    __ movd(xmm1, esi);
    __ pop(esi);
    __ movd(xmm2, esi);

    // Push [[BoundArguments]] to the stack.
    {
      Label loop;
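      // Reload the [[BoundArguments]] array and its length; ecx was clobbered
      // as a scratch register by the stack-overflow check above.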
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(edx);
      // Adjust the effective number of arguments: after this, eax holds the
      // argument count from the call (not including the receiver) plus the
      // number of [[BoundArguments]].
      __ add(eax, edx);
      __ bind(&loop);
      __ dec(edx);
      __ mov(esi, FieldOperand(ecx, edx, times_tagged_size,
                               FixedArray::kHeaderSize));
      __ push(esi);
      __ j(greater, &loop);
    }

    // Restore Receiver and Return Address.
    __ movd(esi, xmm2);
    __ push(esi);
    __ movd(esi, xmm1);
    __ push(esi);

    // Restore context.
    __ movd(esi, xmm3);
  }
#else  // !V8_REVERSE_JSARGS
  {
    // ----------- S t a t e -------------
    //  -- eax  : the number of arguments (not including the receiver)
    //  -- xmm0 : new.target (only in case of [[Construct]])
    //  -- edi  : target (checked to be a JSBoundFunction)
    //  -- ecx  : the [[BoundArguments]] (implemented as FixedArray)
    //  -- edx  : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ lea(ecx, Operand(edx, times_system_pointer_size, 0));
      __ sub(esp, ecx);  // Not Windows-friendly, but corrected below.
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      CompareStackLimit(masm, esp, StackLimitKind::kRealStackLimit);
      __ j(above_equal, &done, Label::kNear);
      // Restore the stack pointer.
      __ lea(esp, Operand(esp, edx, times_system_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }
#if V8_OS_WIN
    // Correctly allocate the stack space that was checked above.
    {
      Label win_done;
      __ cmp(ecx, TurboAssemblerBase::kStackPageSize);
      __ j(less_equal, &win_done, Label::kNear);
      // Reset esp and walk through the range touching every page.
      __ lea(esp, Operand(esp, edx, times_system_pointer_size, 0));
      __ AllocateStackSpace(ecx);
      __ bind(&win_done);
    }
#endif

    // Adjust effective number of arguments to include return address.
    __ inc(eax);

    // Relocate arguments and return address down the stack.
    {
      Label loop;
      __ Set(ecx, 0);
      __ lea(edx, Operand(esp, edx, times_system_pointer_size, 0));
      __ bind(&loop);
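      // xmm1 serves as the temporary for this memory-to-memory move: mov
      // cannot take two memory operands, and the general-purpose registers
      // are all live here.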
      __ movd(xmm1, Operand(edx, ecx, times_system_pointer_size, 0));
      __ movd(Operand(esp, ecx, times_system_pointer_size, 0), xmm1);
      __ inc(ecx);
      __ cmp(ecx, eax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(edx);
      __ bind(&loop);
      __ dec(edx);
      __ movd(xmm1, FieldOperand(ecx, edx, times_tagged_size,
                                 FixedArray::kHeaderSize));
      __ movd(Operand(esp, eax, times_system_pointer_size, 0), xmm1);
      __ lea(eax, Operand(eax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (eax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ dec(eax);
  }
#endif  // !V8_REVERSE_JSARGS

  __ bind(&no_bound_arguments);
  __ movd(edx, xmm0);  // Reload edx.
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(eax);
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
  __ mov(args.GetReceiverOperand(), ecx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------
  StackArgumentsAccessor args(eax);

  Label non_callable, non_function, non_smi, non_jsfunction,
      non_jsboundfunction;
  __ JumpIfSmi(edi, &non_callable);
  __ bind(&non_smi);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_jsfunction);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET);

  __ bind(&non_jsfunction);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(not_equal, &non_jsboundfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET);

  // Check if the target has a [[Call]] internal method.
  __ bind(&non_jsboundfunction);
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(Map::Bits1::IsCallableBit::kMask));
  __ j(zero, &non_callable);

  // Call the CallProxy builtin if the target is a JSProxy.
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ mov(args.GetReceiverOperand(), edi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(edi, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertConstructor(edi);
  __ AssertFunction(edi);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ j(zero, &call_generic_stub, Label::kNear);

  // The calling convention for function-specific ConstructStubs requires
  // ecx to contain either an AllocationSite or undefined.
  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  // The calling convention for function-specific ConstructStubs requires
  // ecx to contain either an AllocationSite or undefined.
  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertConstructor(edi);
  __ AssertBoundFunction(edi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmp(edi, edx);
    __ j(not_equal, &done, Label::kNear);
    __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(eax);

  // Check if target is a Smi.
  Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
  __ JumpIfSmi(edi, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(Map::Bits1::IsConstructorBit::kMask));
  __ j(zero, &non_constructor);

  // Dispatch based on instance type.
  __ CmpInstanceType(ecx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_jsfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ bind(&non_jsfunction);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(not_equal, &non_jsboundfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ bind(&non_jsboundfunction);
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ mov(args.GetReceiverOperand(), edi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(edi, Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ecx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  //  -- edi : function (passed through to callee)
  // -----------------------------------

  const Register kExpectedNumberOfArgumentsRegister = ecx;

  Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
  __ cmp(kExpectedNumberOfArgumentsRegister, kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);
  __ cmp(eax, kExpectedNumberOfArgumentsRegister);
  __ j(less, &too_few);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
                                &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
#ifdef V8_REVERSE_JSARGS
    __ lea(edi, Operand(ebp, ecx, times_system_pointer_size, offset));
#else
    __ lea(edi, Operand(ebp, eax, times_system_pointer_size, offset));
#endif
    __ mov(eax, -1);  // account for receiver
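    // Note: eax is incremented before each push below, so the receiver is
    // copied on the first iteration (eax == 0) and the loop exits once eax
    // reaches the expected argument count.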

    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kSystemPointerSize));
    __ cmp(eax, kExpectedNumberOfArgumentsRegister);
    __ j(less, &copy);
    // eax now contains the expected number of arguments.
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
                                &stack_overflow);

    // Remember expected arguments in xmm0.
    __ movd(xmm0, kExpectedNumberOfArgumentsRegister);
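    // eax, ecx, edx and edi are all live here, so the SSE registers double as
    // spill slots; there are no spare general-purpose registers on ia32.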

#ifdef V8_REVERSE_JSARGS
    // Remember new target.
    __ movd(xmm1, edx);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ mov(edx, ecx);
    __ sub(edx, eax);
    __ bind(&fill);
    __ Push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ dec(edx);
    __ j(greater, &fill);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_system_pointer_size, offset));
    __ mov(edx, Immediate(-1));

    Label copy;
    __ bind(&copy);
    __ inc(edx);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kSystemPointerSize));
    __ cmp(edx, eax);
    __ j(less, &copy);

    // Restore new.target.
    __ movd(edx, xmm1);
#else   // !V8_REVERSE_JSARGS
    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_system_pointer_size, offset));
    // ecx = expected - actual.
    __ sub(kExpectedNumberOfArgumentsRegister, eax);
    // eax = -actual - 1
    __ neg(eax);
    __ sub(eax, Immediate(1));
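    // eax now holds -(actual + 1); the inc/test loop below therefore runs
    // exactly actual + 1 times, copying the receiver plus all actual
    // arguments.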

    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kSystemPointerSize));
    __ test(eax, eax);
    __ j(not_zero, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ bind(&fill);
    __ inc(eax);
    __ Push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ cmp(eax, kExpectedNumberOfArgumentsRegister);
    __ j(less, &fill);
#endif  // !V8_REVERSE_JSARGS

    // Restore expected arguments.
    __ movd(eax, xmm0);
  }

  // Call the entry point.
  __ bind(&invoke);
  // Restore function pointer.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  // eax : expected number of arguments
  // edx : new target (passed through to callee)
  // edi : function (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
  __ CallCodeObject(ecx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
  __ JumpCodeObject(ecx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  __ leave();

  // Load deoptimization data from the code object.
  __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
                               DeoptimizationData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(ecx);
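  // The offset is stored as a Smi in the deoptimization data, so the untagged
  // value is the raw byte offset of the OSR entry within the code object.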

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(eax, Operand(eax, ecx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in edi by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
                      arraysize(wasm::kGpParamRegisters),
                  "frame size mismatch");
    for (Register reg : wasm::kGpParamRegisters) {
      __ Push(reg);
    }
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
                      arraysize(wasm::kFpParamRegisters),
                  "frame size mismatch");
    __ AllocateStackSpace(kSimd128Size * arraysize(wasm::kFpParamRegisters));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(esp, offset), reg);
      offset += kSimd128Size;
    }

    // Push the Wasm instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(kWasmCompileLazyFuncIndexRegister);
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    {
      // At this point, ebx has been spilled to the stack but is not yet
      // overwritten with another value. We can still use it as kRootRegister.
      __ CallRuntime(Runtime::kWasmCompileLazy, 2);
    }
    // The entrypoint address is the return value.
    __ mov(edi, kReturnRegister0);

    // Restore registers.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= kSimd128Size;
      __ movdqu(reg, Operand(esp, offset));
    }
    DCHECK_EQ(0, offset);
    __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(edi);
}

void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
  {
    FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);

    // Save all parameter registers. They might hold live values, we restore
    // them after the runtime call.
    for (int reg_code : base::bits::IterateBitsBackwards(
             WasmDebugBreakFrameConstants::kPushedGpRegs)) {
      __ Push(Register::from_code(reg_code));
    }

    constexpr int kFpStackSize =
        kSimd128Size * WasmDebugBreakFrameConstants::kNumPushedFpRegisters;
    __ AllocateStackSpace(kFpStackSize);
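    // XMM registers cannot be pushed directly, so reserve the space up front
    // and spill each 128-bit register with an unaligned store.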
    int offset = kFpStackSize;
    for (int reg_code : base::bits::IterateBitsBackwards(
             WasmDebugBreakFrameConstants::kPushedFpRegs)) {
      offset -= kSimd128Size;
      __ movdqu(Operand(esp, offset), DoubleRegister::from_code(reg_code));
    }

    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    __ CallRuntime(Runtime::kWasmDebugBreak, 0);

    // Restore registers.
    for (int reg_code :
         base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
      __ movdqu(DoubleRegister::from_code(reg_code), Operand(esp, offset));
      offset += kSimd128Size;
    }
    __ add(esp, Immediate(kFpStackSize));
    for (int reg_code :
         base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
      __ Pop(Register::from_code(reg_code));
    }
  }

  __ ret(0);
}

void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // eax: number of arguments including receiver
  // edx: pointer to C function
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // ecx: pointer to the first argument

  STATIC_ASSERT(eax == kRuntimeCallArgCountRegister);
  STATIC_ASSERT(ecx == kRuntimeCallArgvRegister);
  STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
  STATIC_ASSERT(esi == kContextRegister);
  STATIC_ASSERT(edi == kJSFunctionRegister);

  DCHECK(!AreAliased(kRuntimeCallArgCountRegister, kRuntimeCallArgvRegister,
                     kRuntimeCallFunctionRegister, kContextRegister,
                     kJSFunctionRegister, kRootRegister));

  // Reserve space on the stack for the three arguments passed to the call. If
  // result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = 3;
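  // The three reserved slots hold argc, argv and the isolate address, which
  // are stored just before the C call below.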

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space, edi);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // edx: pointer to C function
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver  (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  __ mov(Operand(esp, 0 * kSystemPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kSystemPointerSize), esi);  // argv.
  __ Move(ecx, Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ mov(Operand(esp, 2 * kSystemPointerSize), ecx);
  __ call(kRuntimeCallFunctionRegister);

  // Result is in eax or edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(eax, RootIndex::kException);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ LoadRoot(edx, RootIndex::kTheHoleValue);
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ cmp(edx, __ ExternalReferenceAsOperand(pending_exception_address, ecx));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kSystemPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kSystemPointerSize), Immediate(0));  // argv.
    __ Move(esi,
            Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 2 * kSystemPointerSize), esi);
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esp, __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
  __ mov(ebp, __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
  __ mov(esi,
         __ ExternalReferenceAsOperand(pending_handler_context_address, esi));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ mov(edi, __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
                                            edi));
  __ jmp(edi);
}

void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 4 * kSystemPointerSize;

  MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(esp, kArgumentOffset + kDoubleSize / 2));

  // The result is returned on the stack.
  MemOperand return_operand = mantissa_operand;

  Register scratch1 = ebx;

  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result.
  Register result_reg = eax;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead for
  // the result.
  Register save_reg = eax;
  __ push(ecx);
  __ push(scratch1);
  __ push(save_reg);

  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
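  // ecx still holds the biased exponent; subtracting delta (exponent bias
  // plus significand width) leaves the left-shift count that positions the
  // integer bits in the low mantissa word.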
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Reserve space for 64 bit answer.
    __ AllocateStackSpace(kDoubleSize);  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    __ mov(result_reg, exponent_operand);
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  __ cmp(exponent_operand, Immediate(0));
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  __ mov(return_operand, result_reg);
  __ pop(save_reg);
  __ pop(scratch1);
  __ pop(ecx);
  __ ret(0);
}

void Builtins::Generate_GenericJSToWasmWrapper(MacroAssembler* masm) {
  // TODO(v8:10701): Implement for this platform.
  __ Trap();
}

namespace {

// Generates an Operand for saving parameters after PrepareCallApiFunction.
Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kSystemPointerSize);
}

// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
void PrepareCallApiFunction(MacroAssembler* masm, int argc, Register scratch) {
  __ EnterApiExitFrame(argc, scratch);
  if (__ emit_debug_code()) {
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}

// Calls an API function.  Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.  Clobbers esi, edi and
// caller-save registers.  Restores context.  On return removes
// stack_space * kSystemPointerSize (GCed).
void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
                              ExternalReference thunk_ref,
                              Operand thunk_last_arg, int stack_space,
                              Operand* stack_space_operand,
                              Operand return_value_operand) {
  Isolate* isolate = masm->isolate();

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx == function_address);
  // Allocate HandleScope in callee-save registers.
  __ add(__ ExternalReferenceAsOperand(level_address, esi), Immediate(1));
  __ mov(esi, __ ExternalReferenceAsOperand(next_address, esi));
  __ mov(edi, __ ExternalReferenceAsOperand(limit_address, edi));

  Label profiler_enabled, end_profiler_check;
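  // Decide whether to call the API function directly or through the profiling
  // thunk: an active CPU profiler or runtime call stats both force the thunk.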
  __ Move(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(not_zero, &profiler_enabled);
  __ Move(eax, Immediate(ExternalReference::address_of_runtime_stats_flag()));
  __ cmp(Operand(eax, 0), Immediate(0));
  __ j(not_zero, &profiler_enabled);
  {
    // Call the api function directly.
    __ mov(eax, function_address);
    __ jmp(&end_profiler_check);
  }
  __ bind(&profiler_enabled);
  {
    // Additional parameter is the address of the actual getter function.
    __ mov(thunk_last_arg, function_address);
    __ Move(eax, Immediate(thunk_ref));
  }
  __ bind(&end_profiler_check);

  // Call the api function.
  __ call(eax);

  Label prologue;
  // Load the value from ReturnValue
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(__ ExternalReferenceAsOperand(next_address, ecx), esi);
  __ sub(__ ExternalReferenceAsOperand(level_address, ecx), Immediate(1));
  __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
  __ cmp(edi, __ ExternalReferenceAsOperand(limit_address, ecx));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ mov(edx, *stack_space_operand);
  }
  __ LeaveApiExitFrame();

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ mov(ecx, __ ExternalReferenceAsOperand(scheduled_exception_address, ecx));
  __ CompareRoot(ecx, RootIndex::kTheHoleValue);
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, RootIndex::kHeapNumberMap);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(map, RootIndex::kBigIntMap);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kUndefinedValue);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kTrueValue);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kFalseValue);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kNullValue);
  __ j(equal, &ok, Label::kNear);

  __ Abort(AbortReason::kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand == nullptr) {
    DCHECK_NE(stack_space, 0);
    __ ret(stack_space * kSystemPointerSize);
  } else {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, edx);
    __ jmp(ecx);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions();
  __ bind(&delete_allocated_handles);
  __ mov(__ ExternalReferenceAsOperand(limit_address, ecx), edi);
  __ mov(edi, eax);
  __ Move(eax, Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(Operand(esp, 0), eax);
  __ Move(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}

}  // namespace

void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esi                 : context
  //  -- edx                 : api function address
  //  -- ecx                 : arguments count (not including the receiver)
  //  -- eax                 : call data
  //  -- edi                 : holder
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  // NOTE: The order of args is reversed if V8_REVERSE_JSARGS.

  Register api_function_address = edx;
  Register argc = ecx;
  Register call_data = eax;
  Register holder = edi;

  // Park argc in xmm0.
  __ movd(xmm0, argc);

  DCHECK(!AreAliased(api_function_address, argc, holder));

  using FCA = FunctionCallbackArguments;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
  //
  // Current state:
  //   esp[0]: return address
  //
  // Target state:
  //   esp[0 * kSystemPointerSize]: return address
  //   esp[1 * kSystemPointerSize]: kHolder
  //   esp[2 * kSystemPointerSize]: kIsolate
  //   esp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
  //   esp[4 * kSystemPointerSize]: undefined (kReturnValue)
  //   esp[5 * kSystemPointerSize]: kData
  //   esp[6 * kSystemPointerSize]: undefined (kNewTarget)

  __ PopReturnAddressTo(ecx);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Push(call_data);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Push(Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ Push(holder);
  __ PushReturnAddressFrom(ecx);
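  // The pushes above build implicit_args in reverse: the first push
  // (kNewTarget) lands deepest on the stack, matching the target state above.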

  // Reload argc from xmm0.
  __ movd(argc, xmm0);

  // Keep a pointer to kHolder (= implicit_args) in a scratch register.
  // We use it below to set up the FunctionCallbackInfo object.
  Register scratch = eax;
  __ lea(scratch, Operand(esp, 1 * kSystemPointerSize));

  // The API function takes a reference to v8::Arguments. If the CPU profiler
  // is enabled, a wrapper function will be called and we need to pass
  // the address of the callback as an additional parameter. Always allocate
  // space for it.
  static constexpr int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  static constexpr int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace, edi);

  // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
  __ mov(ApiParameterOperand(kApiArgc + 0), scratch);

  // FunctionCallbackInfo::values_ (points at the first varargs argument passed
  // on the stack).
#ifdef V8_REVERSE_JSARGS
  __ lea(scratch,
         Operand(scratch, (FCA::kArgsLength + 1) * kSystemPointerSize));
#else
  __ lea(scratch, Operand(scratch, argc, times_system_pointer_size,
                          (FCA::kArgsLength - 1) * kSystemPointerSize));
#endif
  __ mov(ApiParameterOperand(kApiArgc + 1), scratch);

  // FunctionCallbackInfo::length_.
  __ mov(ApiParameterOperand(kApiArgc + 2), argc);

  // We also store the number of bytes to drop from the stack after returning
  // from the API function here.
  __ lea(scratch,
         Operand(argc, times_system_pointer_size,
                 (FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
  __ mov(ApiParameterOperand(kApiArgc + 3), scratch);

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(kApiArgc + 0));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // There are two stack slots above the arguments we constructed on the stack:
  // the stored ebp (pushed by EnterApiExitFrame), and the return address.
  static constexpr int kStackSlotsAboveFCA = 2;
  Operand return_value_operand(
      ebp,
      (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);

  static constexpr int kUseStackSpaceOperand = 0;
  Operand stack_space_operand = ApiParameterOperand(kApiArgc + 3);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), kUseStackSpaceOperand,
                           &stack_space_operand, return_value_operand);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(RootIndex::kUndefinedValue);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Push(Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ push(holder);
  __ push(Immediate(Smi::zero()));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
  // space for optional callback address parameter (in case CPU profiler is
  // active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  PrepareCallApiFunction(masm, kApiArgc, scratch);

  // Load address of v8::PropertyAccessorInfo::args_ array. The value in ebp
  // here corresponds to esp + kSystemPointerSize before PrepareCallApiFunction.
  __ lea(scratch, Operand(ebp, kSystemPointerSize + 2 * kSystemPointerSize));
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  __ sub(scratch, Immediate(kSystemPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp,
      (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
  Operand* const kUseStackSpaceConstant = nullptr;
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, kUseStackSpaceConstant,
                           return_value_operand);
}

void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  __ int3();  // Unused on this architecture.
}

namespace {

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };

// Expects registers:
// esi - source, aligned if alignment == ALIGNED
// edi - destination, always aligned
// ecx - count (copy size in bytes)
// edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm, Label* move_last_15,
                         Direction direction, Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
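  // The same loop body serves both directions: pointers are pre-decremented
  // for BACKWARD copies and post-incremented for FORWARD copies. The caller
  // picks the direction so stores never overrun unread source bytes.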
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks.
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}

void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}

}  // namespace

void Builtins::Generate_MemMove(MacroAssembler* masm) {
  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kSystemPointerSize;
  const int kSourceOffset = 2 * kSystemPointerSize;
  const int kSizeOffset = 3 * kSystemPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When non-overlapping region of src and dst is less than this,
  // use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below,
  // do not just change them and hope things will work!

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kSystemPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  __ cmp(dst, src);
  __ j(above, &backward);

  {
    // |dst| is a lower address than |src|. Copy front-to-back.
    Label unaligned_source, move_last_15, skip_last_move;
    __ mov(eax, src);
    __ sub(eax, dst);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &forward_much_overlap);
    // Copy first 16 bytes.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    // Determine distance to alignment: 16 - (dst & 0xF).
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(edx, Immediate(16));
    __ add(dst, edx);
    __ add(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at end of string.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(masm, &last_15_much_overlap, FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Find distance to alignment: dst & 0xF
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at beginning of string.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(masm, &first_15_much_overlap, BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
  {
    // Special handlers for 9 <= copy_size < 64. No assumptions about
    // alignment or move distance, so all reads must be unaligned and
    // must happen before any writes.
    Label f9_16, f17_32, f33_48, f49_63;

    __ bind(&f9_16);
    __ movsd(xmm0, Operand(src, 0));
    __ movsd(xmm1, Operand(src, count, times_1, -8));
    __ movsd(Operand(dst, 0), xmm0);
    __ movsd(Operand(dst, count, times_1, -8), xmm1);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f17_32);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f33_48);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f49_63);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, 0x20));
    __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, 0x20), xmm2);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&medium_size);  // Entry point into this block.
    __ mov(eax, count);
    __ dec(eax);
    __ shr(eax, 4);
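    // eax = (count - 1) / 16 selects the size class: 0 -> [9, 16],
    // 1 -> [17, 32], 2 -> [33, 48], 3 -> [49, 63] bytes.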
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(eax, 3);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }

    // Dispatch to handlers.
    Label eax_is_2_or_3;

    __ cmp(eax, 1);
    __ j(greater, &eax_is_2_or_3);
    __ j(less, &f9_16);  // eax == 0.
    __ jmp(&f17_32);     // eax == 1.

    __ bind(&eax_is_2_or_3);
    __ cmp(eax, 3);
    __ j(less, &f33_48);  // eax == 2.
    __ jmp(&f49_63);      // eax == 3.
  }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f5_8);
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }

    // Dispatch to handlers.
    Label count_is_above_3, count_is_2_or_3;

    __ cmp(count, 3);
    __ j(greater, &count_is_above_3);

    __ cmp(count, 1);
    __ j(greater, &count_is_2_or_3);
    __ j(less, &f0);  // count == 0.
    __ jmp(&f1);      // count == 1.

    __ bind(&count_is_2_or_3);
    __ cmp(count, 3);
    __ j(less, &f2);  // count == 2.
    __ jmp(&f3);      // count == 3.

    __ bind(&count_is_above_3);
    __ cmp(count, 5);
    __ j(less, &f4);  // count == 4.
    __ jmp(&f5_8);    // count in [5, 8].
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(masm);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32