// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x64/code-stubs-x64.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


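// Spills all caller-saved registers (and the XMM registers when
// save_doubles() is set), then calls the C++ store buffer overflow handler
// with the isolate as its single argument.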
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged.  SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
    Register input_reg = this->source();
    Register final_result_reg = this->destination();
    DCHECK(is_truncating());

    Label check_negative, process_64_bits, done;

    int double_offset = offset();

    // Account for return address and saved regs if input is rsp.
    if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

    MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
    MemOperand exponent_operand(MemOperand(input_reg,
                                           double_offset + kDoubleSize / 2));

    Register scratch1;
    Register scratch_candidates[3] = { rbx, rdx, rdi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }

    // Since we must use rcx for shifts below, use some other register (rax)
    // to calculate the result if rcx is the requested return register.
    Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
    // Save rcx if it isn't the return register and therefore volatile, or if
    // it is the return register, then save the temp register we use in its
    // stead for the result.
    Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
    __ pushq(scratch1);
    __ pushq(save_reg);

    bool stash_exponent_copy = !input_reg.is(rsp);
    __ movl(scratch1, mantissa_operand);
    __ Movsd(xmm0, mantissa_operand);
    __ movl(rcx, exponent_operand);
    if (stash_exponent_copy) __ pushq(rcx);

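    // Extract the unbiased binary exponent from the high word of the double:
    // mask out the exponent bits, shift them down, and subtract the bias.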
    __ andl(rcx, Immediate(HeapNumber::kExponentMask));
    __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
    __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
    __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
    __ j(below, &process_64_bits);

    // Result is entirely in lower 32-bits of mantissa
    int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
    __ subl(rcx, Immediate(delta));
    __ xorl(result_reg, result_reg);
    __ cmpl(rcx, Immediate(31));
    __ j(above, &done);
    __ shll_cl(scratch1);
    __ jmp(&check_negative);

    __ bind(&process_64_bits);
    __ Cvttsd2siq(result_reg, xmm0);
    __ jmp(&done, Label::kNear);

    // If the double was negative, negate the integer result.
    __ bind(&check_negative);
    __ movl(result_reg, scratch1);
    __ negl(result_reg);
    if (stash_exponent_copy) {
        __ cmpl(MemOperand(rsp, 0), Immediate(0));
    } else {
        __ cmpl(exponent_operand, Immediate(0));
    }
    __ cmovl(greater, result_reg, scratch1);

    // Restore registers
    __ bind(&done);
    if (stash_exponent_copy) {
        __ addp(rsp, Immediate(kDoubleSize));
    }
    if (!final_result_reg.is(result_reg)) {
        DCHECK(final_result_reg.is(rcx));
        __ movl(final_result_reg, result_reg);
    }
    __ popq(save_reg);
    __ popq(scratch1);
    __ ret(0);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ Movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ Cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ Movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ Xorpd(double_result, double_result);
      __ Subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ Xorpd(double_scratch, double_scratch);
      __ Addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ Sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ Subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ Xorpd(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ Xorpd(double_exponent, double_exponent);
      __ Addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ Sqrtsd(double_exponent, double_exponent);
      __ Divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ Movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ Movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ Movsd(double_scratch, double_base);     // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

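  // Square-and-multiply loop: double_scratch is squared on every iteration
  // and multiplied into double_result whenever the bit shifted out of the
  // exponent is set.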
  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register.  Exponent is already in xmm1.
    __ Movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ Movsd(double_result, xmm0);

    __ bind(&done);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
                     LoadDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(key, &slow);

  // Everything is fine, call runtime.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // receiver
  __ Push(key);       // key
  __ PushReturnAddressFrom(scratch);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time, or if the RegExp entry in generated code has been turned off (by a
  // runtime switch or at compilation time).
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
        kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or              number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
        external_string /* 8 */, check_underlying /* 5a */,
        not_seq_nor_cons /* 7 */, check_code /* E */,
        not_long_external /* 10 */;

  // (1) Sequential two byte?  If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (6).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons?  If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte?  If yes, go to (9).
  __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  __ testb(rbx, Immediate(kStringRepresentationMask));
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8)

  // (6) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: Pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(isolate()));
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
          kScratchRegister);

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
          Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movp(r9, Operand(kScratchRegister, 0));
  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addp(r9, Operand(kScratchRegister, 0));
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
          Immediate(0));
#else
  __ Set(r9, 0);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(
      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
  // r11: code
  // r14: slice offset
  // r15: original subject string

  // Argument 2: Previous index.
  __ movp(arg_reg_2, rbx);

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ addp(rbx, r14);
  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
  __ addp(r14, arg_reg_3);  // Using arg3 as scratch.

  // rbx: start index of the input
  // r14: end index of the input
  // r15: original subject string
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ leap(arg_reg_4,
         FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
  __ leap(arg_reg_3,
         FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ leap(arg_reg_4,
         FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
  __ leap(arg_reg_3,
         FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
  __ bind(&setup_rest);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use rbp, which points exactly to one pointer size below the previous rsp.
  // (Because creating a new stack frame pushes the previous rbp onto the stack
  // and thereby moves up rsp by one kPointerSize.)
  __ movp(arg_reg_1, r15);

  // Locate the code entry and call it.
  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(r15, &runtime);
  __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movp(rcx, rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastSubjectOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);
  __ movp(rax, rcx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastInputOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(
      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movp(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movp(rax, r15);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (7) Not a long external string?  If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(rbx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte?  If yes, go to (6).
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Goto (6).

  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte.  Go to (E).
  __ bind(&seq_two_byte_string);
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent. Go to (5a).
  // Load offset into r14 and replace subject string with parent.
  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}


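// Returns the comparison outcome (LESS or GREATER) that makes the condition
// |cc| evaluate to false; used when one operand is undefined or NaN.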
static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


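// Jumps to |fail| unless |input| matches the type that the CompareIC state
// |expected| promises: smis for SMI, smis or heap numbers for NUMBER; all
// other states accept any input here.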
static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


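// Jumps to |label| if |object| is a smi or is not an internalized string;
// |scratch| is clobbered.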
static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects, done;
  Condition cc = GetCondition();
  Factory* factory = isolate()->factory();

  Label miss;
  CheckInputType(masm, rdx, left(), &miss);
  CheckInputType(masm, rax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ JumpIfNotBothSmi(rax, rdx, &non_smi);
  __ subp(rdx, rax);
  __ j(no_overflow, &smi_done);
  __ notp(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
  __ bind(&smi_done);
  __ movp(rax, rdx);
  __ ret(0);
  __ bind(&non_smi);

  // The compare stub returns a positive, negative, or zero 64-bit integer
  // value in rax, corresponding to result of comparing the two inputs.
  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Two identical objects are equal unless they are both NaN or undefined.
  {
    Label not_identical;
    __ cmpp(rax, rdx);
    __ j(not_equal, &not_identical, Label::kNear);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Set(rax, NegativeComparisonResult(cc));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing - test it ourselves.
    Label heap_number;
    // If it's not a heap number, then return equal for (in)equality operator.
    __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(equal, &heap_number, Label::kNear);
    if (cc != equal) {
      __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
      __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
      // Call runtime on identical objects.  Otherwise return equal.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
      __ j(equal, &runtime_call, Label::kFar);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Set(rax, EQUAL);
    __ ret(0);

    __ bind(&heap_number);
    // It is a heap number, so return  equal if it's not NaN.
    // For NaN, return 1 for every condition except greater and
    // greater-equal.  Return -1 for them, so the comparison yields
    // false for all conditions except not-equal.
    __ Set(rax, EQUAL);
    __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ Ucomisd(xmm0, xmm0);
    __ setcc(parity_even, rax);
    // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
    if (cc == greater_equal || cc == greater) {
      __ negp(rax);
    }
    __ ret(0);

    __ bind(&not_identical);
  }

  if (cc == equal) {  // Both strict and non-strict.
    Label slow;  // Fallthrough label.

    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    if (strict()) {
      // If either is a Smi (we know that not both are), then they can only
      // be equal if the other is a HeapNumber. If so, use the slow case.
      {
        Label not_smis;
        __ SelectNonSmi(rbx, rax, rdx, &not_smis);

        // Check if the non-smi operand is a heap number.
        __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
               factory->heap_number_map());
        // If heap number, handle it in the slow case.
        __ j(equal, &slow);
        // Return non-equal.  ebx (the lower half of rbx) is not zero.
        __ movp(rax, rbx);
        __ ret(0);

        __ bind(&not_smis);
      }

      // If either operand is a JSObject or an oddball value, then they are not
      // equal since their pointers are different
      // There is no test for undetectability in strict equality.

      // If the first object is a JS object, we have done pointer comparison.
      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
      Label first_non_object;
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(below, &first_non_object, Label::kNear);
      // Return non-zero (rax holds a heap object pointer, which is never zero).
      Label return_not_equal;
      STATIC_ASSERT(kHeapObjectTag != 0);
      __ bind(&return_not_equal);
      __ ret(0);

      __ bind(&first_non_object);
      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &return_not_equal);

      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      // Fall through to the general case.
    }
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
  __ xorl(rax, rax);
  __ xorl(rcx, rcx);
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);
  // Return a result of -1, 0, or 1, based on EFLAGS.
  __ setcc(above, rax);
  __ setcc(below, rcx);
  __ subp(rax, rcx);
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ Set(rax, 1);
  } else {
    __ Set(rax, -1);
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rax, kScratchRegister);
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rdx, kScratchRegister);

    // We've already checked for object identity, so if both operands are
    // internalized strings they aren't equal. Register rax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
                                                    rdi, r8);
  }

#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Not strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ leap(rcx, Operand(rax, rdx, times_1, 0));
    __ testb(rcx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call, Label::kNear);

    __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal);

    __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in rax.
    __ ret(0);

    __ bind(&undetectable);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal);
    __ Set(rax, EQUAL);
    __ ret(0);
  }
  __ bind(&runtime_call);

  // Push arguments below the return address to prepare jump to builtin.
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rax);

  // Figure out which native to call and setup the arguments.
  if (cc == equal) {
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
  } else {
    __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


1343
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1344 1345 1346 1347
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
1348 1349
  FrameScope scope(masm, StackFrame::INTERNAL);

1350
  // Number-of-arguments register must be smi-tagged to call out.
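  // (The values pushed below live in an internal frame that the GC may scan,
  // so raw integers such as the argument count have to be valid smis first.)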
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  __ Push(rdi);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);
  __ Push(rbx);

  __ CallStub(stub);

  __ Pop(rbx);
  __ Pop(rdx);
  __ Pop(rdi);
  __ Pop(rax);
  __ SmiToInteger32(rax, rax);
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
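  //
  // The slot therefore progresses from uninitialized to monomorphic (a
  // WeakCell holding the function, or an AllocationSite for the Array
  // constructor) to megamorphic; only a cleared WeakCell lets it become
  // monomorphic again.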
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function,
      done_no_smi_convert;

  // Load the cache state into r11.
  __ SmiToInteger32(rdx, rdx);
  __ movp(r11,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorphic
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ jmp(&done);

  // An uninitialized cache is patched with a WeakCell holding the function,
  // or with an AllocationSite (which tracks the ElementsKind) if the function
  // is the Array constructor.
  __ bind(&initialize);

  // Make sure the function is the Array() function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &not_array_function);

  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done_no_smi_convert);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ jmp(&done_no_smi_convert);

  __ bind(&done);
  __ Integer32ToSmi(rdx, rdx);

  __ bind(&done_no_smi_convert);
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // rax : number of arguments
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : constructor function

  Label non_function;
  // Check that the constructor is not a smi.
  __ JumpIfSmi(rdi, &non_function);
  // Check that constructor is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  __ SmiToInteger32(rdx, rdx);
  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into rbx, or undefined.
  __ movp(rbx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(equal, &feedback_register_initialized, Label::kNear);
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(rbx);

  // Pass new target to construct stub.
  __ movp(rdx, rdi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);

  __ bind(&non_function);
  __ movp(rdx, rdi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // rdi - function
  // rdx - slot id
  // rbx - vector
  // rcx - allocation site (loaded from vector[slot]).
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
  __ cmpp(rdi, r8);
  __ j(not_equal, miss);

  __ movp(rax, Immediate(arg_count()));

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(CallICNexus::kCallCountIncrement));

  __ movp(rbx, rcx);
  __ movp(rdx, rdi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);
}


void CallICStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi - function
  // -- rdx - slot id
  // -- rbx - vector
  // -----------------------------------
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  StackArgumentsAccessor args(rsp, argc);
  ParameterCount actual(argc);

  // The checks. First, does rdi match the recorded monomorphic target?
  __ SmiToInteger32(rdx, rdx);
  __ movp(rcx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(rdi, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(CallICNexus::kCallCountIncrement));
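  // (The call count lives in the slot right after the feedback slot, hence
  // the extra kPointerSize in the operand above.)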

  __ bind(&call_function);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(rcx);
  __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));

  __ bind(&call);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(rdi, &miss);

  // Go to the miss case if we do not have a function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx);
  __ cmpp(rdi, rcx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset));
  __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX));
  __ cmpp(rcx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Initialize the call counter.
  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Smi::FromInt(CallICNexus::kCallCountIncrement));

  // Store the function. Use a stub since we need a frame for allocation.
  // rbx - vector
  // rdx - slot (needs to be in smi form)
  // rdi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);

    __ Integer32ToSmi(rdx, rdx);
    __ Push(rdi);
    __ CallStub(&create_stub);
    __ Pop(rdi);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  __ jmp(&call);

  // Unreachable
  __ int3();
}


void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Push the function and feedback info.
  __ Push(rdi);
  __ Push(rbx);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to rdi and exit the internal frame.
  __ movp(rdi, rax);
}


bool CEntryStub::NeedsImmovableCode() {
  return false;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
  save_doubles.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // rax: number of arguments including receiver
  // rbx: pointer to C function  (C callee-saved)
  // rbp: frame pointer of calling JS frame (restored after C call)
  // rsp: stack pointer  (restored after C call)
  // rsi: current context (restored)
  //
  // If argv_in_register():
  // r15: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

#ifdef _WIN64
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
  // stack to be aligned to 16 bytes. It only allows a single word to be
  // returned in register rax. Larger return sizes must be written to an address
  // passed as a hidden first argument.
  const Register kCCallArg0 = rcx;
  const Register kCCallArg1 = rdx;
  const Register kCCallArg2 = r8;
  const Register kCCallArg3 = r9;
  const int kArgExtraStackSpace = 2;
  const int kMaxRegisterResultSize = 1;
#else
  // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
  // are returned in rax, and a struct of two pointers is returned in rax+rdx.
  // Larger return sizes must be written to an address passed as a hidden first
  // argument.
  const Register kCCallArg0 = rdi;
  const Register kCCallArg1 = rsi;
  const Register kCCallArg2 = rdx;
  const Register kCCallArg3 = rcx;
  const int kArgExtraStackSpace = 0;
  const int kMaxRegisterResultSize = 2;
#endif  // _WIN64

  // Enter the exit frame that transitions from JavaScript to C++.
  int arg_stack_space =
      kArgExtraStackSpace +
      (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
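  // (For example, on Win64 a result_size() of 2 exceeds kMaxRegisterResultSize,
  // so two extra result slots are reserved on top of kArgExtraStackSpace; on
  // other platforms a two-word result still fits in rax:rdx and needs none.)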
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    __ EnterApiExitFrame(arg_stack_space);
    // Move argc into r14 (argv is already in r15).
    __ movp(r14, rax);
  } else {
    __ EnterExitFrame(arg_stack_space, save_doubles());
  }

  // rbx: pointer to builtin function  (C callee-saved).
  // rbp: frame pointer of exit frame  (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r15: argv pointer (C callee-saved).

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call C function. The arguments object will be created by stubs declared by
  // DECLARE_RUNTIME_FUNCTION().
  if (result_size() <= kMaxRegisterResultSize) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax), or a register pair (rax, rdx).
    __ movp(kCCallArg0, r14);  // argc.
    __ movp(kCCallArg1, r15);  // argv.
    __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
  } else {
    DCHECK_LE(result_size(), 3);
    // Pass a pointer to the result location as the first argument.
    __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    // Pass a pointer to the Arguments object as the second argument.
    __ movp(kCCallArg1, r14);  // argc.
    __ movp(kCCallArg2, r15);  // argv.
    __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
  }
  __ call(rbx);

  if (result_size() > kMaxRegisterResultSize) {
    // Read result values stored on stack. Result is stored
    // above the two Arguments object slots on Win64.
    DCHECK_LE(result_size(), 3);
    __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
    if (result_size() > 2) {
      __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
    }
  }
  // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(rax, Heap::kExceptionRootIndex);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    Operand pending_exception_operand =
        masm->ExternalOperand(pending_exception_address);
    __ cmpp(r14, pending_exception_operand);
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ movp(arg_reg_1, Immediate(0));  // argc.
    __ movp(arg_reg_2, Immediate(0));  // argv.
    __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
    __ PrepareCallCFunction(3);
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
  __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
  __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (rsi == 0) for non-JS frames.
  Label skip;
  __ testp(rsi, rsi);
  __ j(zero, &skip, Label::kNear);
  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
  __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
  __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  __ jmp(rdi);
}


void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {  // NOLINT. Scope block confuses linter.
    MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
    // Set up frame.
    __ pushq(rbp);
    __ movp(rbp, rsp);

    // Push the stack frame type marker twice.
    int marker = type();
    // Scratch register is neither callee-save, nor an argument register on any
    // platform. It's free to use at this point.
    // Cannot use smi-register for loading yet.
    __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
    __ Push(kScratchRegister);  // context slot
    __ Push(kScratchRegister);  // function slot
    // Save callee-saved registers (X64/X32/Win64 calling conventions).
    __ pushq(r12);
    __ pushq(r13);
    __ pushq(r14);
    __ pushq(r15);
#ifdef _WIN64
    __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
    __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
    __ pushq(rbx);

#ifdef _WIN64
    // On Win64 XMM6-XMM15 are callee-save
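    // (xmm6..xmm15 are ten 128-bit registers; the block reserved below via
    // EntryFrameConstants::kXMMRegistersBlockSize has to hold all of them.)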
    __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
#endif

    // Set up the roots and smi constant registers.
    // Needs to be done before any further smi loads.
    __ InitializeRootRegister();
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  {
    Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Push(c_entry_fp_operand);
  }

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ Load(rax, js_entry_sp);
  __ testp(rax, rax);
  __ j(not_zero, &not_outermost_js);
  __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ movp(rax, rbp);
  __ Store(js_entry_sp, rax);
  Label cont;
  __ jmp(&cont);
  __ bind(&not_outermost_js);
  __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ Store(pending_exception, rax);
  __ LoadRoot(rax, Heap::kExceptionRootIndex);
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Clear any pending exceptions.
  __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
  __ Store(pending_exception, rax);

  // Fake a receiver (NULL).
  __ Push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. We load the address from an
  // external reference instead of inlining the call target address directly
  // in the code, because the builtin stubs may not have been generated yet
  // at the time this code is generated.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ Load(rax, construct_entry);
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ Load(rax, entry);
  }
  __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
  __ call(kScratchRegister);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ Pop(rbx);
  __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ Move(kScratchRegister, js_entry_sp);
  __ movp(Operand(kScratchRegister, 0), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Pop(c_entry_fp_operand);
  }

  // Restore callee-saved registers (X64 conventions).
#ifdef _WIN64
  // On Win64 XMM6-XMM15 are callee-save
  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
  __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
#endif

  __ popq(rbx);
#ifdef _WIN64
  // Callee save in Win64 ABI, arguments/volatile in AMD64 ABI.
  __ popq(rsi);
  __ popq(rdi);
#endif
  __ popq(r15);
  __ popq(r14);
  __ popq(r13);
  __ popq(r12);
  __ addp(rsp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ popq(rbp);
  __ ret(0);
}


void InstanceOfStub::Generate(MacroAssembler* masm) {
  Register const object = rdx;              // Object (lhs).
  Register const function = rax;            // Function (rhs).
  Register const object_map = rcx;          // Map of {object}.
  Register const function_map = r8;         // Map of {function}.
  Register const function_prototype = rdi;  // Prototype of {function}.

  DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
  DCHECK(function.is(InstanceOfDescriptor::RightRegister()));

  // Check if {object} is a smi.
  Label object_is_smi;
  __ JumpIfSmi(object, &object_is_smi, Label::kNear);

  // Lookup the {function} and the {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Label fast_case, slow_case;
  __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);
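  // (Cache hit: the answer root slot still holds the boolean result recorded
  // for this {function} / {object} map pair, so it is returned directly.)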

  // If {object} is a smi we can safely return false if {function} is a JS
  // function, otherwise we have to miss to the runtime and throw an exception.
  __ bind(&object_is_smi);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ ret(0);

  // Fast-case: The {function} must be a valid JSFunction.
  __ bind(&fast_case);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);

  // Ensure that {function} has an instance prototype.
  __ testb(FieldOperand(function_map, Map::kBitFieldOffset),
           Immediate(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &slow_case);

  // Get the "prototype" (or initial map) of the {function}.
  __ movp(function_prototype,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  __ AssertNotSmi(function_prototype);

  // Resolve the prototype if the {function} has an initial map.  Afterwards the
  // {function_prototype} will be either the JSReceiver prototype object or the
  // hole value, which means that no instances of the {function} were created so
  // far and hence we should return false.
  Label function_prototype_valid;
  Register const function_prototype_map = kScratchRegister;
  __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
  __ j(not_equal, &function_prototype_valid, Label::kNear);
  __ movp(function_prototype,
          FieldOperand(function_prototype, Map::kPrototypeOffset));
  __ bind(&function_prototype_valid);
  __ AssertNotSmi(function_prototype);

  // Update the global instanceof cache with the current {object} map and
  // {function}.  The cached answer will be set when it is known below.
  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);

  // Loop through the prototype chain looking for the {function} prototype.
  // Assume true, and change to false if not found.
  Label done, loop, fast_runtime_fallback;
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ bind(&loop);

  __ testb(FieldOperand(object_map, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &fast_runtime_fallback, Label::kNear);
  __ CmpInstanceType(object_map, JS_PROXY_TYPE);
  __ j(equal, &fast_runtime_fallback, Label::kNear);

  __ movp(object, FieldOperand(object_map, Map::kPrototypeOffset));
  __ cmpp(object, function_prototype);
  __ j(equal, &done, Label::kNear);
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ j(not_equal, &loop);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // Found Proxy or access check needed: Call the runtime.
  __ bind(&fast_runtime_fallback);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(object);
  __ Push(function_prototype);
  __ PushReturnAddressFrom(kScratchRegister);
  // Invalidate the instanceof cache.
  __ Move(rax, Smi::FromInt(0));
  __ StoreRoot(rax, Heap::kInstanceofCacheFunctionRootIndex);
  __ TailCallRuntime(Runtime::kHasInPrototypeChain);

  // Slow-case: Call the %InstanceOf runtime function.
  __ bind(&slow_case);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(object);
  __ Push(function);
  __ PushReturnAddressFrom(kScratchRegister);
  __ TailCallRuntime(Runtime::kInstanceOf);
}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into result register.
    __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string trigger the non-string case.
    __ testb(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is non-smi trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiToInteger32(index_, index_);

  StringCharLoadGenerator::Generate(
      masm, object_, index_, result_, &call_runtime_);

  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Push(LoadWithVectorDescriptor::VectorRegister());
    __ Push(LoadDescriptor::SlotRegister());
  }
  __ Push(object_);
  __ Push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
  } else {
    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi);
  }
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  __ Pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Pop(LoadDescriptor::SlotRegister());
    __ Pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
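  // Character codes up to String::kMaxOneByteCharCode are served from the
  // single character string cache below; an undefined entry means the string
  // has not been cached and we fall through to the slow case.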
  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
  __ j(above, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          String::Encoding encoding) {
  // Nothing to do for zero characters.
  Label done;
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }
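  // (A two-byte string with N characters thus copies 2 * N bytes; the
  // byte-wise loop below then works for either encoding.)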

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incp(src);
  __ incp(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : to
  //  rsp[16] : from
  //  rsp[24] : string

  enum SubStringStubArgumentIndices {
    STRING_ARGUMENT_INDEX,
    FROM_ARGUMENT_INDEX,
    TO_ARGUMENT_INDEX,
    SUB_STRING_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);

  // Make sure first argument is a string.
  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  __ SmiCompare(rcx, Smi::FromInt(1));
  __ j(equal, &single_char);

  __ SmiToInteger32(rcx, rcx);

  // rax: string
  // rbx: instance type
  // rcx: sub string length
  // rdx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into rdi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ testb(rbx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  __ testb(rbx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string.  Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string.  Fetch parent and correct start index by offset.
  __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string.  Just move string to the correct register.
  __ movp(rdi, rax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // rdi: underlying subject string
    // rbx: instance type of underlying subject string
    // rdx: adjusted start index (smi)
    // rcx: length
    // If coming from the make_two_character_string path, the string
    // is too short to be sliced anyway.
    __ cmpp(rcx, Immediate(SlicedString::kMinLength));
    // Short slice.  Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string.  At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string.  It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyway due to externalized strings.
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ testb(rbx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
    __ bind(&set_slice_header);
    __ Integer32ToSmi(rcx, rcx);
    __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
    __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
           Immediate(String::kEmptyHashField));
    __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
    __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // rdi: underlying subject string
  // rbx: instance type of underlying subject string
  // rdx: adjusted start index (smi)
  // rcx: length
  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label two_byte_sequential, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(rbx, Immediate(kExternalStringTag));
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kShortExternalStringMask));
  __ j(not_zero, &runtime);
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  __ bind(&sequential_string);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(zero, &two_byte_sequential);

  // Allocate the result.
  __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                        SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  __ bind(&two_byte_sequential);
  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                        SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString);

  __ bind(&single_char);
  // rax: string
  // rbx: instance type
  // rcx: sub string length (smi)
  // rdx: from index (smi)
  StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
                                  &runtime, STRING_INDEX_IS_NUMBER,
                                  RECEIVER_IS_STRING);
  generator.GenerateFast(masm);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}


void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  Label not_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  __ Ret();
  __ bind(&not_smi);

  Label not_heap_number;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();
  __ bind(&not_heap_number);

  Label not_string, slow_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: object
  // rdi: object map
  __ j(above_equal, &not_string, Label::kNear);
  // Check if string has a cached array index.
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &slow_string, Label::kNear);
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);
  __ Ret();
  __ bind(&slow_string);
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kStringToNumber);
  __ bind(&not_string);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToNumber);
}


void ToLengthStub::Generate(MacroAssembler* masm) {
  // The ToLength stub takes one argument in rax.
  Label not_smi, positive_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  STATIC_ASSERT(kSmiTag == 0);
  __ testp(rax, rax);
  __ j(greater_equal, &positive_smi, Label::kNear);
  __ xorl(rax, rax);
  __ bind(&positive_smi);
  __ Ret();
  __ bind(&not_smi);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToLength);
}


void ToStringStub::Generate(MacroAssembler* masm) {
  // The ToString stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above_equal, &not_string, Label::kNear);
  __ Ret();
  __ bind(&not_string);

  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToString);
}


void ToNameStub::Generate(MacroAssembler* masm) {
  // The ToName stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  Label not_name;
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CmpObjectType(rax, LAST_NAME_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above, &not_name, Label::kNear);
  __ Ret();
  __ bind(&not_name);

  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToName);
}


void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}


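// Illustrative sketch only (pseudo-code): the lexicographic comparison below
// is roughly
//
//   n = min(left.length, right.length);
//   for (i = 0; i < n; i++)
//     if (left[i] != right[i]) return left[i] < right[i] ? LESS : GREATER;
//   if (left.length == right.length) return EQUAL;
//   return left.length < right.length ? LESS : GREATER;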
void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}


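// The loop below biases both string pointers past their character data and
// runs the index from -length up to 0, so the increment's zero flag doubles
// as the loop condition. Rough C-like sketch (illustrative only):
//
//   l += length; r += length; i = -length;
//   do {
//     if (l[i] != r[i]) goto chars_not_equal;
//   } while (++i != 0);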
void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left string
  //  -- rax    : right string
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertString(rdx);
  __ AssertString(rax);

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
  __ Ret();

  __ bind(&not_same);

  // Check that both are sequential one-byte strings.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of one-byte strings.
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
  StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi,
                                                  r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rax);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kStringCompare);
}

void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site.  We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate this
  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;
  Label::Distance const miss_distance =
      masm->emit_debug_code() ? Label::kFar : Label::kNear;

  __ JumpIfSmi(rdx, &miss, miss_distance);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ JumpIfSmi(rax, &miss, miss_distance);
  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  if (!Token::IsEqualityOp(op())) {
    __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
    __ AssertSmi(rax);
    __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
    __ AssertSmi(rdx);
    __ pushq(rax);
    __ movq(rax, rdx);
    __ popq(rdx);
  }
  __ subp(rax, rdx);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}


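// For ordered Smi comparisons the stub below returns rdx - rax; if that
// subtraction overflows, the sign is wrong, so it is fixed up with a bitwise
// NOT, which flips the sign while keeping the value non-zero. Rough sketch
// (illustrative only):
//
//   result = left - right;            // may overflow
//   if (overflow) result = ~result;   // restore the correct sign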
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


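// Illustrative sketch only: the stub below converts both operands to doubles,
// compares them with ucomisd, falls back to the generic stub on NaN (parity
// flag set), and otherwise builds -1/0/+1 without branching:
//
//   rax = (left > right) ? 1 : 0;   // setcc(above)
//   rax -= (left < right) ? 1 : 0;  // sbb subtracts the carry flag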
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);  // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


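// The string CompareIC below tries progressively slower paths: identical
// pointers, then (for equality only) "both internalized, hence not equal",
// then an inline flat one-byte comparison, and finally the runtime.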
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);

  DCHECK_EQ(equal, GetCondition());
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ GetWeakValue(rdi, cell);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    __ subp(rax, rdx);
    __ ret(0);
  } else {
    __ PopReturnAddressTo(rcx);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallRuntime(Runtime::kCompareIC_Miss);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}


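// NameDictionary probing below is quadratic over a power-of-two table; for
// probe i the slot is (hash + i + i * i) & mask with mask = capacity - 1, and
// each entry spans NameDictionary::kEntrySize (3) pointer-sized words, hence
// the "index *= 3" scaling.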
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if we found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false.  That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non-zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if we found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}


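// Outline (approximate): a remembered-set entry is only needed for an
// old-to-new pointer, i.e. when the stored value is in new space and the
// holding object is not. In either case the incremental marker is consulted
// first; afterwards the stub either updates the remembered set or returns.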
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}


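// Outline (approximate): the check below first decrements the page's write
// barrier counter and takes the slow path when it goes negative. Otherwise
// the marker only needs to be informed when the object is already black and
// the written value is still white, or (when compacting) when the value lives
// on an evacuation candidate page whose slots must be recorded.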
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}

void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}


void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


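// The feedback array scanned below holds (map WeakCell, handler code) pairs.
// Rough illustration (pseudo-code only):
//
//   for (i = 0; i < length; i += 2)
//     if (feedback[i] == receiver_map) tail_call(feedback[i + 1]);
//   goto miss;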
static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, Register scratch3,
                             bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register counter = scratch1;
  Register length = scratch2;
  Register cached_map = scratch3;

  __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic we have to make sure we don't go past the
    // end of the feedback array.
    __ cmpl(length, Immediate(2));
    __ j(equal, miss);
  }
  __ movl(counter, Immediate(2));

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(2));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}


static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register integer_slot,
                                  Label* compare_map, Label* load_smi_map,
                                  Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ movp(receiver_map, FieldOperand(receiver, 0));

  __ bind(compare_map);
  __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
  __ j(not_equal, try_array);
  Register handler = feedback;
  __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}


void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register name = LoadWithVectorDescriptor::NameRegister();          // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register key = LoadWithVectorDescriptor::NameRegister();           // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorKeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);

  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
                                               receiver, key, feedback, no_reg);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


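// Keyed-store feedback, unlike the load case above, is scanned in groups of
// three: (map WeakCell, transition map WeakCell or undefined, handler code).
// Rough illustration (pseudo-code only):
//
//   for (i = 0; i < length; i += 3) {
//     if (feedback[i] != receiver_map) continue;
//     if (feedback[i + 1] != undefined) receiver_map = feedback[i + 1];
//     tail_call(feedback[i + 2]);
//   }
//   goto miss;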
static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
                                            Register receiver_map,
                                            Register feedback, Register scratch,
                                            Register scratch1,
                                            Register scratch2, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label transition_call;

  Register cached_map = scratch;
  Register counter = scratch1;
  Register length = scratch2;

  // Polymorphic, we have to loop from 0 to N - 1
  __ movp(counter, Immediate(0));
  __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ SmiToInteger32(length, length);

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize + kPointerSize));
  __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &transition_call);
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&transition_call);
  DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  // The weak cell may have been cleared.
  __ JumpIfSmi(receiver_map, miss);
  // Get the handler in value.
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(3));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}


void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm,
                                          bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
                                  r15, r14, &miss);

  __ bind(&not_array);
  Label try_poly_name;
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);

  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(rbx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
         Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address and store it in the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}


template<class T>
4118 4119 4120
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
4121
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4122
    __ TailCallStub(&stub);
4123 4124 4125 4126 4127 4128 4129 4130
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
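    // The asserts above guarantee that each holey kind is the odd member of
    // its packed/holey pair, so the low bit of the kind distinguishes them.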

    // is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // look at the first argument
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store rdx
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field; the upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ testp(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : new target
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check below also catches a NULL pointer, since kSmiTag == 0.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Enter the context of the Array function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If rbx is the undefined value (i.e. no AllocationSite was passed), call
  // an array constructor that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing
  __ bind(&subclassing);
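  // The runtime call below expects the constructor (stored into the receiver
  // slot), the new target and the AllocationSite as extra arguments, which is
  // why the argument count is bumped by three in each case.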
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE: {
      StackArgumentsAccessor args(rsp, rax);
      __ movp(args.GetReceiverOperand(), rdi);
      __ addp(rax, Immediate(3));
      break;
    }
    case NONE: {
      StackArgumentsAccessor args(rsp, 0);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 3);
      break;
    }
    case ONE: {
      StackArgumentsAccessor args(rsp, 1);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 4);
      break;
    }
  }
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rbx);
  __ PushReturnAddressFrom(rcx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array
    // look at the first argument
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check below also catches a NULL pointer, since kSmiTag == 0.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rdx    : new target
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);
  __ AssertReceiver(rdx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(rcx, &new_object);
  __ CmpObjectType(rcx, MAP_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leal(rbx, Operand(rbx, times_pointer_size, 0));
  __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ movp(Operand(rax, JSObject::kMapOffset), rcx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(Operand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(Operand(rax, JSObject::kElementsOffset), rbx);
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ leap(rbx, Operand(rax, JSObject::kHeaderSize));

  // ----------- S t a t e -------------
  //  -- rax    : result (untagged)
  //  -- rbx    : result fields (untagged)
  //  -- rdi    : result end (untagged)
  //  -- rcx    : initial map
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
  __ testl(FieldOperand(rcx, Map::kBitField3Offset),
           Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // Initialize all in-object fields with undefined.
    __ InitializeFieldsWithFiller(rbx, rdi, r11);

    // Add the object tag to make the JSObject real.
    STATIC_ASSERT(kHeapObjectTag == 1);
    __ incp(rax);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ subl(FieldOperand(rcx, Map::kBitField3Offset),
            Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined.
    __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
    __ negp(rdx);
    __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
    __ InitializeFieldsWithFiller(rbx, rdx, r11);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(rdx, rdi, r11);

    // Add the object tag to make the JSObject real.
    STATIC_ASSERT(kHeapObjectTag == 1);
    __ incp(rax);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ testl(FieldOperand(rcx, Map::kBitField3Offset),
             Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rax);
      __ Push(rcx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(rax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rbx, rbx);
    __ Push(rcx);
    __ Push(rbx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(rcx);
  }
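  // The runtime returns a tagged object; untag it so the code at
  // done_allocate sees the same untagged result as the inline allocation.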
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ decp(rax);
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(rcx);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kNewObject);
}


void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // For Ignition we need to skip all possible handler/stub frames until
  // we reach the JavaScript frame for the function (similar to what the
  // runtime fallback implementation does). So make rdx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ movp(rdx, rbp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kMarkerOffset));
    __ j(not_equal, &loop);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ SmiToInteger32(
      rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ subl(rax, rcx);
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- rsi    : context
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Setup the rest parameter array in rax.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the receiver).
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- rsi    : context
    //  -- rax    : number of rest parameters
    //  -- rbx    : pointer to first rest parameters
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ leal(rcx, Operand(rax, times_pointer_size,
                         JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(rcx, rdx, rdi, no_reg, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Compute the arguments.length in rdi.
    __ Integer32ToSmi(rdi, rax);

    // Setup the elements array in rdx.
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
    __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
    {
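      // Copy the rest parameters into the elements backing store: rbx steps
      // down the stack by one pointer per iteration while the FixedArray
      // index rcx advances.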
      Label loop, done_loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
      __ movp(
          FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
      __ subp(rbx, Immediate(1 * kPointerSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in rax.
    __ leap(rax,
            Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rbx);
      __ Push(rcx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ movp(rdx, rax);
      __ Pop(rbx);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
    __ jmp(&done_allocate);
  }
}


void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ leap(rdx, Operand(rbp, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ Integer32ToSmi(rcx, rcx);

  // rcx : number of parameters (tagged)
  // rdx : parameters pointer
  // rdi : function
  // rsp[0] : return address
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  Factory* factory = isolate()->factory();

  __ SmiToInteger64(rbx, rcx);
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(r8, Operand(rax, StandardFrameConstants::kContextOffset));
  __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(r11, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(
      r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rax, r11, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));

  // rbx = parameter count (untagged)
  // r11 = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, r11) in rbx.
  __ cmpp(rbx, r11);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, r11);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // r11 = argument count (untagged)
  // Get the arguments map from the current native context into r9.
  Label has_mapped_parameters, instantiate;
  __ movp(r9, NativeContextOperand());
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (untagged)
  // r9 = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  __ AssertNotSmi(rdi);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: r11 is tagged from here on.
  __ Integer32ToSmi(r11, r11);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, rcx);
  __ subp(r8, r9);
  __ movp(rcx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ SmiToInteger64(r9, r9);
  // r9 = loop variable (untagged)
  // r8 = mapping index (tagged)
  // rcx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ subp(r9, Immediate(1));
  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ testp(r9, r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // r11 = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  // Untag r11 for the loop below.
  __ SmiToInteger64(r11, r11);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, r11);
  __ j(less, &arguments_loop, Label::kNear);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  // r11 = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(r11, r11);
  __ PopReturnAddressTo(rax);
  __ Push(rdi);  // Push function.
  __ Push(rdx);  // Push parameters pointer.
  __ Push(r11);  // Push parameter count.
  __ PushReturnAddressFrom(rax);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // For Ignition we need to skip all possible handler/stub frames until
  // we reach the JavaScript frame for the function (similar to what the
  // runtime fallback implementation does). So make rdx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ movp(rdx, rbp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kMarkerOffset));
    __ j(not_equal, &loop);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    __ leap(rbx, Operand(rdx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiToInteger32(
        rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- rax    : number of arguments
  //  -- rbx    : pointer to the first argument
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize +
                                                    FixedArray::kHeaderSize));
  __ Allocate(rcx, rdx, rdi, no_reg, &allocate, TAG_OBJECT);
  __ bind(&done_allocate);

  // Compute the arguments.length in rdi.
  __ Integer32ToSmi(rdi, rax);

  // Setup the elements array in rdx.
  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
  __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
  {
    Label loop, done_loop;
    __ Set(rcx, 0);
    __ bind(&loop);
    __ cmpl(rcx, rax);
    __ j(equal, &done_loop, Label::kNear);
    __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
    __ movp(
        FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
        kScratchRegister);
    __ subp(rbx, Immediate(1 * kPointerSize));
    __ addl(rcx, Immediate(1));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in rax.
  __ leap(rax,
          Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Integer32ToSmi(rcx, rcx);
    __ Push(rax);
    __ Push(rbx);
    __ Push(rcx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ movp(rdx, rax);
    __ Pop(rbx);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ jmp(&done_allocate);
}


void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register result_reg = rax;
  Label slow_case;

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell value at the specified slot.
  __ movp(result_reg, ContextOperand(context_reg, slot_reg));
  __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));

  // Check that value is not the_hole.
  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow_case, Label::kNear);
  __ Ret();

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}


void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register value_reg = rax;
  Register cell_reg = r8;
  Register cell_details_reg = rdx;
  Register cell_value_reg = r9;
  Label fast_heapobject_case, fast_smi_case, slow_case;
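  // The fast paths below cover mutable data cells, stores of a value equal to
  // the current one, and constant-type data cells whose old and new values
  // are both Smis or share a map; anything else goes to the runtime.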

  if (FLAG_debug_code) {
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell at the specified slot.
  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type, kind and
  // READ_ONLY bit of attributes).
  __ SmiToInteger32(cell_details_reg,
                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ andl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kMutable) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmpp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ testl(cell_details_reg,
           Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstant) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstantType) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kUndefined) |
                      PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kConstantType) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ movp(cell_value_map_reg,
          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that fits into int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


// Prepares the stack to put arguments (aligns and so on). The WIN64 calling
// convention requires the pointer to the return value slot to be passed in
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves the
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed), accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}


// Calls an API function.  Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.  Clobbers r14, r15, rbx and
// caller-save registers.  Restores context.  On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
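  // If the profiler is active, route the call through the profiling thunk so
  // the callback entry and exit can be recorded; the real callback address is
  // handed to the thunk as its last argument.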
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Profiler is disabled: call the api function directly.
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the api function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue
  __ movp(rax, return_value_operand);
  __ bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  __ Move(rdi, scheduled_exception_address);
  __ Cmp(Operand(rdi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}

static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined, bool is_lazy) {
  // ----------- S t a t e -------------
  //  -- rdi                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  -- rax                 : number of arguments if argc is a register
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rdi;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register context = rsi;
  Register return_address = r8;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);
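  // The seven implicit FunctionCallbackArguments slots are pushed between the
  // return address and the explicit JS arguments, with the holder ending up
  // on top, just below the return address.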

  DCHECK(argc.is_immediate() || rax.is(argc.reg()));

  __ PopReturnAddressTo(return_address);

  // context save
  __ Push(context);

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);
  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push return address back on stack.
  __ PushReturnAddressFrom(return_address);

  if (!is_lazy) {
    // load context from callee
    __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // Allocate the FunctionCallbackInfo structure in the arguments' space,
  // since it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
  if (argc.is_immediate()) {
    __ addp(scratch, Immediate((argc.immediate() + FCA::kArgsLength - 1) *
                               kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ Set(StackSpaceOperand(2), argc.immediate());
    // FunctionCallbackInfo::is_construct_call_.
    __ Set(StackSpaceOperand(3), 0);
  } else {
    __ leap(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                             (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ movp(StackSpaceOperand(2), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    __ leap(argc.reg(), Operand(argc.reg(), times_pointer_size,
                                (FCA::kArgsLength + 1) * kPointerSize));
    __ movp(StackSpaceOperand(3), argc.reg());
  }
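  // In the register-argc case the is_construct_call_ slot doubles as the
  // number of bytes to drop from the stack on return; it is handed to
  // CallApiFunctionAndReturn via stack_space_operand below.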

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif
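  // arguments_arg and callback_arg are the first two C argument registers of
  // the host ABI (Windows vs. System V x64).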

  // It's okay if api_function_address == callback_arg
  // but not arguments_arg
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::FunctionCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Accessor for FunctionCallbackInfo and first js arg.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand is_construct_call_operand = StackSpaceOperand(3);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      return_first_arg ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  int stack_space = 0;
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
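  // With an immediate argument count the stack space to release (implicit
  // args, JS arguments and the receiver) is a compile-time constant, so no
  // stack_space_operand is needed.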
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(rax), false,
                            call_data_undefined, false);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  bool is_lazy = this->is_lazy();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined, is_lazy);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                          : return address
  //  -- rsp[8]                          : name
  //  -- rsp[16 .. (16 + kArgsLength*8)] : v8::PropertyCallbackInfo::args_
  //  -- ...
  //  -- r8                              : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = ApiGetterDescriptor::function_address();
  DCHECK(api_function_address.is(r8));
  Register scratch = rax;

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load address of v8::PropertyCallbackInfo::args_ array.
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  __ leap(name_arg, Operand(scratch, -kPointerSize));
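  // name_arg now points at the name handle slot, one word below args_ (see
  // the state comment above).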
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           nullptr);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64