// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/api-arguments.h"
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
// Tail-calls the Runtime_NewArray runtime function for the N-argument Array
// constructor case.  On entry: eax = argc, edi = constructor function,
// ebx = AllocationSite (or undefined).  The constructor and the site are
// pushed as extra arguments before the tail call.
void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ pop(ecx);  // Pop return address so extra args can go under it.
  // Store the constructor into the slot just past the last JS argument
  // (esp + eax * 4 now that the return address is popped).
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);  // Constructor function.
  __ push(ebx);  // AllocationSite or undefined.
  __ push(ecx);  // Re-push return address.
  // The runtime call consumes argc + 3 arguments (see pushes above).
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

// Truncates a double (passed on the stack) to a 32-bit integer following
// ECMA-262 ToInt32 semantics, leaving the result in this->destination().
// Uses x87 FISTTP when SSE3 is available, otherwise extracts the integer
// manually from the IEEE-754 bit pattern.
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register final_result_reg = this->destination();

  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 3 * kPointerSize;

  // The input double: low word (mantissa) and high word (sign/exponent/
  // upper mantissa bits).
  MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(esp, kArgumentOffset + kDoubleSize / 2));

  // Pick any scratch register that is not the requested result register.
  Register scratch1 = no_reg;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (final_result_reg != scratch1) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg == ecx ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead for
  // the result.
  Register save_reg = final_result_reg == ecx ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);

  // Extract the unbiased exponent into result_reg.
  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);  // Discard the value loaded onto the x87 stack above.
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);  // Shift of >31 means the result is zero.
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Reserve space for 64 bit answer.
    __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    __ mov(result_reg, exponent_operand);
    // Reconstruct the full significand, including the implicit hidden bit.
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  __ cmp(exponent_operand, Immediate(0));
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (final_result_reg != result_reg) {
    DCHECK(final_result_reg == ecx);
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}

// Computes Math.pow(base, exponent).  Fast paths: integer exponents use
// repeated squaring in SSE registers; non-integer exponents use the x87
// FYL2X/F2XM1/FSCALE sequence.  Anything that might be inexact (NaN,
// FPU exceptions, subnormal results) bails out to the C runtime.
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent == eax);
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == TAGGED) {
    // Untag a Smi exponent; a HeapNumber exponent is loaded as a double.
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
              FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // If the exponent is a whole number, use the integer fast path.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax,
              Immediate(0x5F));  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();  // Reset the FPU before falling back to the runtime.
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  // Exponentiation by squaring: multiply double_result by the running
  // square for each set bit of the exponent.
  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
    __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
    __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     4);
  }
  // Return value is in st(0) on ia32.
  // Store it into the (fixed) result register.
  __ sub(esp, Immediate(kDoubleSize));
  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));
  __ add(esp, Immediate(kDoubleSize));

  __ bind(&done);
  __ ret(0);
}

284

285
Movability CEntryStub::NeedsImmovableCode() { return kMovable; }
286

287 288
// Pre-generates the stubs that must exist before general stub generation
// can run (they are needed by the stub-generation machinery itself).
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
}

// Pre-generates the FP-register-saving CEntry stub.
void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
}

// Pre-generates the default (non-FP-saving, 1-result) CEntry stub so it is
// available in the stub cache before anything needs to call into C++.
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}

// The JS-to-C++ transition: enters an exit frame, calls the C function in
// ebx, and either returns the result or dispatches a returned exception
// sentinel to the pending exception handler.
void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Reserve space on the stack for the three arguments passed to the call. If
  // result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = 3;

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    DCHECK(!is_builtin_exit());
    __ EnterApiExitFrame(arg_stack_space);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles(),
        is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver  (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ call(ebx);

  // Result is in eax or edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        IsolateAddressId::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      IsolateAddressId::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_entrypoint_address(
      IsolateAddressId::kPendingHandlerEntrypointAddress, isolate());
  ExternalReference pending_handler_fp_address(
      IsolateAddressId::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      IsolateAddressId::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ mov(edi, Operand::StaticVariable(pending_handler_entrypoint_address));
  __ jmp(edi);
}

// The C++-to-JS entry: builds a JS entry frame, installs a stack handler
// that catches any JS exception (storing it as the pending exception and
// returning the exception sentinel), and calls the JS entry trampoline.
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  StackFrame::Type marker = type();
  __ push(Immediate(StackFrame::TypeToMarker(marker)));  // marker
  ExternalReference context_address(IsolateAddressId::kContextAddress,
                                    isolate());
  __ push(Operand::StaticVariable(context_address));  // context
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(IsolateAddressId::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(IsolateAddressId::kJSEntrySPAddress, isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(
      IsolateAddressId::kPendingExceptionAddress, isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  __ Call(EntryTrampoline(), RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(
      ExternalReference(IsolateAddressId::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}

void ProfileEntryHookStub::MaybeCallEntryHookDelayed(TurboAssembler* tasm,
517
                                                     Zone* zone) {
518
  if (tasm->isolate()->function_entry_hook() != nullptr) {
519
    tasm->CallStubDelayed(new (zone) ProfileEntryHookStub(nullptr));
520 521
  }
}
522

523
// Emits a call to the profiling entry-hook stub if the isolate has a
// function entry hook installed; otherwise emits nothing.
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != nullptr) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}

// Calls the isolate's function_entry_hook with (function address, original
// stack pointer), preserving the caller-saved registers eax/ecx/edx around
// the C call.
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  DCHECK_NOT_NULL(isolate()->function_entry_hook());
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  // Drop the two arguments pushed for the hook.
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}

template<class T>
564 565 566
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
567 568
    T stub(masm->isolate(),
           GetInitialFastElementsKind(),
569
           mode);
570
    __ TailCallStub(&stub);
571
  } else if (mode == DONT_OVERRIDE) {
572 573
    int last_index =
        GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
574 575 576 577 578
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
579
      T stub(masm->isolate(), kind);
580 581 582
      __ TailCallStub(&stub);
      __ bind(&next);
    }
583

584
    // If we reached this point there is a problem.
585
    __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
586 587 588
  } else {
    UNREACHABLE();
  }
589 590 591
}


592 593
// Dispatch for the one-argument Array constructor.  A single non-zero
// numeric argument forces a holey elements kind; the AllocationSite's
// recorded kind is updated accordingly before dispatching to the matching
// ArraySingleArgumentConstructorStub.
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(PACKED_ELEMENTS == 2);
  STATIC_ASSERT(HOLEY_ELEMENTS == 3);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);
  } else if (mode == DONT_OVERRIDE) {
    // is the low bit set? If so, we are holey and that is good.
    Label normal_sequence;
    __ test_b(edx, Immediate(1));
    __ j(not_zero, &normal_sequence);

    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, AbortReason::kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store r3
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field...upper bits need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(
        FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
        Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));

    __ bind(&normal_sequence);
    // Compare edx against each fast elements kind and tail-call the
    // matching single-argument constructor stub.
    int last_index =
        GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

// Pre-generates stub T for every fast elements kind, plus the
// DISABLE_ALLOCATION_SITES variant for kinds that track allocation sites.
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index =
      GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::ShouldTrack(kind)) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

// Pre-generates all Array and InternalArray constructor stub variants.
void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();

  ElementsKind kinds[2] = {PACKED_ELEMENTS, HOLEY_ELEMENTS};
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
695 696 697 698 699
    MacroAssembler* masm, AllocationSiteOverrideMode mode) {
  Label not_zero_case, not_one_case;
  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
700

701 702 703 704 705 706 707 708 709
  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);
  CreateArrayDispatchOneArgument(masm, mode);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
710

711 712
// Entry point for the Array constructor.  Dispatches to per-kind stubs based
// on AllocationSite feedback, or to the runtime for subclass construction
// (new.target != Array).
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
  //  -- ebx : AllocationSite or undefined
  //  -- edi : constructor
  //  -- edx : Original constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label subclassing;

  // Enter the context of the Array function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // new.target != constructor means we are constructing a subclass.
  __ cmp(edx, edi);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ mov(edx,
         FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  // Store the constructor past the last argument and push the extra
  // arguments expected by Runtime_NewArray.
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  __ add(eax, Immediate(3));
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(ebx);
  __ PushReturnAddressFrom(ecx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}

// Generates argc-based dispatch for an internal array constructor with the
// given elements kind, upgrading to the holey kind when a single non-zero
// length argument is passed.
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array
    // look at the first argument
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}

// Entry point for the InternalArray constructor stub. Reads the elements
// kind out of the constructor's initial map and dispatches to GenerateCase
// for PACKED_ELEMENTS or HOLEY_ELEMENTS (the only kinds internal arrays
// support, as the debug check below asserts).
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    // Internal arrays only ever use these two kinds.
    Label done;
    __ cmp(ecx, Immediate(PACKED_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(HOLEY_ELEMENTS));
    __ Assert(
        equal,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  // Dispatch on the kind. GenerateCase emits a complete tail-calling
  // sequence for each branch, so the HOLEY case never falls through into
  // the PACKED case.
  Label fast_elements_case;
  __ cmp(ecx, Immediate(PACKED_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, PACKED_ELEMENTS);
}

// Returns the stack slot (esp-relative) that holds outgoing API-call
// parameter |index| once PrepareCallApiFunction has set up the exit frame.
static Operand ApiParameterOperand(int index) {
  const int byte_offset = kPointerSize * index;
  return Operand(esp, byte_offset);
}


// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
  __ EnterApiExitFrame(argc);
  if (__ emit_debug_code()) {
    // Zap esi in debug builds so any use of the stale context is caught.
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


// Calls an API function.  Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.  Clobbers ebx, edi and
// caller-save registers.  Restores context.  On return removes
// stack_space * kPointerSize (GCed).
//
// Exactly one of |stack_space| / |stack_space_operand| is used to pop the
// caller's arguments: if |stack_space_operand| is non-null the amount is
// loaded from it at runtime (and |stack_space| must be 0), otherwise the
// compile-time |stack_space| is used. |thunk_last_arg| is the slot where the
// real callback address is stored when the profiler thunk is taken.
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand) {
  Isolate* isolate = masm->isolate();

  // Addresses of the current HandleScope's next/limit/level fields.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx == function_address);
  // Allocate HandleScope in callee-save registers.
  // ebx = saved next, edi = saved limit; both survive the C call below.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // If the CPU profiler is active, route the call through the thunk so the
  // profiler can attribute time to the callback; otherwise call directly.
  Label profiler_disabled;
  Label end_profiler_check;
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function.
  __ call(function_address);
  __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
  // If the callback grew the handle scope, extensions must be deleted.
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  if (stack_space_operand != nullptr) {
    // Runtime-determined amount of caller stack to pop, loaded into ebx
    // before the frame (and the operand's base) goes away.
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame();

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  // Accepted: Smi, name, JS receiver, heap number, undefined, true, false,
  // null. Anything else aborts.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(AbortReason::kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    // Pop return address, drop ebx bytes of arguments, jump back.
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the return value (eax) across the C call in edi.
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}

// Builds the FunctionCallbackArguments (FCA) frame on the stack, sets up a
// v8::FunctionCallbackInfo in non-GCed exit-frame space, and dispatches to
// the API callback via CallApiFunctionAndReturn.
void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  // The pushes below must produce exactly this layout (pushed in reverse
  // index order, holder ending up on top).
  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Remove the return address so the FCA slots sit directly on the
  // caller-pushed arguments; it is re-pushed after the frame is built.
  __ pop(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // call data
  __ push(call_data);

  // return value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  // return value default
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  // isolate
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  // holder
  __ push(holder);

  // call_data (ebx) has been pushed and is no longer needed as such.
  Register scratch = call_data;

  // scratch = base of the FCA array (FCA::kHolderIndex slot).
  __ mov(scratch, esp);

  // push return address
  __ push(return_address);

  // API function gets reference to the v8::Arguments. If CPU profiler
  // is enabled wrapper function will be called and we need to pass
  // address of the callback as additional parameter, always allocate
  // space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  // Advance scratch past the FCA slots to the last JS argument.
  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc()));

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Stores return the first js argument
  // (+2 skips the saved ebp and return address in the exit frame).
  int return_value_offset = 2 + FCA::kReturnValueOffset;
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  // Pop FCA slots, JS arguments and the receiver on return.
  const int stack_space = argc() + FCA::kArgsLength + 1;
  Operand* stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand);
}


// Builds the PropertyCallbackArguments array plus the property-name handle
// on the stack, materializes a v8::PropertyCallbackInfo in non-GCed
// exit-frame space, and invokes the accessor getter callback via
// CallApiFunctionAndReturn.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  // The push sequence below must mirror these indices (pushed in reverse,
  // should_throw_on_error ending up closest to the name handle).
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = ebx;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::kZero));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
  // space for optional callback address parameter (in case CPU profiler is
  // active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  // (+2 skips the return address and the name handle just pushed.)
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // it's args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  // The name handle sits one slot below the args_ array.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // Extract the raw C function pointer from the AccessorInfo's getter,
  // which is stored as a Foreign object.
  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, nullptr, return_value_operand);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32