// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
#error This header must be included via macro-assembler.h
#endif

#ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_

#include "src/codegen/assembler.h"
#include "src/codegen/bailout-reason.h"
#include "src/globals.h"
#include "src/ia32/assembler-ia32.h"

namespace v8 {
namespace internal {

// Convenience for platform-independent signatures.  We do not normally
// distinguish memory operands from other operands on ia32.
typedef Operand MemOperand;

enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };

class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
 public:
  using TurboAssemblerBase::TurboAssemblerBase;

  void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
                     Label* condition_met,
                     Label::Distance condition_met_distance = Label::kFar);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
    // Out-of-line constant pool not implemented on ia32.
    UNREACHABLE();
  }
  void LeaveFrame(StackFrame::Type type);

// Allocate stack space of given size (i.e. decrement {esp} by the value
// stored in the given register, or by a constant). If you need to perform a
// stack check, do it before calling this function because this function may
// write into the newly allocated space. It may also overwrite the given
// register's value, in the version that takes a register.
#ifdef V8_OS_WIN
  void AllocateStackSpace(Register bytes_scratch);
  void AllocateStackSpace(int bytes);
#else
  void AllocateStackSpace(Register bytes) { sub(esp, bytes); }
  void AllocateStackSpace(int bytes) { sub(esp, Immediate(bytes)); }
#endif
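
  // Illustrative usage sketch: reserve three word-sized slots and write into
  // the first one (any required stack check must already have been done).
  //
  //   AllocateStackSpace(3 * kSystemPointerSize);
  //   mov(Operand(esp, 0 * kSystemPointerSize), eax);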

  // Print a message to stdout and abort execution.
  void Abort(AbortReason reason);

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, AbortReason reason);

  // Like Assert(), but without condition.
  // Use --debug_code to enable.
  void AssertUnreachable(AbortReason reason);

  // Like Assert(), but always enabled.
  void Check(Condition cc, AbortReason reason);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Move a constant into a destination using the most efficient encoding.
  void Move(Register dst, const Immediate& src);
  void Move(Register dst, Smi src) { Move(dst, Immediate(src)); }
  void Move(Register dst, Handle<HeapObject> src);
  void Move(Register dst, Register src);
  void Move(Operand dst, const Immediate& src);

  // Move an immediate into an XMM register.
  void Move(XMMRegister dst, uint32_t src);
  void Move(XMMRegister dst, uint64_t src);
  void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
  void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }

  void Call(Register reg) { call(reg); }
  void Call(Label* target) { call(target); }
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  void CallBuiltinPointer(Register builtin_pointer) override;

  void LoadCodeObjectEntry(Register destination, Register code_object) override;
  void CallCodeObject(Register code_object) override;
  void JumpCodeObject(Register code_object) override;

  void RetpolineCall(Register reg);
  void RetpolineCall(Address destination, RelocInfo::Mode rmode);

  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void RetpolineJump(Register reg);

  void CallForDeoptimization(Address target, int deopt_id);

  // Call a runtime routine. This expects {centry} to contain a fitting CEntry
  // builtin for the target runtime function and uses an indirect call.
  void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }
  // Jump if the operand is a smi.
  inline void JumpIfSmi(Operand value, Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }

  void JumpIfEqual(Register a, int32_t b, Label* dest) {
    cmp(a, Immediate(b));
    j(equal, dest);
  }

  void JumpIfLessThan(Register a, int32_t b, Label* dest) {
    cmp(a, Immediate(b));
    j(less, dest);
  }

  void SmiUntag(Register reg) { sar(reg, kSmiTagSize); }

  // Removes the current frame and its arguments from the stack, preserving
  // the arguments and a return address pushed to the stack for the next call.
  // Neither |callee_args_count| nor |caller_args_count_reg| includes the
  // receiver. |callee_args_count| is not modified; |caller_args_count_reg| is
  // trashed. |number_of_temp_values_after_return_address| specifies the number
  // of words pushed to the stack after the return address. This is to allow
  // "allocation" of scratch registers that this function requires by saving
  // their values on the stack.
  void PrepareForTailCall(const ParameterCount& callee_args_count,
                          Register caller_args_count_reg, Register scratch0,
                          Register scratch1,
                          int number_of_temp_values_after_return_address);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in esp[0], esp[4],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
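
  // Illustrative sketch of a typical call sequence, assuming a C function
  // taking two word-sized arguments reachable through an ExternalReference
  // {ref} (hypothetical name):
  //
  //   PrepareCallCFunction(2, eax);                    // eax used as scratch
  //   mov(Operand(esp, 0 * kSystemPointerSize), edx);  // first argument
  //   mov(Operand(esp, 1 * kSystemPointerSize), ecx);  // second argument
  //   CallCFunction(ref, 2);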

  void ShlPair(Register high, Register low, uint8_t imm8);
  void ShlPair_cl(Register high, Register low);
  void ShrPair(Register high, Register low, uint8_t imm8);
  void ShrPair_cl(Register high, Register low);
  void SarPair(Register high, Register low, uint8_t imm8);
  void SarPair_cl(Register high, Register low);

  // Generates function and stub prologue code.
  void StubPrologue(StackFrame::Type type);
  void Prologue();

  void Lzcnt(Register dst, Register src) { Lzcnt(dst, Operand(src)); }
  void Lzcnt(Register dst, Operand src);

  void Tzcnt(Register dst, Register src) { Tzcnt(dst, Operand(src)); }
  void Tzcnt(Register dst, Operand src);

  void Popcnt(Register dst, Register src) { Popcnt(dst, Operand(src)); }
  void Popcnt(Register dst, Operand src);

  void Ret();

  // Root register utility functions.

  void InitializeRootRegister();

  void LoadRoot(Register destination, RootIndex index) override;

  // Indirect root-relative loads.
  void LoadFromConstantsTable(Register destination,
                              int constant_index) override;
  void LoadRootRegisterOffset(Register destination, intptr_t offset) override;
  void LoadRootRelative(Register destination, int32_t offset) override;

  // Operand pointing to an external reference.
  // May emit code to set up the scratch register. The operand is
  // only guaranteed to be correct as long as the scratch register
  // isn't changed.
  // If the operand is used more than once, use a scratch register
  // that is guaranteed not to be clobbered.
  Operand ExternalReferenceAsOperand(ExternalReference reference,
                                     Register scratch);
  Operand ExternalReferenceAddressAsOperand(ExternalReference reference);
  Operand HeapObjectAsOperand(Handle<HeapObject> object);

  void LoadAddress(Register destination, ExternalReference source);

  void CompareStackLimit(Register with);
  void CompareRealStackLimit(Register with);
  void CompareRoot(Register with, RootIndex index);
  void CompareRoot(Register with, Register scratch, RootIndex index);

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1.  Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  void Pshufhw(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
    Pshufhw(dst, Operand(src), shuffle);
  }
  void Pshufhw(XMMRegister dst, Operand src, uint8_t shuffle);
  void Pshuflw(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
    Pshuflw(dst, Operand(src), shuffle);
  }
  void Pshuflw(XMMRegister dst, Operand src, uint8_t shuffle);
  void Pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
    Pshufd(dst, Operand(src), shuffle);
  }
  void Pshufd(XMMRegister dst, Operand src, uint8_t shuffle);
  void Psraw(XMMRegister dst, uint8_t shift);
  void Psrlw(XMMRegister dst, uint8_t shift);

// SSE/SSE2 instructions with AVX version.
#define AVX_OP2_WITH_TYPE(macro_name, name, dst_type, src_type) \
  void macro_name(dst_type dst, src_type src) {                 \
    if (CpuFeatures::IsSupported(AVX)) {                        \
      CpuFeatureScope scope(this, AVX);                         \
      v##name(dst, src);                                        \
    } else {                                                    \
      name(dst, src);                                           \
    }                                                           \
  }

  AVX_OP2_WITH_TYPE(Rcpps, rcpps, XMMRegister, const Operand&)
  AVX_OP2_WITH_TYPE(Rsqrtps, rsqrtps, XMMRegister, const Operand&)
  AVX_OP2_WITH_TYPE(Movdqu, movdqu, XMMRegister, Operand)
  AVX_OP2_WITH_TYPE(Movdqu, movdqu, Operand, XMMRegister)
  AVX_OP2_WITH_TYPE(Movd, movd, XMMRegister, Register)
  AVX_OP2_WITH_TYPE(Movd, movd, XMMRegister, Operand)
  AVX_OP2_WITH_TYPE(Movd, movd, Register, XMMRegister)
  AVX_OP2_WITH_TYPE(Movd, movd, Operand, XMMRegister)
  AVX_OP2_WITH_TYPE(Cvtdq2ps, cvtdq2ps, XMMRegister, Operand)

#undef AVX_OP2_WITH_TYPE
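
  // A minimal sketch of what one AVX_OP2_WITH_TYPE wrapper above expands to,
  // using Movdqu(XMMRegister, Operand) as the example:
  //
  //   void Movdqu(XMMRegister dst, Operand src) {
  //     if (CpuFeatures::IsSupported(AVX)) {
  //       CpuFeatureScope scope(this, AVX);
  //       vmovdqu(dst, src);  // AVX encoding
  //     } else {
  //       movdqu(dst, src);   // SSE2 encoding
  //     }
  //   }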

// Only use these macros when the non-destructive three-operand form of the
// AVX version is not needed.
#define AVX_OP3_WITH_TYPE(macro_name, name, dst_type, src_type) \
  void macro_name(dst_type dst, src_type src) {                 \
    if (CpuFeatures::IsSupported(AVX)) {                        \
      CpuFeatureScope scope(this, AVX);                         \
      v##name(dst, dst, src);                                   \
    } else {                                                    \
      name(dst, src);                                           \
    }                                                           \
  }
#define AVX_OP3_XO(macro_name, name)                            \
  AVX_OP3_WITH_TYPE(macro_name, name, XMMRegister, XMMRegister) \
  AVX_OP3_WITH_TYPE(macro_name, name, XMMRegister, Operand)

  AVX_OP3_XO(Packsswb, packsswb)
  AVX_OP3_XO(Packuswb, packuswb)
  AVX_OP3_XO(Pcmpeqb, pcmpeqb)
  AVX_OP3_XO(Pcmpeqw, pcmpeqw)
  AVX_OP3_XO(Pcmpeqd, pcmpeqd)
  AVX_OP3_XO(Psubb, psubb)
  AVX_OP3_XO(Psubw, psubw)
  AVX_OP3_XO(Psubd, psubd)
  AVX_OP3_XO(Punpcklbw, punpcklbw)
  AVX_OP3_XO(Punpckhbw, punpckhbw)
  AVX_OP3_XO(Pxor, pxor)
  AVX_OP3_XO(Andps, andps)
  AVX_OP3_XO(Andnps, andnps)
  AVX_OP3_XO(Andpd, andpd)
  AVX_OP3_XO(Xorps, xorps)
  AVX_OP3_XO(Xorpd, xorpd)
  AVX_OP3_XO(Sqrtss, sqrtss)
  AVX_OP3_XO(Sqrtsd, sqrtsd)

#undef AVX_OP3_XO
#undef AVX_OP3_WITH_TYPE
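
  // A minimal sketch of what one AVX_OP3_XO wrapper above expands to, using
  // Pxor(XMMRegister, Operand) as the example; note the AVX form repeats
  // {dst} as the first source, so the wrapper is still destructive:
  //
  //   void Pxor(XMMRegister dst, Operand src) {
  //     if (CpuFeatures::IsSupported(AVX)) {
  //       CpuFeatureScope scope(this, AVX);
  //       vpxor(dst, dst, src);
  //     } else {
  //       pxor(dst, src);
  //     }
  //   }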

// Non-SSE2 instructions.
#define AVX_OP2_WITH_TYPE_SCOPE(macro_name, name, dst_type, src_type, \
                                sse_scope)                            \
  void macro_name(dst_type dst, src_type src) {                       \
    if (CpuFeatures::IsSupported(AVX)) {                              \
      CpuFeatureScope scope(this, AVX);                               \
      v##name(dst, src);                                              \
      return;                                                         \
    }                                                                 \
    if (CpuFeatures::IsSupported(sse_scope)) {                        \
      CpuFeatureScope scope(this, sse_scope);                         \
      name(dst, src);                                                 \
      return;                                                         \
    }                                                                 \
    UNREACHABLE();                                                    \
  }
#define AVX_OP2_XO_SSE4(macro_name, name)                                     \
  AVX_OP2_WITH_TYPE_SCOPE(macro_name, name, XMMRegister, XMMRegister, SSE4_1) \
  AVX_OP2_WITH_TYPE_SCOPE(macro_name, name, XMMRegister, Operand, SSE4_1)

  AVX_OP2_XO_SSE4(Ptest, ptest)
  AVX_OP2_XO_SSE4(Pmovsxbw, pmovsxbw)
  AVX_OP2_XO_SSE4(Pmovsxwd, pmovsxwd)
  AVX_OP2_XO_SSE4(Pmovzxbw, pmovzxbw)
  AVX_OP2_XO_SSE4(Pmovzxwd, pmovzxwd)

#undef AVX_OP2_WITH_TYPE_SCOPE
#undef AVX_OP2_XO_SSE4

  void Pshufb(XMMRegister dst, XMMRegister src) { Pshufb(dst, Operand(src)); }
  void Pshufb(XMMRegister dst, Operand src);
  void Pblendw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
    Pblendw(dst, Operand(src), imm8);
  }
  void Pblendw(XMMRegister dst, Operand src, uint8_t imm8);

  void Psignb(XMMRegister dst, XMMRegister src) { Psignb(dst, Operand(src)); }
  void Psignb(XMMRegister dst, Operand src);
  void Psignw(XMMRegister dst, XMMRegister src) { Psignw(dst, Operand(src)); }
  void Psignw(XMMRegister dst, Operand src);
  void Psignd(XMMRegister dst, XMMRegister src) { Psignd(dst, Operand(src)); }
  void Psignd(XMMRegister dst, Operand src);

  void Palignr(XMMRegister dst, XMMRegister src, uint8_t imm8) {
    Palignr(dst, Operand(src), imm8);
  }
  void Palignr(XMMRegister dst, Operand src, uint8_t imm8);

  void Pextrb(Register dst, XMMRegister src, uint8_t imm8);
  void Pextrw(Register dst, XMMRegister src, uint8_t imm8);
  void Pextrd(Register dst, XMMRegister src, uint8_t imm8);
  void Pinsrd(XMMRegister dst, Register src, uint8_t imm8) {
    Pinsrd(dst, Operand(src), imm8);
  }
  void Pinsrd(XMMRegister dst, Operand src, uint8_t imm8);

  // Expression support
  // The cvtsi2sd instruction only writes to the low 64 bits of the dst
  // register, which hinders register renaming and makes dependence chains
  // longer. So we use xorps to clear the dst register before cvtsi2sd to
  // solve this issue.
  void Cvtsi2ss(XMMRegister dst, Register src) { Cvtsi2ss(dst, Operand(src)); }
  void Cvtsi2ss(XMMRegister dst, Operand src);
  void Cvtsi2sd(XMMRegister dst, Register src) { Cvtsi2sd(dst, Operand(src)); }
  void Cvtsi2sd(XMMRegister dst, Operand src);
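
  // Sketch of the sequence the comment above describes for Cvtsi2sd (the
  // Cvtsi2ss wrapper is analogous):
  //
  //   xorps(dst, dst);     // break the false dependence on dst's old value
  //   cvtsi2sd(dst, src);  // convert the 32-bit integer in {src}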

  void Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
    Cvtui2ss(dst, Operand(src), tmp);
  }
  void Cvtui2ss(XMMRegister dst, Operand src, Register tmp);
  void Cvttss2ui(Register dst, XMMRegister src, XMMRegister tmp) {
    Cvttss2ui(dst, Operand(src), tmp);
  }
  void Cvttss2ui(Register dst, Operand src, XMMRegister tmp);
  void Cvtui2sd(XMMRegister dst, Register src, Register scratch) {
    Cvtui2sd(dst, Operand(src), scratch);
  }
  void Cvtui2sd(XMMRegister dst, Operand src, Register scratch);
  void Cvttsd2ui(Register dst, XMMRegister src, XMMRegister tmp) {
    Cvttsd2ui(dst, Operand(src), tmp);
  }
  void Cvttsd2ui(Register dst, Operand src, XMMRegister tmp);

  void Push(Register src) { push(src); }
  void Push(Operand src) { push(src); }
  void Push(Immediate value);
  void Push(Handle<HeapObject> handle) { push(Immediate(handle)); }
  void Push(Smi smi) { Push(Immediate(smi)); }

  void SaveRegisters(RegList registers);
  void RestoreRegisters(RegList registers);

  void CallRecordWriteStub(Register object, Register address,
                           RememberedSetAction remembered_set_action,
                           SaveFPRegsMode fp_mode);
  void CallRecordWriteStub(Register object, Register address,
                           RememberedSetAction remembered_set_action,
                           SaveFPRegsMode fp_mode, Address wasm_target);
  void CallEphemeronKeyBarrier(Register object, Register address,
                               SaveFPRegsMode fp_mode);

  // Calculate how much stack space (in bytes) is required to store caller
  // registers, excluding those specified in the arguments.
  int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                      Register exclusion1 = no_reg,
                                      Register exclusion2 = no_reg,
                                      Register exclusion3 = no_reg) const;

  // PushCallerSaved and PopCallerSaved do not arrange the registers in any
  // particular order so they are not useful for calls that can cause a GC.
  // The caller can exclude up to 3 registers that do not need to be saved and
  // restored.

  // Push caller saved registers on the stack, and return the number of bytes
  // the stack pointer is adjusted by.
  int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
                      Register exclusion2 = no_reg,
                      Register exclusion3 = no_reg);
  // Restore caller saved registers from the stack, and return the number of
  // bytes the stack pointer is adjusted by.
  int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
                     Register exclusion2 = no_reg,
                     Register exclusion3 = no_reg);

  // Compute the start of the generated instruction stream from the current PC.
  // This is an alternative to embedding the {CodeObject} handle as a reference.
  void ComputeCodeStartAddress(Register dst);

  // TODO(860429): Remove remaining poisoning infrastructure on ia32.
  void ResetSpeculationPoisonRegister() { UNREACHABLE(); }

  void CallRecordWriteStub(Register object, Register address,
                           RememberedSetAction remembered_set_action,
                           SaveFPRegsMode fp_mode, Handle<Code> code_target,
                           Address wasm_target);
};

// MacroAssembler implements a collection of frequently used macros.
class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
 public:
  using TurboAssembler::TurboAssembler;

  // Load a register with a long value as efficiently as possible.
  void Set(Register dst, int32_t x) {
    if (x == 0) {
      xor_(dst, dst);
    } else {
      mov(dst, Immediate(x));
    }
  }
  void Set(Operand dst, int32_t x) { mov(dst, Immediate(x)); }

  void PushRoot(RootIndex index);

  // Compare the object in a register to a value and jump if they are equal.
  void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
                  Label::Distance if_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(equal, if_equal, if_equal_distance);
  }

  // Compare the object in a register to a value and jump if they are not equal.
  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
                     Label::Distance if_not_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(not_equal, if_not_equal, if_not_equal_distance);
  }

  // Checks if value is in range [lower_limit, higher_limit] using a single
  // comparison.
  void JumpIfIsInRange(Register value, unsigned lower_limit,
                       unsigned higher_limit, Register scratch,
                       Label* on_in_range,
                       Label::Distance near_jump = Label::kFar);

  // ---------------------------------------------------------------------------
  // GC Support
  // Notify the garbage collector that we wrote a pointer into an object.
  // |object| is the object being stored into, |value| is the object being
  // stored.  value and scratch registers are clobbered by the operation.
  // The offset is the offset from the start of the object, not the offset from
  // the tagged HeapObject pointer.  For use with FieldOperand(reg, off).
  void RecordWriteField(
      Register object, int offset, Register value, Register scratch,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // For page containing |object| mark region covering |address|
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. The address and value registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update the
  // write barrier if the value is a smi.
  void RecordWrite(
      Register object, Register address, Register value, SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // Frame restart support
  void MaybeDropFrames();

  // Enter specific kind of exit frame. Expects the number of
  // arguments in register eax and sets up the number of arguments in
  // register edi and the pointer to the first argument in register
  // esi.
  void EnterExitFrame(int argc, bool save_doubles, StackFrame::Type frame_type);

  void EnterApiExitFrame(int argc, Register scratch);

  // Leave the current exit frame. Expects the return value in
  // register eax:edx (untouched) and the pointer to the first
  // argument in register esi (if pop_arguments == true).
  void LeaveExitFrame(bool save_doubles, bool pop_arguments = true);

  // Leave the current exit frame. Expects the return value in
  // register eax (untouched).
  void LeaveApiExitFrame();

  // Load the global proxy from the current context.
  void LoadGlobalProxy(Register dst);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Push and pop the registers that can hold pointers.
  void PushSafepointRegisters() { pushad(); }
  void PopSafepointRegisters() { popad(); }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeFunctionCode(Register function, Register new_target,
                          const ParameterCount& expected,
                          const ParameterCount& actual, InvokeFlag flag);

  // On function call, call into the debugger if necessary.
  // This may clobber ecx.
  void CheckDebugHook(Register fun, Register new_target,
                      const ParameterCount& expected,
                      const ParameterCount& actual);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function, Register new_target,
                      const ParameterCount& actual, InvokeFlag flag);

  // Compare object type for heap object.
  // Incoming register is heap_object and outgoing register is map.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  void CmpInstanceType(Register map, InstanceType type);

  void DoubleToI(Register result_reg, XMMRegister input_reg,
                 XMMRegister scratch, Label* lost_precision, Label* is_nan,
                 Label::Distance dst = Label::kFar);

  // Smi tagging support.
  void SmiTag(Register reg) {
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    add(reg, reg);
  }

  // Modifies the register even if it does not contain a Smi!
  void UntagSmi(Register reg, Label* is_smi) {
    STATIC_ASSERT(kSmiTagSize == 1);
    sar(reg, kSmiTagSize);
    STATIC_ASSERT(kSmiTag == 0);
    j(not_carry, is_smi);
  }
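
  // Worked example of the smi encoding on ia32 (kSmiTag == 0 and
  // kSmiTagSize == 1, as asserted above): the value lives in the upper 31
  // bits with a 0 tag bit, so SmiTag turns 5 into 10 (0b1010) via
  // add(reg, reg), and UntagSmi shifts it back; the tag bit shifted into the
  // carry flag tells the caller whether the input really was a smi.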

  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label,
                           Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(not_zero, not_smi_label, distance);
  }
  // Jump if the operand is not a smi.
  inline void JumpIfNotSmi(Operand value, Label* not_smi_label,
                           Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(not_zero, not_smi_label, distance);
  }

  template<typename Field>
  void DecodeField(Register reg) {
    static const int shift = Field::kShift;
    static const int mask = Field::kMask >> Field::kShift;
    if (shift != 0) {
      sar(reg, shift);
    }
    and_(reg, Immediate(mask));
  }
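
  // Illustrative expansion with a hypothetical bit field
  // "using FooField = BitField<int, 3, 4>;" (shift 3, width 4):
  // DecodeField<FooField>(eax) emits
  //
  //   sar(eax, 3);
  //   and_(eax, Immediate(0xF));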
591

  // Abort execution if argument is not a smi, enabled via --debug-code.
  void AssertSmi(Register object);

  // Abort execution if argument is a smi, enabled via --debug-code.
  void AssertNotSmi(Register object);

  // Abort execution if argument is not a JSFunction, enabled via --debug-code.
  void AssertFunction(Register object);

  // Abort execution if argument is not a Constructor, enabled via --debug-code.
  void AssertConstructor(Register object);

  // Abort execution if argument is not a JSBoundFunction,
  // enabled via --debug-code.
  void AssertBoundFunction(Register object);

  // Abort execution if argument is not a JSGeneratorObject (or subclass),
  // enabled via --debug-code.
  void AssertGeneratorObject(Register object);

  // Abort execution if argument is not undefined or an AllocationSite, enabled
  // via --debug-code.
  void AssertUndefinedOrAllocationSite(Register object, Register scratch);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new stack handler and link it into stack handler chain.
  void PushStackHandler(Register scratch);

  // Unlink the stack handler on top of the stack from the stack handler chain.
  void PopStackHandler(Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    CallRuntime(function, function->nargs, save_doubles);
  }

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
    CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
  }

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext,
                               bool builtin_exit_frame = false);

  // Generates a trampoline to jump to the off-heap instruction stream.
  void JumpToInstructionStream(Address entry);

  // ---------------------------------------------------------------------------
  // Utilities

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the esp register.
  void Drop(int element_count);

  void Pop(Register dst) { pop(dst); }
  void Pop(Operand dst) { pop(dst); }
  void PushReturnAddressFrom(Register src) { push(src); }
  void PopReturnAddressTo(Register dst) { pop(dst); }

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register in_out, Label* target_if_cleared);

  // ---------------------------------------------------------------------------
  // StatsCounter support

  void IncrementCounter(StatsCounter* counter, int value, Register scratch);
  void DecrementCounter(StatsCounter* counter, int value, Register scratch);

  static int SafepointRegisterStackIndex(Register reg) {
    return SafepointRegisterStackIndex(reg.code());
  }

 private:
  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual, Label* done,
                      bool* definitely_mismatches, InvokeFlag flag,
                      Label::Distance done_distance);

  void EnterExitFramePrologue(StackFrame::Type frame_type, Register scratch);
  void EnterExitFrameEpilogue(int argc, bool save_doubles);

  void LeaveExitFrameEpilogue();

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);

  // Needs access to SafepointRegisterStackIndex for compiled frame
  // traversal.
  friend class StandardFrame;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler);
};

// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}
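
// Illustrative use: load an object's map through the untagged field offset,
// e.g. mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)).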

// Generate an Operand for loading an indexed field from an object.
inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
                            int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}

inline Operand ContextOperand(Register context, int index) {
  return Operand(context, Context::SlotOffset(index));
}

inline Operand ContextOperand(Register context, Register index) {
  return Operand(context, index, times_tagged_size, Context::SlotOffset(0));
}

inline Operand NativeContextOperand() {
  return ContextOperand(esi, Context::NATIVE_CONTEXT_INDEX);
}

#define ACCESS_MASM(masm) masm->

}  // namespace internal
}  // namespace v8

#endif  // V8_IA32_MACRO_ASSEMBLER_IA32_H_