// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_

#include "assembler.h"
#include "type-info.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};

// Convenience for platform-independent signatures.  We do not normally
// distinguish memory operands from other operands on ia32.
typedef Operand MemOperand;

// Forward declaration.
class JumpTarget;

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // ---------------------------------------------------------------------------
  // GC Support

  // For page containing |object| mark region covering |addr| dirty.
  // RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object,
                         Register addr,
                         Register scratch);

  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // equal for new space, not_equal otherwise.
                  Label* branch);

  // For page containing |object| mark region covering [object+offset]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. If offset is zero, then the scratch register
  // contains the array index into the elements array represented as a
  // Smi. All registers are clobbered by the operation. RecordWrite
  // filters out smis so it does not update the write barrier if the
  // value is a smi.
  void RecordWrite(Register object,
                   int offset,
                   Register value,
                   Register scratch);

  // For page containing |object| mark region covering |address|
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. All registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update the
  // write barrier if the value is a smi.
  void RecordWrite(Register object,
                   Register address,
                   Register value);
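
  // An illustrative sketch of typical use (register choices here are assumed,
  // not required): store a value into a field of the object in edx, then mark
  // the covering region dirty so the GC can find the stored pointer.
  //
  //   masm->mov(FieldOperand(edx, JSObject::kHeaderSize), eax);
  //   masm->RecordWrite(edx, JSObject::kHeaderSize, eax, ebx);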

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Stack limit support

  // Do a simple test for stack overflow. This does not handle the overflow;
  // it only jumps to on_stack_limit_hit when the stack limit has been hit.
  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either in normal or debug mode.
  // Expects the number of arguments in register eax and
  // sets up the number of arguments in register edi and the pointer
  // to the first argument in register esi.
  void EnterExitFrame();

  void EnterApiExitFrame(int stack_space, int argc);

  // Leave the current exit frame. Expects the return value in
  // register eax:edx (untouched) and the pointer to the first
  // argument in register esi.
  void LeaveExitFrame();

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same.
  void LoadGlobalFunctionInitialMap(Register function, Register map);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(const Operand& code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);
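
  // Illustrative sketch only: with the argument count in eax and the function
  // in edi (assumed register assignments), a call might look like
  //
  //   ParameterCount actual(eax);
  //   masm->InvokeFunction(edi, actual, CALL_FUNCTION);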

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Expression support
  void Set(Register dst, const Immediate& x);
  void Set(const Operand& dst, const Immediate& x);

  // Compare object type for heap object.
  // Incoming register is heap_object and outgoing register is map.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  void CmpInstanceType(Register map, InstanceType type);
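
  // Illustrative sketch, assuming the object is in eax, ebx may be clobbered
  // with its map, and |miss| is a caller-defined label:
  //
  //   masm->CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  //   masm->j(not_equal, &miss);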

  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (the object is known to be a heap object).
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);
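
  // Illustrative sketch (assumed registers): branch away unless eax holds a
  // string, clobbering ebx with its instance type.
  //
  //   Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  //   masm->j(NegateCondition(is_string), &not_string);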

  // Check if a heap object's type is in the JSObject range, not including
  // JSFunction.  The object's map will be loaded in the map register.
  // Any or all of the three registers may be the same.
  // The contents of the scratch register will always be overwritten.
  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  // The contents of the scratch register will be overwritten.
  void IsInstanceJSObjectType(Register map, Register scratch, Label* fail);

  // FCmp is similar to integer cmp, but requires unsigned
  // jcc instructions (je, ja, jae, jb, jbe, and jz).
  void FCmp();

  // Smi tagging support.
  void SmiTag(Register reg) {
    ASSERT(kSmiTag == 0);
    ASSERT(kSmiTagSize == 1);
    add(reg, Operand(reg));
  }
  void SmiUntag(Register reg) {
    sar(reg, kSmiTagSize);
  }
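
  // A small worked example of the tagging scheme (kSmiTag == 0 and
  // kSmiTagSize == 1), purely for illustration:
  //
  //   masm->mov(eax, Immediate(42));  // Untagged integer 42.
  //   masm->SmiTag(eax);              // eax holds the Smi 42 (raw bits 84).
  //   masm->SmiUntag(eax);            // eax holds the untagged 42 again.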

  // Modifies the register even if it does not contain a Smi!
  void SmiUntag(Register reg, TypeInfo info, Label* non_smi) {
    ASSERT(kSmiTagSize == 1);
    sar(reg, kSmiTagSize);
    if (info.IsSmi()) {
      ASSERT(kSmiTag == 0);
      j(carry, non_smi);
    }
  }

  // Modifies the register even if it does not contain a Smi!
  void SmiUntag(Register reg, Label* is_smi) {
    ASSERT(kSmiTagSize == 1);
    sar(reg, kSmiTagSize);
    ASSERT(kSmiTag == 0);
    j(not_carry, is_smi);
  }

  // Assumes input is a heap object.
  void JumpIfNotNumber(Register reg, TypeInfo info, Label* on_not_number);

  // Assumes input is a heap number.  Jumps on things out of range.  Also jumps
  // on the min negative int32.  Ignores fractional parts.
  void ConvertToInt32(Register dst,
                      Register src,      // Can be the same as dst.
                      Register scratch,  // Can be no_reg or dst, but not src.
                      TypeInfo info,
                      Label* on_not_int32);

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.  The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register is clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and end of the new object is returned in result_end. The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info. The returned pointers in result
  // and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);
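
  // Illustrative sketch: allocate a fixed-size object, receiving a tagged
  // pointer in eax and jumping to |gc_required| when new space is exhausted.
  // The register choices are assumptions made for the example only.
  //
  //   masm->AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg,
  //                            &gc_required, TAG_OBJECT);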

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. Make sure that no pointers are left to the
  // object(s) no longer allocated as they would be invalid when allocation is
  // un-done.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. The
  // register scratch2 can be passed as no_reg; the others must be
  // valid registers. Returns tagged pointer in result register, or
  // jumps to gc_required if new space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.  Generate the code if necessary.
  void CallStub(CodeStub* stub);

  // Call a code stub and return the code object called.  Try to generate
  // the code if necessary.  Do not perform a GC but instead return a retry
  // after GC failure.
  Object* TryCallStub(CodeStub* stub);

  // Tail call a code stub (jump).  Generate the code if necessary.
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called.  Try to
  // generate the code if necessary.  Do not perform a GC but instead return
  // a retry after GC failure.
  Object* TryTailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Call a runtime function, returning the CodeStub object called.
  // Try to generate the stub code if necessary.  Do not perform a GC
  // but instead return a retry after GC failure.
  Object* TryCallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  Object* TryCallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(ExternalReference ref, int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in esp[0], esp[4],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
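
  // Illustrative sketch of the calling sequence, with assumed registers and a
  // C function taking two word-sized arguments; |ref| is an ExternalReference
  // obtained elsewhere.
  //
  //   masm->PrepareCallCFunction(2, ecx);              // ecx is scratch.
  //   masm->mov(Operand(esp, 0 * kPointerSize), eax);  // First argument.
  //   masm->mov(Operand(esp, 1 * kPointerSize), edx);  // Second argument.
  //   masm->CallCFunction(ref, 2);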

  void PushHandleScope(Register scratch);

  // Pops a handle scope using the specified scratch register and
  // ensuring that the saved register is left unchanged.
  void PopHandleScope(Register saved, Register scratch);

  // As PopHandleScope, but does not perform a GC.  Instead, returns a
  // retry after GC failure object if GC is necessary.
  Object* TryPopHandleScope(Register saved, Register scratch);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext);


  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the esp register.
  void Drop(int element_count);

  void Call(Label* target) { call(target); }

  void Move(Register target, Handle<Object> value);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);
  void IncrementCounter(Condition cc, StatsCounter* counter, int value);
  void DecrementCounter(Condition cc, StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // String utilities.

  // Check whether the instance type represents a flat ascii string. Jump to the
  // label if not. If the instance type can be scratched specify same register
  // for both instance type and scratch.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type,
                                              Register scratch,
                                              Label* on_not_flat_ascii_string);

  // Checks if both objects are sequential ASCII strings, and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register object1,
                                           Register object2,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* on_not_flat_ascii_strings);

 private:
  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      const Operand& code_operand,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void EnterExitFramePrologue();
  void EnterExitFrameEpilogue(int argc);

  // Allocation support helpers.
  void LoadAllocationTopHelper(Register result,
                               Register result_end,
                               Register scratch,
                               AllocationFlags flags);
  void UpdateAllocationTopHelper(Register result_end, Register scratch);

  // Helper for PopHandleScope.  Allowed to perform a GC and returns
  // NULL if gc_allowed.  Does not perform a GC if !gc_allowed, and
  // possibly returns a failure object indicating an allocation failure.
  Object* PopHandleScopeHelper(Register saved,
                               Register scratch,
                               bool gc_allowed);
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion failure.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
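
// Illustrative sketch: overwrite exactly two bytes at |address| with a
// breakpoint followed by a nop, without emitting relocation information.
//
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->nop();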


// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
static inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}


// Generate an Operand for loading an indexed field from an object.
static inline Operand FieldOperand(Register object,
                                   Register index,
                                   ScaleFactor scale,
                                   int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}
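
// Illustrative sketches (assumed registers): load the map of the heap object
// in eax, or read an element of the FixedArray in ecx at the untagged index
// held in edx.
//
//   masm->mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
//   masm->mov(edi, FieldOperand(ecx, edx, times_4, FixedArray::kHeaderSize));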


#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) {                                               \
    byte* ia32_coverage_function =                                        \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd();                                                       \
    masm->pushad();                                                       \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__)));         \
    masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY);         \
    masm->pop(eax);                                                       \
    masm->popad();                                                        \
    masm->popfd();                                                        \
  }                                                                       \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif


} }  // namespace v8::internal

#endif  // V8_IA32_MACRO_ASSEMBLER_IA32_H_