// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "allocation.h"
#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {


// Reads a double from |p|, which is not guaranteed to be 8-byte aligned.
static inline double read_double_value(Address p) {
#ifdef V8_HOST_CAN_READ_UNALIGNED
  return Memory::double_at(p);
#else  // V8_HOST_CAN_READ_UNALIGNED
  // Assemble the double out of two 32-bit halves so the compiler cannot
  // emit a load-double instruction (e.g. mips ldc1) on a possibly
  // non-64-bit-aligned address.
  union {
    double value;
    uint32_t halves[2];
  } bits;
  bits.halves[0] = *reinterpret_cast<uint32_t*>(p);
  bits.halves[1] = *reinterpret_cast<uint32_t*>(p + 4);
  return bits.value;
#endif  // V8_HOST_CAN_READ_UNALIGNED
}


59 60
class FrameDescription;
class TranslationIterator;
61
class DeoptimizedFrameInfo;
62

63
template<typename T>
64
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
65
 public:
66 67
  HeapNumberMaterializationDescriptor(T destination, double value)
      : destination_(destination), value_(value) { }
68

69 70
  T destination() const { return destination_; }
  double value() const { return value_; }
71 72

 private:
73 74
  T destination_;
  double value_;
75 76 77
};


78
class ObjectMaterializationDescriptor BASE_EMBEDDED {
79
 public:
80 81 82 83 84 85 86
  ObjectMaterializationDescriptor(
      Address slot_address, int frame, int length, int duplicate, bool is_args)
      : slot_address_(slot_address),
        jsframe_index_(frame),
        object_length_(length),
        duplicate_object_(duplicate),
        is_arguments_(is_args) { }
87 88

  Address slot_address() const { return slot_address_; }
89
  int jsframe_index() const { return jsframe_index_; }
90
  int object_length() const { return object_length_; }
91 92 93 94 95 96 97
  int duplicate_object() const { return duplicate_object_; }
  bool is_arguments() const { return is_arguments_; }

  // Only used for allocated receivers in DoComputeConstructStubFrame.
  void patch_slot_address(intptr_t slot) {
    slot_address_ = reinterpret_cast<Address>(slot);
  }
98 99 100

 private:
  Address slot_address_;
101
  int jsframe_index_;
102
  int object_length_;
103 104
  int duplicate_object_;
  bool is_arguments_;
105 106 107
};


108 109 110 111 112
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
113
  // from given native context.
114 115 116 117 118
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
119
  // from given native context.
120 121 122 123 124 125 126 127 128
  virtual void LeaveContext(Context* context) = 0;
};


class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
129
    SOFT,
130 131 132
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
133 134
  };

135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150
  static const int kBailoutTypesWithCodeEntry = SOFT + 1;

  struct JumpTableEntry {
    inline JumpTableEntry(Address entry,
                          Deoptimizer::BailoutType type,
                          bool frame)
        : label(),
          address(entry),
          bailout_type(type),
          needs_frame(frame) { }
    Label label;
    Address address;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

151 152
  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
153 154
  static const char* MessageFor(BailoutType type);

155 156
  int output_count() const { return output_count_; }

157 158 159
  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }
160

161 162 163
  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

164 165 166 167
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
168 169 170
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);
171

172 173 174 175
#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
176
                                                        int jsframe_index,
177 178 179 180 181
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);
#endif

182 183 184 185 186 187 188
  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

189 190 191 192 193
  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

194
  // Deoptimize all code in the given isolate.
195
  static void DeoptimizeAll(Isolate* isolate);
196

197
  // Deoptimize code associated with the given global object.
198 199
  static void DeoptimizeGlobalObject(JSObject* object);

200 201 202 203
  // Deoptimizes all optimized code that has been previously marked
  // (via code->set_marked_for_deoptimization) and unlinks all functions that
  // refer to that code.
  static void DeoptimizeMarkedCode(Isolate* isolate);
204

205 206 207
  // Visit all the known optimized functions in a given isolate.
  static void VisitAllOptimizedFunctions(
      Isolate* isolate, OptimizedFunctionVisitor* visitor);
208

209 210 211
  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

212 213
  ~Deoptimizer();

214
  void MaterializeHeapObjects(JavaScriptFrameIterator* it);
215 216
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
217 218 219 220 221
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);
222
#endif
223

224
  static void ComputeOutputFrames(Deoptimizer* deoptimizer);
225

226 227 228 229 230 231 232 233

  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };


  static Address GetDeoptimizationEntry(
234
      Isolate* isolate,
235 236 237
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
238 239 240
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
241
  static int GetOutputInfo(DeoptimizationOutputData* data,
242
                           BailoutId node_id,
243
                           SharedFunctionInfo* shared);
244 245 246 247 248 249 250 251

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

252 253 254 255
  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);
  }

256
  static int GetDeoptimizedCodeCount(Isolate* isolate);
257 258 259 260 261 262 263 264 265 266 267 268 269 270 271

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
272
    Isolate* isolate() const { return masm_->isolate(); }
273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type,  int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

295 296
  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

297 298
  static size_t GetMaxDeoptTableSize();

299 300
  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
301 302
                                               int max_entry_id);

303 304
  Isolate* isolate() const { return isolate_; }

305
 private:
306 307
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;
308

309 310
  Deoptimizer(Isolate* isolate,
              JSFunction* function,
311 312 313
              BailoutType type,
              unsigned bailout_id,
              Address from,
314 315
              int fp_to_sp_delta,
              Code* optimized_code);
316 317
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
318 319 320
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
321 322 323
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
324 325
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
326 327 328
  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                  int frame_index,
                                  bool is_setter_stub_frame);
329 330
  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                  int frame_index);
331

332
  void DoTranslateObject(TranslationIterator* iterator,
333
                         int object_index,
334 335
                         int field_index);

336
  void DoTranslateCommand(TranslationIterator* iterator,
337 338
                          int frame_index,
                          unsigned output_offset);
339

340 341 342 343 344 345 346 347
  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

348 349
  void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
  void AddObjectDuplication(intptr_t slot, int object_index);
350 351
  void AddObjectTaggedValue(intptr_t value);
  void AddObjectDoubleValue(double value);
352
  void AddDoubleValue(intptr_t slot_address, double value);
353

354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369
  bool ArgumentsObjectIsAdapted(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_has_adapted_arguments_[reverse_jsframe_index];
  }

  Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_functions_[reverse_jsframe_index];
  }

  // Helper function for heap object materialization.
  Handle<Object> MaterializeNextHeapObject();
  Handle<Object> MaterializeNextValue();

370 371 372
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

373 374 375 376 377 378
  // Marks all the code in the given context for deoptimization.
  static void MarkAllCodeForContext(Context* native_context);

  // Visit all the known optimized functions in a given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);
379

380 381
  // Deoptimizes all code marked in the given context.
  static void DeoptimizeMarkedCodeForContext(Context* native_context);
382 383 384

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
385

386 387 388 389 390
  // Searches the list of known deoptimizing code for a Code object
  // containing the given address (which is supposedly faster than
  // searching all code objects).
  Code* FindDeoptimizingCode(Address addr);

391 392 393 394 395
  // Fill the input from from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

396 397 398 399 400 401 402 403 404
  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubInterfaceDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

405 406 407 408
  // Determines whether the input frame contains alignment padding by looking
  // at the dynamic alignment state slot inside the frame.
  bool HasAlignmentPadding(JSFunction* function);

409
  Isolate* isolate_;
410
  JSFunction* function_;
411
  Code* compiled_code_;
412 413 414 415
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;
416
  int has_alignment_padding_;
417 418 419 420 421

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
422 423
  // Number of output js frames.
  int jsframe_count_;
424 425 426
  // Array of output frame descriptions.
  FrameDescription** output_;

427
  // Deferred values to be materialized.
428
  List<Object*> deferred_objects_tagged_values_;
429 430
  List<HeapNumberMaterializationDescriptor<int> >
      deferred_objects_double_values_;
431
  List<ObjectMaterializationDescriptor> deferred_objects_;
432
  List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
433 434 435 436 437 438 439 440 441 442 443

  // Output frame information. Only used during heap object materialization.
  List<Handle<JSFunction> > jsframe_functions_;
  List<bool> jsframe_has_adapted_arguments_;

  // Materialized objects. Only used during heap object materialization.
  List<Handle<Object> >* materialized_values_;
  List<Handle<Object> >* materialized_objects_;
  int materialization_value_index_;
  int materialization_object_index_;

444 445 446
#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG
447

448
  CodeTracer::Scope* trace_scope_;
449

450
  static const int table_entry_size_;
451 452

  friend class FrameDescription;
453
  friend class DeoptimizedFrameInfo;
454 455 456 457 458 459 460 461 462
};


class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
463 464 465
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
466 467
  }

468 469 470 471
  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

472 473 474 475
  void operator delete(void* description) {
    free(description);
  }

476 477 478 479
  uint32_t GetFrameSize() const {
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }
480 481 482

  JSFunction* GetFunction() const { return function_; }

483
  unsigned GetOffsetFromSlotIndex(int slot_index);
484

485
  intptr_t GetFrameSlot(unsigned offset) {
486 487 488 489
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
490
    intptr_t* ptr = GetFrameSlotPointer(offset);
491
    return read_double_value(reinterpret_cast<Address>(ptr));
492 493
  }

494
  void SetFrameSlot(unsigned offset, intptr_t value) {
495 496 497
    *GetFrameSlotPointer(offset) = value;
  }

498 499 500 501
  void SetCallerPc(unsigned offset, intptr_t value);

  void SetCallerFp(unsigned offset, intptr_t value);

502
  intptr_t GetRegister(unsigned n) const {
503 504 505 506 507 508 509 510 511
#if DEBUG
    // This convoluted ASSERT is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain ASSERT.
    if (n >= ARRAY_SIZE(registers_)) {
      ASSERT(false);
      return 0;
    }
#endif
512 513 514 515 516 517 518 519
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

520
  void SetRegister(unsigned n, intptr_t value) {
521 522 523 524 525 526 527 528 529
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

530 531
  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }
532

533 534
  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }
535

536 537
  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }
538

539 540 541
  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

542 543 544
  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

545
  void SetContinuation(intptr_t pc) { continuation_ = pc; }
546

547 548
  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }
549

550 551 552 553
  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
554
  Object* GetParameter(int index);
555

556
  // Get the expression stack height for a unoptimized frame.
557
  unsigned GetExpressionCount();
558 559

  // Get the expression stack value for an unoptimized frame.
560
  Object* GetExpression(int index);
561

562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
587
    return OFFSET_OF(FrameDescription, frame_content_);
588 589 590 591 592
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

593 594 595
  // Frame_size_ must hold a uint32_t value.  It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
596
  uintptr_t frame_size_;  // Number of bytes.
597
  JSFunction* function_;
598
  intptr_t registers_[Register::kNumRegisters];
599
  double double_registers_[DoubleRegister::kMaxNumRegisters];
600 601 602
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
603
  intptr_t context_;
604
  StackFrame::Type type_;
605 606 607 608
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
609
  intptr_t continuation_;
610

611 612 613 614
  // This must be at the end of the object as the object is allocated larger
  // than it's definition indicate to extend this array.
  intptr_t frame_content_[1];

615
  intptr_t* GetFrameSlotPointer(unsigned offset) {
616
    ASSERT(offset < frame_size_);
617
    return reinterpret_cast<intptr_t*>(
618 619
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
620 621

  int ComputeFixedSize();
622 623 624
};


625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

#ifdef ENABLE_DEBUGGER_SUPPORT
  void Iterate(ObjectVisitor* v);
#endif

 private:
  MemoryAllocator* allocator_;
  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;
#endif

643
  Deoptimizer* current_;
644 645 646 647 648 649 650

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


651 652
class TranslationBuffer BASE_EMBEDDED {
 public:
653
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
654 655

  int CurrentIndex() const { return contents_.length(); }
656
  void Add(int32_t value, Zone* zone);
657

658
  Handle<ByteArray> CreateByteArray(Factory* factory);
659 660 661 662 663 664 665 666 667 668 669 670 671 672 673

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

674
  bool HasNext() const { return index_ < buffer_->length(); }
675 676 677 678 679 680 681 682 683 684 685

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};


// X-macro listing every translation opcode.  Instantiated with a macro
// V(name) to generate the Translation::Opcode enum and its string table.
#define TRANSLATION_OPCODE_LIST(V)                                             \
  V(BEGIN)                                                                     \
  V(JS_FRAME)                                                                  \
  V(CONSTRUCT_STUB_FRAME)                                                      \
  V(GETTER_STUB_FRAME)                                                         \
  V(SETTER_STUB_FRAME)                                                         \
  V(ARGUMENTS_ADAPTOR_FRAME)                                                   \
  V(COMPILED_STUB_FRAME)                                                       \
  V(DUPLICATED_OBJECT)                                                         \
  V(ARGUMENTS_OBJECT)                                                          \
  V(CAPTURED_OBJECT)                                                           \
  V(REGISTER)                                                                  \
  V(INT32_REGISTER)                                                            \
  V(UINT32_REGISTER)                                                           \
  V(DOUBLE_REGISTER)                                                           \
  V(STACK_SLOT)                                                                \
  V(INT32_STACK_SLOT)                                                          \
  V(UINT32_STACK_SLOT)                                                         \
  V(DOUBLE_STACK_SLOT)                                                         \
  V(LITERAL)


708 709
class Translation BASE_EMBEDDED {
 public:
710
#define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
711
  enum Opcode {
712 713
    TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
    LAST = LITERAL
714
  };
715
#undef DECLARE_TRANSLATION_OPCODE_ENUM
716

717 718
  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
719
      : buffer_(buffer),
720 721 722 723 724
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
725 726 727 728 729
  }

  int index() const { return index_; }

  // Commands.
730
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
731
  void BeginCompiledStubFrame();
732
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
733
  void BeginConstructStubFrame(int literal_id, unsigned height);
734
  void BeginGetterStubFrame(int literal_id);
735
  void BeginSetterStubFrame(int literal_id);
736
  void BeginArgumentsObject(int args_length);
737 738
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
739 740
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
741
  void StoreUint32Register(Register reg);
742 743 744
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
745
  void StoreUint32StackSlot(int index);
746 747
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
748
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);
749

750
  Zone* zone() const { return zone_; }
751

752 753
  static int NumberOfOperandsFor(Opcode opcode);

754
#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
755 756 757
  static const char* StringFor(Opcode opcode);
#endif

758 759 760
  // A literal id which refers to the JSFunction itself.
  static const int kSelfLiteralId = -239;

761 762 763
 private:
  TranslationBuffer* buffer_;
  int index_;
764
  Zone* zone_;
765 766 767
};


768 769 770 771 772 773
class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
774
    UINT32,
775 776 777 778 779 780 781 782 783 784
    DOUBLE,
    LITERAL
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

785 786
  SlotRef(Isolate* isolate, Object* literal)
      : literal_(literal, isolate), representation_(LITERAL) { }
787

788
  Handle<Object> GetValue(Isolate* isolate) {
789 790
    switch (representation_) {
      case TAGGED:
791
        return Handle<Object>(Memory::Object_at(addr_), isolate);
792 793 794 795

      case INT32: {
        int value = Memory::int32_at(addr_);
        if (Smi::IsValid(value)) {
796
          return Handle<Object>(Smi::FromInt(value), isolate);
797
        } else {
798
          return isolate->factory()->NewNumberFromInt(value);
799 800 801
        }
      }

802 803 804
      case UINT32: {
        uint32_t value = Memory::uint32_at(addr_);
        if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
805
          return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
806
        } else {
807
          return isolate->factory()->NewNumber(static_cast<double>(value));
808 809 810
        }
      }

811
      case DOUBLE: {
812
        double value = read_double_value(addr_);
813
        return isolate->factory()->NewNumber(value);
814 815 816 817 818 819 820 821 822 823 824
      }

      case LITERAL:
        return literal_;

      default:
        UNREACHABLE();
        return Handle<Object>::null();
    }
  }

825 826 827 828
  static Vector<SlotRef> ComputeSlotMappingForArguments(
      JavaScriptFrame* frame,
      int inlined_frame_index,
      int formal_parameter_count);
829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847

 private:
  Address addr_;
  Handle<Object> literal_;
  SlotRepresentation representation_;

  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame);
848 849 850 851 852 853

  static void ComputeSlotsForArguments(
      Vector<SlotRef>* args_slots,
      TranslationIterator* iterator,
      DeoptimizationInputData* data,
      JavaScriptFrame* frame);
854 855 856
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
// Represents parameters in unadapted form so their number might mismatch
// formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by construct stub frame.  The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  bool has_construct_stub_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;
  int source_position_;

  friend class Deoptimizer;
};
#endif

} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_