// lithium.h
// Copyright 2012 the V8 project authors. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_H_
#define V8_LITHIUM_H_

#include "allocation.h"
#include "hydrogen.h"
#include "safepoint-table.h"

namespace v8 {
namespace internal {

38 39 40 41 42 43 44 45
#define LITHIUM_OPERAND_LIST(V)         \
  V(ConstantOperand, CONSTANT_OPERAND)  \
  V(StackSlot,       STACK_SLOT)        \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
  V(Register,        REGISTER)          \
  V(DoubleRegister,  DOUBLE_REGISTER)


46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
class LOperand: public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
63 64 65 66 67 68 69
#define LITHIUM_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
  LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
  LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
#undef LITHIUM_OPERAND_PREDICATE
70 71 72 73 74 75 76 77 78
  bool Equals(LOperand* other) const { return value_ == other->value_; }

  void PrintTo(StringStream* stream);
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);
  }

79
  // Calls SetUpCache()/TearDownCache() for each subclass.
80
  static void SetUpCaches();
81
  static void TearDownCaches();
82

83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102
 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};


class LUnallocated: public LOperand {
 public:
  enum Policy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    FIXED_SLOT,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
103
    SAME_AS_FIRST_INPUT
104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at
    // instruction start. Register allocator is free to assign the same register
    // to some other operand used inside instruction (i.e. temporary or
    // output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of
    // instruction. This means that register allocator will not reuse it's
    // register for any other operand inside instruction.
    USED_AT_END
  };

  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, USED_AT_END);
  }

  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, fixed_index, USED_AT_END);
  }

  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, lifetime);
  }

  // The superclass has a KindField.  Some policies have a signed fixed
  // index in the upper bits.
134
  static const int kPolicyWidth = 3;
135
  static const int kLifetimeWidth = 1;
136
  static const int kVirtualRegisterWidth = 15;
137 138 139 140 141 142

  static const int kPolicyShift = kKindFieldWidth;
  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
  static const int kFixedIndexShift =
      kVirtualRegisterShift + kVirtualRegisterWidth;
143 144
  static const int kFixedIndexWidth = 32 - kFixedIndexShift;
  STATIC_ASSERT(kFixedIndexWidth > 5);
145 146 147 148 149 150 151 152 153 154 155 156 157

  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };

  class LifetimeField
      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
  };

  class VirtualRegisterField
      : public BitField<unsigned,
                        kVirtualRegisterShift,
                        kVirtualRegisterWidth> {
  };

158
  static const int kMaxVirtualRegisters = 1 << kVirtualRegisterWidth;
159 160
  static const int kMaxFixedIndex = (1 << (kFixedIndexWidth - 1)) - 1;
  static const int kMinFixedIndex = -(1 << (kFixedIndexWidth - 1));
161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177

  bool HasAnyPolicy() const {
    return policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return policy() == FIXED_REGISTER ||
        policy() == FIXED_DOUBLE_REGISTER ||
        policy() == FIXED_SLOT;
  }
  bool HasRegisterPolicy() const {
    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
  }
  bool HasSameAsInputPolicy() const {
    return policy() == SAME_AS_FIRST_INPUT;
  }
  Policy policy() const { return PolicyField::decode(value_); }
  void set_policy(Policy policy) {
178
    value_ = PolicyField::update(value_, policy);
179 180 181 182 183
  }
  int fixed_index() const {
    return static_cast<int>(value_) >> kFixedIndexShift;
  }

184
  int virtual_register() const {
185 186 187 188
    return VirtualRegisterField::decode(value_);
  }

  void set_virtual_register(unsigned id) {
189
    value_ = VirtualRegisterField::update(value_, id);
190 191
  }

192 193
  LUnallocated* CopyUnconstrained(Zone* zone) {
    LUnallocated* result = new(zone) LUnallocated(ANY);
194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246
    result->set_virtual_register(virtual_register());
    return result;
  }

  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  bool IsUsedAtStart() {
    return LifetimeField::decode(value_) == USED_AT_START;
  }

 private:
  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
    value_ |= PolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
    value_ |= fixed_index << kFixedIndexShift;
    ASSERT(this->fixed_index() == fixed_index);
  }
};


class LMoveOperands BASE_EMBEDDED {
 public:
  LMoveOperands(LOperand* source, LOperand* destination)
      : source_(source), destination_(destination) {
  }

  LOperand* source() const { return source_; }
  void set_source(LOperand* operand) { source_ = operand; }

  LOperand* destination() const { return destination_; }
  void set_destination(LOperand* operand) { destination_ = operand; }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_ == NULL && source_ != NULL;
  }

  // True if this move a move into the given destination operand.
  bool Blocks(LOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored();
  }

  bool IsIgnored() const {
247
    return destination_ != NULL && destination_->IsIgnored();
248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() { source_ = destination_ = NULL; }
  bool IsEliminated() const {
    ASSERT(source_ != NULL || destination_ == NULL);
    return source_ == NULL;
  }

 private:
  LOperand* source_;
  LOperand* destination_;
};


class LConstantOperand: public LOperand {
 public:
265
  static LConstantOperand* Create(int index, Zone* zone) {
266 267
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
268
    return new(zone) LConstantOperand(index);
269 270 271 272 273 274 275
  }

  static LConstantOperand* cast(LOperand* op) {
    ASSERT(op->IsConstantOperand());
    return reinterpret_cast<LConstantOperand*>(op);
  }

276
  static void SetUpCache();
277
  static void TearDownCache();
278 279 280

 private:
  static const int kNumCachedOperands = 128;
281
  static LConstantOperand* cache;
282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300

  LConstantOperand() : LOperand() { }
  explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
};


class LArgument: public LOperand {
 public:
  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }

  static LArgument* cast(LOperand* op) {
    ASSERT(op->IsArgument());
    return reinterpret_cast<LArgument*>(op);
  }
};


class LStackSlot: public LOperand {
 public:
301
  static LStackSlot* Create(int index, Zone* zone) {
302 303
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
304
    return new(zone) LStackSlot(index);
305 306 307 308 309 310 311
  }

  static LStackSlot* cast(LOperand* op) {
    ASSERT(op->IsStackSlot());
    return reinterpret_cast<LStackSlot*>(op);
  }

312
  static void SetUpCache();
313
  static void TearDownCache();
314 315 316

 private:
  static const int kNumCachedOperands = 128;
317
  static LStackSlot* cache;
318 319 320 321 322 323 324 325

  LStackSlot() : LOperand() { }
  explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
};


class LDoubleStackSlot: public LOperand {
 public:
326
  static LDoubleStackSlot* Create(int index, Zone* zone) {
327 328
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
329
    return new(zone) LDoubleStackSlot(index);
330 331 332 333 334 335 336
  }

  static LDoubleStackSlot* cast(LOperand* op) {
    ASSERT(op->IsStackSlot());
    return reinterpret_cast<LDoubleStackSlot*>(op);
  }

337
  static void SetUpCache();
338
  static void TearDownCache();
339 340 341

 private:
  static const int kNumCachedOperands = 128;
342
  static LDoubleStackSlot* cache;
343 344 345 346 347 348 349 350

  LDoubleStackSlot() : LOperand() { }
  explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
};


class LRegister: public LOperand {
 public:
351
  static LRegister* Create(int index, Zone* zone) {
352 353
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
354
    return new(zone) LRegister(index);
355 356 357 358 359 360 361
  }

  static LRegister* cast(LOperand* op) {
    ASSERT(op->IsRegister());
    return reinterpret_cast<LRegister*>(op);
  }

362
  static void SetUpCache();
363
  static void TearDownCache();
364 365 366

 private:
  static const int kNumCachedOperands = 16;
367
  static LRegister* cache;
368 369 370 371 372 373 374 375

  LRegister() : LOperand() { }
  explicit LRegister(int index) : LOperand(REGISTER, index) { }
};


class LDoubleRegister: public LOperand {
 public:
376
  static LDoubleRegister* Create(int index, Zone* zone) {
377 378
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
379
    return new(zone) LDoubleRegister(index);
380 381 382 383 384 385 386
  }

  static LDoubleRegister* cast(LOperand* op) {
    ASSERT(op->IsDoubleRegister());
    return reinterpret_cast<LDoubleRegister*>(op);
  }

387
  static void SetUpCache();
388
  static void TearDownCache();
389 390 391

 private:
  static const int kNumCachedOperands = 16;
392
  static LDoubleRegister* cache;
393 394 395 396 397 398

  LDoubleRegister() : LOperand() { }
  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
};


399 400
class LParallelMove : public ZoneObject {
 public:
401
  explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
402

403 404
  void AddMove(LOperand* from, LOperand* to, Zone* zone) {
    move_operands_.Add(LMoveOperands(from, to), zone);
405 406 407 408 409 410 411 412 413 414 415 416 417 418 419
  }

  bool IsRedundant() const;

  const ZoneList<LMoveOperands>* move_operands() const {
    return &move_operands_;
  }

  void PrintDataTo(StringStream* stream) const;

 private:
  ZoneList<LMoveOperands> move_operands_;
};


420 421
class LPointerMap: public ZoneObject {
 public:
422 423 424
  explicit LPointerMap(int position, Zone* zone)
      : pointer_operands_(8, zone),
        untagged_operands_(0, zone),
425 426 427 428 429 430 431 432 433 434
        position_(position),
        lithium_position_(-1) { }

  const ZoneList<LOperand*>* GetNormalizedOperands() {
    for (int i = 0; i < untagged_operands_.length(); ++i) {
      RemovePointer(untagged_operands_[i]);
    }
    untagged_operands_.Clear();
    return &pointer_operands_;
  }
435 436 437 438 439 440 441 442
  int position() const { return position_; }
  int lithium_position() const { return lithium_position_; }

  void set_lithium_position(int pos) {
    ASSERT(lithium_position_ == -1);
    lithium_position_ = pos;
  }

443
  void RecordPointer(LOperand* op, Zone* zone);
444
  void RemovePointer(LOperand* op);
445
  void RecordUntagged(LOperand* op, Zone* zone);
446 447 448 449
  void PrintTo(StringStream* stream);

 private:
  ZoneList<LOperand*> pointer_operands_;
450
  ZoneList<LOperand*> untagged_operands_;
451 452 453 454 455 456 457 458
  int position_;
  int lithium_position_;
};


class LEnvironment: public ZoneObject {
 public:
  LEnvironment(Handle<JSFunction> closure,
459
               FrameType frame_type,
460
               BailoutId ast_id,
461 462 463
               int parameter_count,
               int argument_count,
               int value_count,
464
               LEnvironment* outer,
465
               HEnterInlined* entry,
466
               Zone* zone)
467
      : closure_(closure),
468
        frame_type_(frame_type),
469 470 471 472 473
        arguments_stack_height_(argument_count),
        deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
        translation_index_(-1),
        ast_id_(ast_id),
        parameter_count_(parameter_count),
474
        pc_offset_(-1),
475
        values_(value_count, zone),
476
        is_tagged_(value_count, zone),
477
        is_uint32_(value_count, zone),
478 479
        spilled_registers_(NULL),
        spilled_double_registers_(NULL),
480
        outer_(outer),
481
        entry_(entry),
482
        zone_(zone) { }
483 484

  Handle<JSFunction> closure() const { return closure_; }
485
  FrameType frame_type() const { return frame_type_; }
486 487 488
  int arguments_stack_height() const { return arguments_stack_height_; }
  int deoptimization_index() const { return deoptimization_index_; }
  int translation_index() const { return translation_index_; }
489
  BailoutId ast_id() const { return ast_id_; }
490
  int parameter_count() const { return parameter_count_; }
491
  int pc_offset() const { return pc_offset_; }
492 493 494 495 496 497
  LOperand** spilled_registers() const { return spilled_registers_; }
  LOperand** spilled_double_registers() const {
    return spilled_double_registers_;
  }
  const ZoneList<LOperand*>* values() const { return &values_; }
  LEnvironment* outer() const { return outer_; }
498
  HEnterInlined* entry() { return entry_; }
499

500 501 502
  void AddValue(LOperand* operand,
                Representation representation,
                bool is_uint32) {
503
    values_.Add(operand, zone());
504
    if (representation.IsTagged()) {
505
      ASSERT(!is_uint32);
506 507
      is_tagged_.Add(values_.length() - 1);
    }
508 509 510 511

    if (is_uint32) {
      is_uint32_.Add(values_.length() - 1);
    }
512 513 514
  }

  bool HasTaggedValueAt(int index) const {
515
    return is_tagged_.Contains(index);
516 517
  }

518 519 520 521
  bool HasUint32ValueAt(int index) const {
    return is_uint32_.Contains(index);
  }

522 523 524
  void Register(int deoptimization_index,
                int translation_index,
                int pc_offset) {
525 526 527
    ASSERT(!HasBeenRegistered());
    deoptimization_index_ = deoptimization_index;
    translation_index_ = translation_index;
528
    pc_offset_ = pc_offset;
529 530 531 532 533 534 535 536 537 538 539 540 541
  }
  bool HasBeenRegistered() const {
    return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
  }

  void SetSpilledRegisters(LOperand** registers,
                           LOperand** double_registers) {
    spilled_registers_ = registers;
    spilled_double_registers_ = double_registers;
  }

  void PrintTo(StringStream* stream);

542
  Zone* zone() const { return zone_; }
543

544 545
 private:
  Handle<JSFunction> closure_;
546
  FrameType frame_type_;
547 548 549
  int arguments_stack_height_;
  int deoptimization_index_;
  int translation_index_;
550
  BailoutId ast_id_;
551
  int parameter_count_;
552
  int pc_offset_;
553
  ZoneList<LOperand*> values_;
554
  BitVector is_tagged_;
555
  BitVector is_uint32_;
556 557 558 559 560 561 562 563

  // Allocation index indexed arrays of spill slot operands for registers
  // that are also in spill slots at an OSR entry.  NULL for environments
  // that do not correspond to an OSR entry.
  LOperand** spilled_registers_;
  LOperand** spilled_double_registers_;

  LEnvironment* outer_;
564
  HEnterInlined* entry_;
565 566

  Zone* zone_;
567 568
};

569 570 571 572 573 574 575 576

// Iterates over the non-null, non-constant operands in an environment.
class ShallowIterator BASE_EMBEDDED {
 public:
  explicit ShallowIterator(LEnvironment* env)
      : env_(env),
        limit_(env != NULL ? env->values()->length() : 0),
        current_(0) {
577
    SkipUninteresting();
578 579
  }

580
  bool Done() { return current_ >= limit_; }
581

582 583
  LOperand* Current() {
    ASSERT(!Done());
584
    ASSERT(env_->values()->at(current_) != NULL);
585 586 587
    return env_->values()->at(current_);
  }

588 589 590 591
  void Advance() {
    ASSERT(!Done());
    ++current_;
    SkipUninteresting();
592 593
  }

594
  LEnvironment* env() { return env_; }
595 596

 private:
597
  bool ShouldSkip(LOperand* op) {
598 599 600
    return op == NULL || op->IsConstantOperand() || op->IsArgument();
  }

601 602 603 604
  // Skip until something interesting, beginning with and including current_.
  void SkipUninteresting() {
    while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
      ++current_;
605 606 607 608 609 610 611 612 613 614 615 616 617
    }
  }

  LEnvironment* env_;
  int limit_;
  int current_;
};


// Iterator for non-null, non-constant operands incl. outer environments.
class DeepIterator BASE_EMBEDDED {
 public:
  explicit DeepIterator(LEnvironment* env)
618 619
      : current_iterator_(env) {
    SkipUninteresting();
620 621
  }

622 623 624 625
  bool Done() { return current_iterator_.Done(); }

  LOperand* Current() {
    ASSERT(!current_iterator_.Done());
626
    ASSERT(current_iterator_.Current() != NULL);
627
    return current_iterator_.Current();
628 629
  }

630 631 632
  void Advance() {
    current_iterator_.Advance();
    SkipUninteresting();
633 634 635
  }

 private:
636 637 638 639
  void SkipUninteresting() {
    while (current_iterator_.env() != NULL && current_iterator_.Done()) {
      current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
    }
640 641 642 643 644
  }

  ShallowIterator current_iterator_;
};

645

646
class LPlatformChunk;
647 648 649 650
class LGap;
class LLabel;

// Superclass providing data and behavior common to all the
651 652
// arch-specific LPlatformChunk classes.
class LChunk: public ZoneObject {
653
 public:
654
  static LChunk* NewChunk(HGraph* graph);
655 656 657

  void AddInstruction(LInstruction* instruction, HBasicBlock* block);
  LConstantOperand* DefineConstantOperand(HConstant* constant);
658
  HConstant* LookupConstant(LConstantOperand* operand) const;
659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686
  Representation LookupLiteralRepresentation(LConstantOperand* operand) const;

  int ParameterAt(int index);
  int GetParameterStackSlot(int index) const;
  int spill_slot_count() const { return spill_slot_count_; }
  CompilationInfo* info() const { return info_; }
  HGraph* graph() const { return graph_; }
  const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
  void AddGapMove(int index, LOperand* from, LOperand* to);
  LGap* GetGapAt(int index) const;
  bool IsGapAt(int index) const;
  int NearestGapPos(int index) const;
  void MarkEmptyBlocks();
  const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
  LLabel* GetLabel(int block_id) const;
  int LookupDestination(int block_id) const;
  Label* GetAssemblyLabel(int block_id) const;

  const ZoneList<Handle<JSFunction> >* inlined_closures() const {
    return &inlined_closures_;
  }

  void AddInlinedClosure(Handle<JSFunction> closure) {
    inlined_closures_.Add(closure, zone());
  }

  Zone* zone() const { return info_->zone(); }

687
  Handle<Code> Codegen(Code::Kind kind);
688

689
 protected:
690
  LChunk(CompilationInfo* info, HGraph* graph)
691 692 693 694 695 696 697
      : spill_slot_count_(0),
        info_(info),
        graph_(graph),
        instructions_(32, graph->zone()),
        pointer_maps_(8, graph->zone()),
        inlined_closures_(1, graph->zone()) { }

698 699 700 701 702 703 704 705 706 707 708
  int spill_slot_count_;

 private:
  CompilationInfo* info_;
  HGraph* const graph_;
  ZoneList<LInstruction*> instructions_;
  ZoneList<LPointerMap*> pointer_maps_;
  ZoneList<Handle<JSFunction> > inlined_closures_;
};


709
int ElementsKindToShiftSize(ElementsKind elements_kind);
710 711


} }  // namespace v8::internal

#endif  // V8_LITHIUM_H_