// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "lithium-allocator-inl.h"

#include "hydrogen.h"
#include "string-stream.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


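// A UsePosition records a single use of a value at a lifetime position.
// For unallocated operands, cache whether the use requires a register and
// whether having one would merely be beneficial.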
UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  ASSERT(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}


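// Split this interval at pos, producing a new interval [pos, end_[ that is
// linked into the chain directly after this one.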
void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  ASSERT(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}


#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    ASSERT(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}


#endif


LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new(zone) LOperand()),
      spill_start_index_(kMaxInt) { }


void LiveRange::set_assigned_register(int reg, Zone* zone) {
  ASSERT(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  ASSERT(!IsSpilled());
  ASSERT(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  ASSERT(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  ASSERT(!operand->IsUnallocated());
  ASSERT(spill_operand_ != NULL);
  ASSERT(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}


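// Find the first use position at or after start. The search resumes from the
// last processed use, so scanning with non-decreasing start positions is
// amortized over the use list.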
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return
      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}


LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    ASSERT(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    ASSERT(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    ASSERT(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}


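// Detach the part of this live range at and after position and move it into
// result, which must be empty. The new range becomes a child of this range's
// top-level parent and is linked into the chain right after this range.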
void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  ASSERT(Start().Value() < position.Value());
  ASSERT(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child because the split child owns the use interval
    // covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions.  This is needed for the correctness of the register
// allocation algorithm.  If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used.  This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  ASSERT(first_interval_ != NULL);
  ASSERT(first_interval_->start().Value() <= start.Value());
  ASSERT(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}


void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}


void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      ASSERT(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}


void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}


void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    ASSERT(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    ASSERT(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) { }


void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block. Values live on backward successor
  // edges are not included, since their live_in sets are not computed yet.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}


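// Fixed live ranges are keyed by negative ids; double registers are numbered
// below the block of ids reserved for the general purpose registers.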
int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kMaxNumAllocatableRegisters;
}


LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  ASSERT(index < Register::kMaxNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    ASSERT(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  ASSERT(index < DoubleRegister::NumAllocatableRegisters());
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    ASSERT(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}


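// Meet the register constraints arising at the gap gap_index between two
// adjacent instructions: fixed temporaries and fixed outputs of the first
// instruction, fixed and writable-register inputs of the second, and
// "output same as input" operands of the second.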
void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        ASSERT(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}


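// Walk the instructions of the block in reverse order. Definitions shorten
// the current use interval (or create a short one for a definition without
// uses), while uses extend intervals back to the start of the block.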
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      ASSERT(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        if (instr->ClobbersDoubleRegisters()) {
          for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);
        }
      }
    }

    index = index - 1;
  }
}


void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without the special processing done in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


bool LAllocator::Allocate(LChunk* chunk) {
  ASSERT(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
                                   chunk->zone());
  assigned_double_registers_ =
      new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
                                   chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  first_artificial_register_ = next_virtual_register_;
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}


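// Ensure that the value of range is correctly transferred over the edge from
// pred to block: if the parts of the range covering the two sides of the
// edge were allocated to different locations, insert a gap move on the edge.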
void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      ASSERT(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      ASSERT(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  ASSERT(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        ASSERT(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}


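// Find the parallel move into which a connecting move at pos should go:
// the START or END move of the gap at pos itself, or the AFTER/BEFORE move
// of the neighboring gap if pos falls on a proper instruction.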
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


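// For every split live range, insert a move between adjacent children whose
// intervals touch inside a block, so the value ends up in the location
// expected by the next part of the range.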
void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}


void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}


void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      ASSERT(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
              block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end, zone());
        iterator.Advance();
      }

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        if (chunk_->info()->IsStub()) {
          CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
          PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
        } else {
          ASSERT(chunk_->info()->IsOptimizing());
          AllowHandleDereference allow_deref;
          PrintF("Function: %s\n",
                 *chunk_->info()->function()->debug_name()->ToCString());
        }
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
    }
  }
}


bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}


void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
      }
    }
  }
}


void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  num_registers_ = Register::NumAllocatableRegisters();
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  num_registers_ = DoubleRegister::NumAllocatableRegisters();
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}


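// Main loop of the linear-scan allocator: live ranges of the current kind
// are processed in order of increasing start position.  At each start
// position, active ranges that have ended are retired, active ranges with
// a lifetime hole here become inactive, and inactive ranges covering the
// position become active again.  The current range then gets a register
// that is free for (a prefix of) its lifetime, or falls back to
// AllocateBlockedReg, which may evict other ranges.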
void LAllocator::AllocateRegisters() {
  ASSERT(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (live_ranges_[i]->Kind() == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(reusable_slots_.is_empty());
  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    ASSERT(mode_ == GENERAL_REGISTERS);
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // The first use that benefits from a register is not immediately
        // after the start of the live range, so eagerly spill only the part
        // of the range up to that use and requeue the rest.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}


const char* LAllocator::RegisterName(int allocation_index) {
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


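// A value needs pointer-map entries only if it may hold a heap pointer at
// a safe point.  Smi-typed values are tagged but always hold immediate
// integers, so the garbage collector never needs to visit them.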
bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged() && !value->type().IsSmi();
}


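// Values with double representation, as well as artificial registers that
// were recorded as holding doubles, are allocated from the double register
// set; everything else uses general-purpose registers.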
RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  }

  return GENERAL_REGISTERS;
}


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}


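// Insert into the unhandled list while keeping it sorted by decreasing
// start position.  The search starts from the end of the list: a newly
// added range starts at or after the allocation finger (see the assert
// below), so its slot is near the end among the smallest start positions.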
void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  ASSERT(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  ASSERT(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the list.  This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
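// For example, ranges starting at instructions 4, 12 and 7 are ordered
// [12, 7, 4] after sorting, so RemoveLast() hands out the range that
// starts at 4 first.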
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


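// Spill slots are recycled: when the last part of a live range is retired
// and its top level owns a stack slot, remember the slot in reusable_slots_
// so that TryReuseSpillSlot below can hand it to a later, non-overlapping
// range instead of allocating a fresh slot.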
void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range, zone());
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}


void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
              Register::kMaxNumAllocatableRegisters);


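// Try to find a register that stays free long enough for the current range.
// free_until_pos[r] is the position at which register r stops being usable:
// position 0 for registers held by an active range, the first intersection
// with current for registers held by an inactive range, and MaxPosition()
// for registers nobody holds.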
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
    int register_index = hint->index();
    TraceAlloc(
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->id(),
        current->End().Value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
                 current->id());
      SetLiveRangeAssignedRegister(current, register_index);
      return true;
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }


  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}


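// No register was free for the whole range.  Choose the register whose
// current holders have their next register-beneficial use furthest away
// (use_pos), evict them, and assign that register to current.  block_pos
// records where a register is hard-blocked by a fixed range and therefore
// cannot be taken at all.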
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }


  LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill starting part of live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free.  Thus we need to find and spill
  // parts of active and inactive live ranges that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}


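// Spilling inside a loop would store to the stack on every iteration.  If
// the range also covers the loop header and has no register-beneficial use
// in between, move the spill position up to the header (repeating for
// enclosing loops) so that the store happens only once, before the loop.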
LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
    range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
    // We are going to spill live range inside the loop.
    // If possible try to move spilling position backwards to loop header.
    // This will reduce number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register beneficial use inside the loop before the pos.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = loop_header->parent_loop_header();
  }

  return pos;
}


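// current has just taken a register that other active or inactive ranges
// still occupy.  Split every such range and spill the part that conflicts
// with current; any part that still needs the register later is requeued
// as unhandled.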
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        SpillAfter(range, spill_pos);
      } else {
        // When spilling between spill_pos and next_pos ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range as this violates allocation invariant
        // and will lead to an inconsistent state of active and inactive
        // live-ranges: ranges are allocated in order of their start positions,
        // ranges are retired from active/inactive when the start of the
        // current live-range is larger than their end.
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      }
      if (!AllocationOk()) return;
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        if (!AllocationOk()) return;
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
      InstructionAt(pos.InstructionIndex())->IsLabel();
}


LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if the split occurred at the end
  // of a control instruction.
  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}


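// Choose a split position inside [start, end].  Prefer the latest possible
// position, but when the interval crosses a loop boundary, split at the
// entry of the outermost loop contained in it so that the connecting move
// is executed outside the loop body.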
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice: start and end are within the same instruction.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
      block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}


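// Split range at start and keep the new part spilled at least on the
// interval [start, until[.  If that part is still live before end it is
// split once more: the middle piece stays spilled and the tail is requeued
// so it can get a register of its own.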
void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position in ]start+1, end[, spill the middle part
    // and add the rest to the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        Max(second_part->Start().InstructionEnd(), until),
        end.PrevInstruction().InstructionEnd());
    if (!AllocationOk()) return;

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill.  Just add it to the unhandled list as a whole.
    AddToUnhandledSorted(second_part);
  }
}


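// Give the top-level range a stack slot, reusing a retired slot when one is
// available, and mark this part of the range as spilled.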
void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(chunk()->zone());
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


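// Phase wrapper for the allocator: it samples the zone size on entry and,
// on destruction, reports the memory consumed to the hydrogen statistics
// and emits lithium and live-range traces when tracing is enabled.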
LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();
  }
}


LAllocatorPhase::~LAllocatorPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = allocator_->zone()->allocation_size() -
                    allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), TimeDelta(), size);
  }

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
  }

#ifdef DEBUG
  if (allocator_ != NULL) allocator_->Verify();
#endif
}


} }  // namespace v8::internal