// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/instruction.h"

#include <iomanip>

#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"
#include "src/compiler/schedule.h"
#include "src/compiler/state-values-utils.h"
#include "src/register-configuration.h"
#include "src/source-position.h"

namespace v8 {
namespace internal {
namespace compiler {

const RegisterConfiguration* (*GetRegConfig)() = RegisterConfiguration::Default;

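// Returns the condition that gives the same result when the two compared
// operands are swapped (e.g. "a < b" becomes "b > a"). Conditions that are
// symmetric in their operands, such as (in)equality and overflow, are
// returned unchanged.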
FlagsCondition CommuteFlagsCondition(FlagsCondition condition) {
  switch (condition) {
    case kSignedLessThan:
      return kSignedGreaterThan;
    case kSignedGreaterThanOrEqual:
      return kSignedLessThanOrEqual;
    case kSignedLessThanOrEqual:
      return kSignedGreaterThanOrEqual;
    case kSignedGreaterThan:
      return kSignedLessThan;
    case kUnsignedLessThan:
      return kUnsignedGreaterThan;
    case kUnsignedGreaterThanOrEqual:
      return kUnsignedLessThanOrEqual;
    case kUnsignedLessThanOrEqual:
      return kUnsignedGreaterThanOrEqual;
    case kUnsignedGreaterThan:
      return kUnsignedLessThan;
    case kFloatLessThanOrUnordered:
      return kFloatGreaterThanOrUnordered;
    case kFloatGreaterThanOrEqual:
      return kFloatLessThanOrEqual;
    case kFloatLessThanOrEqual:
      return kFloatGreaterThanOrEqual;
    case kFloatGreaterThanOrUnordered:
      return kFloatLessThanOrUnordered;
    case kFloatLessThan:
      return kFloatGreaterThan;
    case kFloatGreaterThanOrEqualOrUnordered:
      return kFloatLessThanOrEqualOrUnordered;
    case kFloatLessThanOrEqualOrUnordered:
      return kFloatGreaterThanOrEqualOrUnordered;
    case kFloatGreaterThan:
      return kFloatLessThan;
    case kPositiveOrZero:
    case kNegative:
      UNREACHABLE();
      break;
    case kEqual:
    case kNotEqual:
    case kOverflow:
    case kNotOverflow:
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      return condition;
  }
  UNREACHABLE();
}

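// Two operands interfere if they may refer to overlapping storage. Without
// complex FP aliasing, or for non-FP operands, this reduces to canonicalized
// equality; otherwise FP registers and stack slots are also checked for
// partial overlap.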
bool InstructionOperand::InterferesWith(const InstructionOperand& other) const {
  if (kSimpleFPAliasing || !this->IsFPLocationOperand() ||
      !other.IsFPLocationOperand())
    return EqualsCanonicalized(other);
  // Aliasing is complex and both operands are fp locations.
  const LocationOperand& loc = *LocationOperand::cast(this);
  const LocationOperand& other_loc = LocationOperand::cast(other);
  LocationOperand::LocationKind kind = loc.location_kind();
  LocationOperand::LocationKind other_kind = other_loc.location_kind();
  if (kind != other_kind) return false;
  MachineRepresentation rep = loc.representation();
  MachineRepresentation other_rep = other_loc.representation();
  if (rep == other_rep) return EqualsCanonicalized(other);
  if (kind == LocationOperand::REGISTER) {
    // FP register-register interference.
    return GetRegConfig()->AreAliases(rep, loc.register_code(), other_rep,
                                      other_loc.register_code());
  } else {
    // FP slot-slot interference. Slots of different FP reps can alias because
    // the gap resolver may break a move into 2 or 4 equivalent smaller moves.
    DCHECK_EQ(LocationOperand::STACK_SLOT, kind);
    int index_hi = loc.index();
    int index_lo = index_hi - (1 << ElementSizeLog2Of(rep)) / kPointerSize + 1;
    int other_index_hi = other_loc.index();
    int other_index_lo =
        other_index_hi - (1 << ElementSizeLog2Of(other_rep)) / kPointerSize + 1;
    return other_index_hi >= index_lo && index_hi >= other_index_lo;
  }
  return false;
}

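// Returns true if a value could be moved between the two locations, i.e. both
// are general-purpose locations or both are floating-point locations of a
// compatible representation.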
bool LocationOperand::IsCompatible(LocationOperand* op) {
  if (IsRegister() || IsStackSlot()) {
    return op->IsRegister() || op->IsStackSlot();
  } else if (kSimpleFPAliasing) {
    // A backend may choose to generate the same instruction sequence regardless
    // of the FP representation. As a result, we can relax the compatibility and
    // allow a Double to be moved in a Float for example. However, this is only
    // allowed if registers do not overlap.
    return (IsFPRegister() || IsFPStackSlot()) &&
           (op->IsFPRegister() || op->IsFPStackSlot());
  } else if (IsFloatRegister() || IsFloatStackSlot()) {
    return op->IsFloatRegister() || op->IsFloatStackSlot();
  } else if (IsDoubleRegister() || IsDoubleStackSlot()) {
    return op->IsDoubleRegister() || op->IsDoubleStackSlot();
  } else {
    return (IsSimd128Register() || IsSimd128StackSlot()) &&
           (op->IsSimd128Register() || op->IsSimd128StackSlot());
  }
}

void InstructionOperand::Print() const { StdoutStream{} << *this << std::endl; }

std::ostream& operator<<(std::ostream& os, const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::UNALLOCATED: {
      const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
      os << "v" << unalloc->virtual_register();
      if (unalloc->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
        return os << "(=" << unalloc->fixed_slot_index() << "S)";
      }
      switch (unalloc->extended_policy()) {
        case UnallocatedOperand::NONE:
          return os;
        case UnallocatedOperand::FIXED_REGISTER:
          return os << "(="
                    << Register::from_code(unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::FIXED_FP_REGISTER:
          return os << "(="
                    << DoubleRegister::from_code(
                           unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          return os << "(R)";
        case UnallocatedOperand::MUST_HAVE_SLOT:
          return os << "(S)";
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          return os << "(1)";
        case UnallocatedOperand::REGISTER_OR_SLOT:
          return os << "(-)";
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          return os << "(*)";
      }
    }
    case InstructionOperand::CONSTANT:
      return os << "[constant:" << ConstantOperand::cast(op).virtual_register()
                << "]";
    case InstructionOperand::IMMEDIATE: {
      ImmediateOperand imm = ImmediateOperand::cast(op);
      switch (imm.type()) {
        case ImmediateOperand::INLINE:
          return os << "#" << imm.inline_value();
        case ImmediateOperand::INDEXED:
          return os << "[immediate:" << imm.indexed_value() << "]";
      }
    }
    case InstructionOperand::EXPLICIT:
    case InstructionOperand::ALLOCATED: {
      LocationOperand allocated = LocationOperand::cast(op);
      if (op.IsStackSlot()) {
        os << "[stack:" << allocated.index();
      } else if (op.IsFPStackSlot()) {
        os << "[fp_stack:" << allocated.index();
      } else if (op.IsRegister()) {
        const char* name =
            allocated.register_code() < Register::kNumRegisters
                ? RegisterName(Register::from_code(allocated.register_code()))
                : Assembler::GetSpecialRegisterName(allocated.register_code());
        os << "[" << name << "|R";
      } else if (op.IsDoubleRegister()) {
        os << "[" << DoubleRegister::from_code(allocated.register_code())
           << "|R";
      } else if (op.IsFloatRegister()) {
        os << "[" << FloatRegister::from_code(allocated.register_code())
           << "|R";
      } else {
        DCHECK(op.IsSimd128Register());
        os << "[" << Simd128Register::from_code(allocated.register_code())
           << "|R";
      }
      if (allocated.IsExplicit()) {
        os << "|E";
      }
      switch (allocated.representation()) {
        case MachineRepresentation::kNone:
          os << "|-";
          break;
        case MachineRepresentation::kBit:
          os << "|b";
          break;
        case MachineRepresentation::kWord8:
          os << "|w8";
          break;
        case MachineRepresentation::kWord16:
          os << "|w16";
          break;
        case MachineRepresentation::kWord32:
          os << "|w32";
          break;
        case MachineRepresentation::kWord64:
          os << "|w64";
          break;
        case MachineRepresentation::kFloat32:
          os << "|f32";
          break;
        case MachineRepresentation::kFloat64:
          os << "|f64";
          break;
        case MachineRepresentation::kSimd128:
          os << "|s128";
          break;
        case MachineRepresentation::kTaggedSigned:
          os << "|ts";
          break;
        case MachineRepresentation::kTaggedPointer:
          os << "|tp";
          break;
        case MachineRepresentation::kTagged:
          os << "|t";
          break;
      }
      return os << "]";
    }
    case InstructionOperand::INVALID:
      return os << "(x)";
  }
  UNREACHABLE();
}

void MoveOperands::Print() const {
  StdoutStream{} << destination() << " = " << source() << std::endl;
}

std::ostream& operator<<(std::ostream& os, const MoveOperands& mo) {
  os << mo.destination();
  if (!mo.source().Equals(mo.destination())) {
    os << " = " << mo.source();
  }
  return os << ";";
}

bool ParallelMove::IsRedundant() const {
  for (MoveOperands* move : *this) {
    if (!move->IsRedundant()) return false;
  }
  return true;
}

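// Prepares `move` for insertion after this parallel move: if an existing move
// already writes `move`'s source, `move` is rewritten to read that move's
// source instead, and moves whose destinations `move` overwrites are
// collected in `to_eliminate`.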
void ParallelMove::PrepareInsertAfter(
    MoveOperands* move, ZoneVector<MoveOperands*>* to_eliminate) const {
  bool no_aliasing =
      kSimpleFPAliasing || !move->destination().IsFPLocationOperand();
  MoveOperands* replacement = nullptr;
  MoveOperands* eliminated = nullptr;
  for (MoveOperands* curr : *this) {
    if (curr->IsEliminated()) continue;
    if (curr->destination().EqualsCanonicalized(move->source())) {
      // We must replace move's source with curr's destination in order to
      // insert it into this ParallelMove.
      DCHECK(!replacement);
      replacement = curr;
      if (no_aliasing && eliminated != nullptr) break;
    } else if (curr->destination().InterferesWith(move->destination())) {
      // We can eliminate curr, since move overwrites at least a part of its
      // destination, implying its value is no longer live.
      eliminated = curr;
      to_eliminate->push_back(curr);
      if (no_aliasing && replacement != nullptr) break;
    }
  }
  if (replacement != nullptr) move->set_source(replacement->source());
}

ExplicitOperand::ExplicitOperand(LocationKind kind, MachineRepresentation rep,
                                 int index)
    : LocationOperand(EXPLICIT, kind, rep, index) {
  DCHECK_IMPLIES(kind == REGISTER && !IsFloatingPoint(rep),
                 GetRegConfig()->IsAllocatableGeneralCode(index));
  DCHECK_IMPLIES(kind == REGISTER && rep == MachineRepresentation::kFloat32,
                 GetRegConfig()->IsAllocatableFloatCode(index));
  DCHECK_IMPLIES(kind == REGISTER && (rep == MachineRepresentation::kFloat64),
                 GetRegConfig()->IsAllocatableDoubleCode(index));
}

Instruction::Instruction(InstructionCode opcode)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
                 TempCountField::encode(0) | IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;
}

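// Outputs, inputs and temps are copied into the trailing operands_ array, in
// that order.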
Instruction::Instruction(InstructionCode opcode, size_t output_count,
                         InstructionOperand* outputs, size_t input_count,
                         InstructionOperand* inputs, size_t temp_count,
                         InstructionOperand* temps)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(output_count) |
                 InputCountField::encode(input_count) |
                 TempCountField::encode(temp_count) |
                 IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;
  size_t offset = 0;
  for (size_t i = 0; i < output_count; ++i) {
    DCHECK(!outputs[i].IsInvalid());
    operands_[offset++] = outputs[i];
  }
  for (size_t i = 0; i < input_count; ++i) {
    DCHECK(!inputs[i].IsInvalid());
    operands_[offset++] = inputs[i];
  }
  for (size_t i = 0; i < temp_count; ++i) {
    DCHECK(!temps[i].IsInvalid());
    operands_[offset++] = temps[i];
  }
}

bool Instruction::AreMovesRedundant() const {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    if (parallel_moves_[i] != nullptr && !parallel_moves_[i]->IsRedundant()) {
      return false;
    }
  }
  return true;
}

void Instruction::Print() const { StdoutStream{} << *this << std::endl; }

std::ostream& operator<<(std::ostream& os, const ParallelMove& pm) {
  const char* space = "";
  for (MoveOperands* move : pm) {
    if (move->IsEliminated()) continue;
    os << space << *move;
    space = " ";
  }
  return os;
}

void ReferenceMap::RecordReference(const AllocatedOperand& op) {
  // Do not record arguments as pointers.
  if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
  DCHECK(!op.IsFPRegister() && !op.IsFPStackSlot());
  reference_operands_.push_back(op);
}

std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm) {
  os << "{";
  const char* separator = "";
  for (const InstructionOperand& op : pm.reference_operands_) {
    os << separator << op;
    separator = ";";
  }
  return os << "}";
}

std::ostream& operator<<(std::ostream& os, const ArchOpcode& ao) {
  switch (ao) {
#define CASE(Name) \
  case k##Name:    \
    return os << #Name;
    ARCH_OPCODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}

std::ostream& operator<<(std::ostream& os, const AddressingMode& am) {
  switch (am) {
    case kMode_None:
      return os;
#define CASE(Name)   \
  case kMode_##Name: \
    return os << #Name;
      TARGET_ADDRESSING_MODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}

std::ostream& operator<<(std::ostream& os, const FlagsMode& fm) {
  switch (fm) {
    case kFlags_none:
      return os;
    case kFlags_branch:
      return os << "branch";
    case kFlags_branch_and_poison:
      return os << "branch_and_poison";
    case kFlags_deoptimize:
      return os << "deoptimize";
    case kFlags_deoptimize_and_poison:
      return os << "deoptimize_and_poison";
    case kFlags_set:
      return os << "set";
    case kFlags_trap:
      return os << "trap";
  }
  UNREACHABLE();
}

std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc) {
  switch (fc) {
    case kEqual:
      return os << "equal";
    case kNotEqual:
      return os << "not equal";
    case kSignedLessThan:
      return os << "signed less than";
    case kSignedGreaterThanOrEqual:
      return os << "signed greater than or equal";
    case kSignedLessThanOrEqual:
      return os << "signed less than or equal";
    case kSignedGreaterThan:
      return os << "signed greater than";
    case kUnsignedLessThan:
      return os << "unsigned less than";
    case kUnsignedGreaterThanOrEqual:
      return os << "unsigned greater than or equal";
    case kUnsignedLessThanOrEqual:
      return os << "unsigned less than or equal";
    case kUnsignedGreaterThan:
      return os << "unsigned greater than";
    case kFloatLessThanOrUnordered:
      return os << "less than or unordered (FP)";
    case kFloatGreaterThanOrEqual:
      return os << "greater than or equal (FP)";
    case kFloatLessThanOrEqual:
      return os << "less than or equal (FP)";
    case kFloatGreaterThanOrUnordered:
      return os << "greater than or unordered (FP)";
    case kFloatLessThan:
      return os << "less than (FP)";
    case kFloatGreaterThanOrEqualOrUnordered:
      return os << "greater than, equal or unordered (FP)";
    case kFloatLessThanOrEqualOrUnordered:
      return os << "less than, equal or unordered (FP)";
    case kFloatGreaterThan:
      return os << "greater than (FP)";
    case kUnorderedEqual:
      return os << "unordered equal";
    case kUnorderedNotEqual:
      return os << "unordered not equal";
    case kOverflow:
      return os << "overflow";
    case kNotOverflow:
      return os << "not overflow";
    case kPositiveOrZero:
      return os << "positive or zero";
    case kNegative:
      return os << "negative";
  }
  UNREACHABLE();
}

std::ostream& operator<<(std::ostream& os, const Instruction& instr) {
  os << "gap ";
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    os << "(";
    if (instr.parallel_moves()[i] != nullptr) {
      os << *instr.parallel_moves()[i];
    }
    os << ") ";
  }
  os << "\n          ";

  if (instr.OutputCount() == 1) {
    os << *instr.OutputAt(0) << " = ";
  } else if (instr.OutputCount() > 1) {
    os << "(" << *instr.OutputAt(0);
    for (size_t i = 1; i < instr.OutputCount(); i++) {
      os << ", " << *instr.OutputAt(i);
    }
    os << ") = ";
  }

  os << ArchOpcodeField::decode(instr.opcode());
  AddressingMode am = AddressingModeField::decode(instr.opcode());
  if (am != kMode_None) {
    os << " : " << AddressingModeField::decode(instr.opcode());
  }
  FlagsMode fm = FlagsModeField::decode(instr.opcode());
  if (fm != kFlags_none) {
    os << " && " << fm << " if " << FlagsConditionField::decode(instr.opcode());
  }
  for (size_t i = 0; i < instr.InputCount(); i++) {
    os << " " << *instr.InputAt(i);
  }
  return os;
}

Constant::Constant(int32_t v) : type_(kInt32), value_(v) {}

Constant::Constant(RelocatablePtrConstantInfo info) {
  if (info.type() == RelocatablePtrConstantInfo::kInt32) {
    type_ = kInt32;
  } else if (info.type() == RelocatablePtrConstantInfo::kInt64) {
    type_ = kInt64;
  } else {
    UNREACHABLE();
  }
  value_ = info.value();
  rmode_ = info.rmode();
}

Handle<HeapObject> Constant::ToHeapObject() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<HeapObject> value(
      reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}

Handle<Code> Constant::ToCode() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<Code> value(reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}

const StringConstantBase* Constant::ToDelayedStringConstant() const {
  DCHECK_EQ(kDelayedStringConstant, type());
  const StringConstantBase* value =
      bit_cast<StringConstantBase*>(static_cast<intptr_t>(value_));
  return value;
}

std::ostream& operator<<(std::ostream& os, const Constant& constant) {
  switch (constant.type()) {
    case Constant::kInt32:
      return os << constant.ToInt32();
    case Constant::kInt64:
      return os << constant.ToInt64() << "l";
    case Constant::kFloat32:
      return os << constant.ToFloat32() << "f";
    case Constant::kFloat64:
      return os << constant.ToFloat64().value();
    case Constant::kExternalReference:
      return os << constant.ToExternalReference().address();
    case Constant::kHeapObject:
      return os << Brief(*constant.ToHeapObject());
    case Constant::kRpoNumber:
      return os << "RPO" << constant.ToRpoNumber().ToInt();
    case Constant::kDelayedStringConstant:
      return os << "DelayedStringConstant: "
                << constant.ToDelayedStringConstant();
  }
  UNREACHABLE();
}

PhiInstruction::PhiInstruction(Zone* zone, int virtual_register,
                               size_t input_count)
    : virtual_register_(virtual_register),
      output_(UnallocatedOperand(UnallocatedOperand::NONE, virtual_register)),
      operands_(input_count, InstructionOperand::kInvalidVirtualRegister,
                zone) {}

void PhiInstruction::SetInput(size_t offset, int virtual_register) {
  DCHECK_EQ(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}

void PhiInstruction::RenameInput(size_t offset, int virtual_register) {
  DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}

InstructionBlock::InstructionBlock(Zone* zone, RpoNumber rpo_number,
                                   RpoNumber loop_header, RpoNumber loop_end,
                                   bool deferred, bool handler)
    : successors_(zone),
      predecessors_(zone),
      phis_(zone),
      ao_number_(RpoNumber::Invalid()),
      rpo_number_(rpo_number),
      loop_header_(loop_header),
      loop_end_(loop_end),
      deferred_(deferred),
      handler_(handler) {}

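// Returns the position of `rpo_number` in the predecessor list, or the number
// of predecessors if it is not found.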
size_t InstructionBlock::PredecessorIndexOf(RpoNumber rpo_number) const {
  size_t j = 0;
  for (InstructionBlock::Predecessors::const_iterator i = predecessors_.begin();
       i != predecessors_.end(); ++i, ++j) {
    if (*i == rpo_number) break;
  }
  return j;
}

static RpoNumber GetRpo(const BasicBlock* block) {
  if (block == nullptr) return RpoNumber::Invalid();
  return RpoNumber::FromInt(block->rpo_number());
}

static RpoNumber GetLoopEndRpo(const BasicBlock* block) {
  if (!block->IsLoopHeader()) return RpoNumber::Invalid();
  return RpoNumber::FromInt(block->loop_end()->rpo_number());
}

static InstructionBlock* InstructionBlockFor(Zone* zone,
                                             const BasicBlock* block) {
  bool is_handler =
      !block->empty() && block->front()->opcode() == IrOpcode::kIfException;
  InstructionBlock* instr_block = new (zone)
      InstructionBlock(zone, GetRpo(block), GetRpo(block->loop_header()),
                       GetLoopEndRpo(block), block->deferred(), is_handler);
  // Map successors and predecessors.
  instr_block->successors().reserve(block->SuccessorCount());
  for (BasicBlock* successor : block->successors()) {
    instr_block->successors().push_back(GetRpo(successor));
  }
  instr_block->predecessors().reserve(block->PredecessorCount());
  for (BasicBlock* predecessor : block->predecessors()) {
    instr_block->predecessors().push_back(GetRpo(predecessor));
  }
  return instr_block;
}

std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block) {
  const InstructionBlock* block = printable_block.block_;
  const InstructionSequence* code = printable_block.code_;

  os << "B" << block->rpo_number();
  os << ": AO#" << block->ao_number();
  if (block->IsDeferred()) os << " (deferred)";
  if (!block->needs_frame()) os << " (no frame)";
  if (block->must_construct_frame()) os << " (construct frame)";
  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
  if (block->IsLoopHeader()) {
    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
       << ")";
  }
  os << "  instructions: [" << block->code_start() << ", " << block->code_end()
     << ")" << std::endl
     << " predecessors:";

  for (RpoNumber pred : block->predecessors()) {
    os << " B" << pred.ToInt();
  }
  os << std::endl;

  for (const PhiInstruction* phi : block->phis()) {
    os << "     phi: " << phi->output() << " =";
    for (int input : phi->operands()) {
      os << " v" << input;
    }
    os << std::endl;
  }

  for (int j = block->first_instruction_index();
       j <= block->last_instruction_index(); j++) {
    os << "   " << std::setw(5) << j << ": " << *code->InstructionAt(j)
       << std::endl;
  }

  os << " successors:";
  for (RpoNumber succ : block->successors()) {
    os << " B" << succ.ToInt();
  }
  os << std::endl;
  return os;
}

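// Creates one InstructionBlock per basic block of the schedule, indexed by
// RPO number.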
InstructionBlocks* InstructionSequence::InstructionBlocksFor(
    Zone* zone, const Schedule* schedule) {
  InstructionBlocks* blocks = zone->NewArray<InstructionBlocks>(1);
  new (blocks) InstructionBlocks(
      static_cast<int>(schedule->rpo_order()->size()), nullptr, zone);
  size_t rpo_number = 0;
  for (BasicBlockVector::const_iterator it = schedule->rpo_order()->begin();
       it != schedule->rpo_order()->end(); ++it, ++rpo_number) {
    DCHECK(!(*blocks)[rpo_number]);
    DCHECK(GetRpo(*it).ToSize() == rpo_number);
    (*blocks)[rpo_number] = InstructionBlockFor(zone, *it);
  }
  return blocks;
}

void InstructionSequence::ValidateEdgeSplitForm() const {
  // Validate blocks are in edge-split form: no block with multiple successors
  // has an edge to a block (== a successor) with more than one predecessor.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (block->SuccessorCount() > 1) {
      for (const RpoNumber& successor_id : block->successors()) {
        const InstructionBlock* successor = InstructionBlockAt(successor_id);
        // Expect precisely one predecessor: "block".
        CHECK(successor->PredecessorCount() == 1 &&
              successor->predecessors()[0] == block->rpo_number());
      }
    }
  }
}

void InstructionSequence::ValidateDeferredBlockExitPaths() const {
  // A deferred block with more than one successor must have all its successors
  // deferred.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (!block->IsDeferred() || block->SuccessorCount() <= 1) continue;
    for (RpoNumber successor_id : block->successors()) {
      CHECK(InstructionBlockAt(successor_id)->IsDeferred());
    }
  }
}

void InstructionSequence::ValidateDeferredBlockEntryPaths() const {
  // If a deferred block has multiple predecessors, they have to
  // all be deferred. Otherwise, we can run into a situation where a range
  // that spills only in deferred blocks inserts its spill in the block, but
  // other ranges need moves inserted by ResolveControlFlow in the predecessors,
  // which may clobber the register of this range.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (!block->IsDeferred() || block->PredecessorCount() <= 1) continue;
    for (RpoNumber predecessor_id : block->predecessors()) {
      CHECK(InstructionBlockAt(predecessor_id)->IsDeferred());
    }
  }
}

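// Checks that each virtual register is defined by at most one instruction
// output.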
void InstructionSequence::ValidateSSA() const {
  // TODO(mtrofin): We could use a local zone here instead.
  BitVector definitions(VirtualRegisterCount(), zone());
  for (const Instruction* instruction : *this) {
    for (size_t i = 0; i < instruction->OutputCount(); ++i) {
      const InstructionOperand* output = instruction->OutputAt(i);
      int vreg = (output->IsConstant())
                     ? ConstantOperand::cast(output)->virtual_register()
                     : UnallocatedOperand::cast(output)->virtual_register();
      CHECK(!definitions.Contains(vreg));
      definitions.Add(vreg);
    }
  }
}

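// Assigns assembly-order numbers: non-deferred blocks come first in RPO order
// (rotating non-deferred loops where possible), followed by all deferred
// blocks.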
void InstructionSequence::ComputeAssemblyOrder() {
  int ao = 0;
  RpoNumber invalid = RpoNumber::Invalid();

  ao_blocks_ = zone()->NewArray<InstructionBlocks>(1);
  new (ao_blocks_) InstructionBlocks(zone());
  ao_blocks_->reserve(instruction_blocks_->size());

  // Place non-deferred blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    DCHECK_NOT_NULL(block);
    if (block->IsDeferred()) continue;            // skip deferred blocks.
    if (block->ao_number() != invalid) continue;  // loop rotated.
    if (block->IsLoopHeader()) {
      bool header_align = true;
      if (FLAG_turbo_loop_rotation) {
        // Perform loop rotation for non-deferred loops.
        InstructionBlock* loop_end =
            instruction_blocks_->at(block->loop_end().ToSize() - 1);
        if (loop_end->SuccessorCount() == 1 && /* ends with goto */
            loop_end != block /* not a degenerate infinite loop */) {
          // If the last block has an unconditional jump back to the header,
          // then move it to be in front of the header in the assembly order.
          DCHECK_EQ(block->rpo_number(), loop_end->successors()[0]);
          loop_end->set_ao_number(RpoNumber::FromInt(ao++));
          ao_blocks_->push_back(loop_end);
          // This block will be the new machine-level loop header, so align
          // this block instead of the loop header block.
          loop_end->set_alignment(true);
          header_align = false;
        }
      }
      block->set_alignment(header_align);
    }
    block->set_ao_number(RpoNumber::FromInt(ao++));
    ao_blocks_->push_back(block);
  }
  // Add all leftover (deferred) blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    if (block->ao_number() == invalid) {
      block->set_ao_number(RpoNumber::FromInt(ao++));
      ao_blocks_->push_back(block);
    }
  }
  DCHECK_EQ(instruction_blocks_->size(), ao);
}

void InstructionSequence::RecomputeAssemblyOrderForTesting() {
  RpoNumber invalid = RpoNumber::Invalid();
  for (InstructionBlock* block : *instruction_blocks_) {
    block->set_ao_number(invalid);
  }
  ComputeAssemblyOrder();
}

InstructionSequence::InstructionSequence(Isolate* isolate,
                                         Zone* instruction_zone,
                                         InstructionBlocks* instruction_blocks)
    : isolate_(isolate),
      zone_(instruction_zone),
      instruction_blocks_(instruction_blocks),
      ao_blocks_(nullptr),
      source_positions_(zone()),
      constants_(ConstantMap::key_compare(),
                 ConstantMap::allocator_type(zone())),
      immediates_(zone()),
      instructions_(zone()),
      next_virtual_register_(0),
      reference_maps_(zone()),
      representations_(zone()),
      representation_mask_(0),
      deoptimization_entries_(zone()),
      current_block_(nullptr) {
  ComputeAssemblyOrder();
}

int InstructionSequence::NextVirtualRegister() {
  int virtual_register = next_virtual_register_++;
  CHECK_NE(virtual_register, InstructionOperand::kInvalidVirtualRegister);
  return virtual_register;
}

Instruction* InstructionSequence::GetBlockStart(RpoNumber rpo) const {
  const InstructionBlock* block = InstructionBlockAt(rpo);
  return InstructionAt(block->code_start());
}

void InstructionSequence::StartBlock(RpoNumber rpo) {
  DCHECK_NULL(current_block_);
  current_block_ = InstructionBlockAt(rpo);
  int code_start = static_cast<int>(instructions_.size());
  current_block_->set_code_start(code_start);
}

void InstructionSequence::EndBlock(RpoNumber rpo) {
  int end = static_cast<int>(instructions_.size());
  DCHECK_EQ(current_block_->rpo_number(), rpo);
  CHECK(current_block_->code_start() >= 0 &&
        current_block_->code_start() < end);
  current_block_->set_code_end(end);
  current_block_ = nullptr;
}

int InstructionSequence::AddInstruction(Instruction* instr) {
  DCHECK_NOT_NULL(current_block_);
  int index = static_cast<int>(instructions_.size());
  instr->set_block(current_block_);
  instructions_.push_back(instr);
  if (instr->NeedsReferenceMap()) {
    DCHECK_NULL(instr->reference_map());
    ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
    reference_map->set_instruction_position(index);
    instr->set_reference_map(reference_map);
    reference_maps_.push_back(reference_map);
  }
  return index;
}

InstructionBlock* InstructionSequence::GetInstructionBlock(
    int instruction_index) const {
  return instructions()[instruction_index]->block();
}

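// Representations narrower than a word (bit, word8, word16) are widened to
// the default representation; everything else is kept as-is.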
static MachineRepresentation FilterRepresentation(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
      return InstructionSequence::DefaultRepresentation();
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kFloat64:
    case MachineRepresentation::kSimd128:
      return rep;
    case MachineRepresentation::kNone:
      break;
  }

  UNREACHABLE();
}

MachineRepresentation InstructionSequence::GetRepresentation(
    int virtual_register) const {
  DCHECK_LE(0, virtual_register);
  DCHECK_LT(virtual_register, VirtualRegisterCount());
  if (virtual_register >= static_cast<int>(representations_.size())) {
    return DefaultRepresentation();
  }
  return representations_[virtual_register];
}

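// Records the representation of a virtual register. The table is grown
// lazily, and a previously recorded representation may only be replaced if it
// was still the default.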
void InstructionSequence::MarkAsRepresentation(MachineRepresentation rep,
                                               int virtual_register) {
  DCHECK_LE(0, virtual_register);
  DCHECK_LT(virtual_register, VirtualRegisterCount());
  if (virtual_register >= static_cast<int>(representations_.size())) {
    representations_.resize(VirtualRegisterCount(), DefaultRepresentation());
  }
  rep = FilterRepresentation(rep);
  DCHECK_IMPLIES(representations_[virtual_register] != rep,
                 representations_[virtual_register] == DefaultRepresentation());
  representations_[virtual_register] = rep;
  representation_mask_ |= RepresentationBit(rep);
}

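// Registers a new deoptimization entry and returns its index.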
int InstructionSequence::AddDeoptimizationEntry(
    FrameStateDescriptor* descriptor, DeoptimizeKind kind,
    DeoptimizeReason reason, VectorSlotPair const& feedback) {
  int deoptimization_id = static_cast<int>(deoptimization_entries_.size());
  deoptimization_entries_.push_back(
      DeoptimizationEntry(descriptor, kind, reason, feedback));
  return deoptimization_id;
}

DeoptimizationEntry const& InstructionSequence::GetDeoptimizationEntry(
    int state_id) {
  return deoptimization_entries_[state_id];
}

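// Reads a block's RPO number from an instruction input, which may be encoded
// either as an immediate or as a constant operand.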
RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
  InstructionOperand* operand = instr->InputAt(index);
  Constant constant =
      operand->IsImmediate()
          ? GetImmediate(ImmediateOperand::cast(operand))
          : GetConstant(ConstantOperand::cast(operand)->virtual_register());
  return constant.ToRpoNumber();
}

941 942 943 944 945 946 947 948 949 950 951 952 953
bool InstructionSequence::GetSourcePosition(const Instruction* instr,
                                            SourcePosition* result) const {
  auto it = source_positions_.find(instr);
  if (it == source_positions_.end()) return false;
  *result = it->second;
  return true;
}

void InstructionSequence::SetSourcePosition(const Instruction* instr,
                                            SourcePosition value) {
  source_positions_.insert(std::make_pair(instr, value));
}

void InstructionSequence::Print() const {
  StdoutStream{} << *this << std::endl;
}

void InstructionSequence::PrintBlock(int block_id) const {
  RpoNumber rpo = RpoNumber::FromInt(block_id);
  const InstructionBlock* block = InstructionBlockAt(rpo);
  CHECK(block->rpo_number() == rpo);
  StdoutStream{} << PrintableInstructionBlock{block, this} << std::endl;
}

const RegisterConfiguration*
    InstructionSequence::registerConfigurationForTesting_ = nullptr;

const RegisterConfiguration*
InstructionSequence::RegisterConfigurationForTesting() {
  DCHECK_NOT_NULL(registerConfigurationForTesting_);
  return registerConfigurationForTesting_;
}

void InstructionSequence::SetRegisterConfigurationForTesting(
    const RegisterConfiguration* regConfig) {
  registerConfigurationForTesting_ = regConfig;
  GetRegConfig = InstructionSequence::RegisterConfigurationForTesting;
}

FrameStateDescriptor::FrameStateDescriptor(
    Zone* zone, FrameStateType type, BailoutId bailout_id,
    OutputFrameStateCombine state_combine, size_t parameters_count,
    size_t locals_count, size_t stack_count,
    MaybeHandle<SharedFunctionInfo> shared_info,
    FrameStateDescriptor* outer_state)
    : type_(type),
      bailout_id_(bailout_id),
      frame_state_combine_(state_combine),
      parameters_count_(parameters_count),
      locals_count_(locals_count),
      stack_count_(stack_count),
      values_(zone),
      shared_info_(shared_info),
      outer_state_(outer_state) {}

size_t FrameStateDescriptor::GetSize() const {
  return 1 + parameters_count() + locals_count() + stack_count() +
         (HasContext() ? 1 : 0);
}

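// Walks the chain of outer frame states and sums their sizes.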
size_t FrameStateDescriptor::GetTotalSize() const {
  size_t total_size = 0;
  for (const FrameStateDescriptor* iter = this; iter != nullptr;
       iter = iter->outer_state_) {
    total_size += iter->GetSize();
  }
  return total_size;
}

size_t FrameStateDescriptor::GetFrameCount() const {
  size_t count = 0;
  for (const FrameStateDescriptor* iter = this; iter != nullptr;
       iter = iter->outer_state_) {
    ++count;
  }
  return count;
}

size_t FrameStateDescriptor::GetJSFrameCount() const {
  size_t count = 0;
  for (const FrameStateDescriptor* iter = this; iter != nullptr;
       iter = iter->outer_state_) {
    if (FrameStateFunctionInfo::IsJSFunctionType(iter->type_)) {
      ++count;
    }
  }
  return count;
}

std::ostream& operator<<(std::ostream& os, const RpoNumber& rpo) {
  return os << rpo.ToSize();
}

std::ostream& operator<<(std::ostream& os, const InstructionSequence& code) {
  for (size_t i = 0; i < code.immediates_.size(); ++i) {
    Constant constant = code.immediates_[i];
    os << "IMM#" << i << ": " << constant << "\n";
  }
  int i = 0;
  for (ConstantMap::const_iterator it = code.constants_.begin();
       it != code.constants_.end(); ++i, ++it) {
    os << "CST#" << i << ": v" << it->first << " = " << it->second << "\n";
  }
  for (int i = 0; i < code.InstructionBlockCount(); i++) {
    auto* block = code.InstructionBlockAt(RpoNumber::FromInt(i));
    os << PrintableInstructionBlock{block, &code};
  }
  return os;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8