// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_INSTRUCTION_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_H_

#include <deque>
#include <iosfwd>
#include <map>
#include <set>

#include "src/base/compiler-specific.h"
#include "src/codegen/external-reference.h"
#include "src/codegen/register-arch.h"
#include "src/codegen/source-position.h"
#include "src/common/globals.h"
#include "src/compiler/backend/instruction-codes.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/feedback-source.h"
#include "src/compiler/frame.h"
#include "src/compiler/opcodes.h"
#include "src/numbers/double.h"
#include "src/zone/zone-allocator.h"

namespace v8 {
namespace internal {

class RegisterConfiguration;

namespace compiler {

class Schedule;
class SourcePositionTable;

#if defined(V8_CC_MSVC) && defined(V8_TARGET_ARCH_IA32)
// MSVC on x86 has issues with ALIGNAS(8) on InstructionOperand, but does
// align the object to 8 bytes anyway (covered by a static assert below).
// See crbug.com/v8/10796
#define INSTRUCTION_OPERAND_ALIGN
#else
#define INSTRUCTION_OPERAND_ALIGN ALIGNAS(8)
#endif

// Base class for all operand kinds. The entire state is packed into a single
// uint64_t (value_): the low 3 bits hold the Kind tag (KindField), and
// subclasses interpret the remaining bits with their own base::BitField64
// specializations.
class V8_EXPORT_PRIVATE INSTRUCTION_OPERAND_ALIGN InstructionOperand {
 public:
  static const int kInvalidVirtualRegister = -1;

  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    PENDING,
    // Location operand kinds.
    ALLOCATED,
    FIRST_LOCATION_OPERAND_KIND = ALLOCATED
    // Location operand kinds must be last.
  };

  InstructionOperand() : InstructionOperand(INVALID) {}

  Kind kind() const { return KindField::decode(value_); }

#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
  // UnallocatedOperands are place-holder operands created before register
  // allocation. They later are assigned registers and become AllocatedOperands.
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  // Constant operands participate in register allocation. They are allocated to
  // registers but have a special "spilling" behavior. When a ConstantOperand
  // value must be rematerialized, it is loaded from an immediate constant
  // rather from an unspilled slot.
  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
  // ImmediateOperands do not participate in register allocation and are only
  // embedded directly in instructions, e.g. small integers and on some
  // platforms Objects.
  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
  // PendingOperands are pending allocation during register allocation and
  // shouldn't be seen elsewhere. They chain together multiple operators that
  // will be replaced together with the same value when finalized.
  INSTRUCTION_OPERAND_PREDICATE(Pending, PENDING)
  // AllocatedOperands are registers or stack slots that are assigned by the
  // register allocator and are always associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE

  // Finer-grained predicates; defined inline after LocationOperand because
  // they need LocationOperand::cast.
  inline bool IsAnyLocationOperand() const;
  inline bool IsLocationOperand() const;
  inline bool IsFPLocationOperand() const;
  inline bool IsAnyRegister() const;
  inline bool IsRegister() const;
  inline bool IsFPRegister() const;
  inline bool IsFloatRegister() const;
  inline bool IsDoubleRegister() const;
  inline bool IsSimd128Register() const;
  inline bool IsAnyStackSlot() const;
  inline bool IsStackSlot() const;
  inline bool IsFPStackSlot() const;
  inline bool IsFloatStackSlot() const;
  inline bool IsDoubleStackSlot() const;
  inline bool IsSimd128StackSlot() const;

  // Zone-allocate a copy of a subclass operand.
  template <typename SubKindOperand>
  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
    return zone->New<SubKindOperand>(op);
  }

  static void ReplaceWith(InstructionOperand* dest,
                          const InstructionOperand* src) {
    *dest = *src;
  }

  bool Equals(const InstructionOperand& that) const {
    if (IsPending()) {
      // Pending operands are only equal if they are the same operand.
      return this == &that;
    }
    return this->value_ == that.value_;
  }

  bool Compare(const InstructionOperand& that) const {
    return this->value_ < that.value_;
  }

  bool EqualsCanonicalized(const InstructionOperand& that) const {
    if (IsPending()) {
      // Pending operands can't be canonicalized, so just compare for equality.
      return Equals(that);
    }
    return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
  }

  bool CompareCanonicalized(const InstructionOperand& that) const {
    DCHECK(!IsPending());
    return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
  }

  bool InterferesWith(const InstructionOperand& other) const;

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 protected:
  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}

  inline uint64_t GetCanonicalizedValue() const;

  using KindField = base::BitField64<Kind, 0, 3>;

  uint64_t value_;
};

using InstructionOperandVector = ZoneVector<InstructionOperand>;

std::ostream& operator<<(std::ostream&, const InstructionOperand&);

// Generates the static cast() helpers (pointer, const-pointer, and by-value)
// for an operand subclass; each cast is guarded by a kind DCHECK.
#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind)      \
                                                                 \
  static OperandType* cast(InstructionOperand* op) {             \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<OperandType*>(op);                        \
  }                                                              \
                                                                 \
  static const OperandType* cast(const InstructionOperand* op) { \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<const OperandType*>(op);                  \
  }                                                              \
                                                                 \
  static OperandType cast(const InstructionOperand& op) {        \
    DCHECK_EQ(OperandKind, op.kind());                           \
    return *static_cast<const OperandType*>(&op);                \
  }

176
class UnallocatedOperand final : public InstructionOperand {
177 178 179 180 181
 public:
  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

  enum ExtendedPolicy {
    NONE,
182 183
    REGISTER_OR_SLOT,
    REGISTER_OR_SLOT_OR_CONSTANT,
184
    FIXED_REGISTER,
185
    FIXED_FP_REGISTER,
186
    MUST_HAVE_REGISTER,
187
    MUST_HAVE_SLOT,
188
    SAME_AS_INPUT
189 190 191 192
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
193 194 195
    // USED_AT_START operand is guaranteed to be live only at instruction start.
    // The register allocator is free to assign the same register to some other
    // operand used inside instruction (i.e. temporary or output).
196 197
    USED_AT_START,

198 199 200
    // USED_AT_END operand is treated as live until the end of instruction.
    // This means that register allocator will not reuse its register for any
    // other operand inside instruction.
201 202 203
    USED_AT_END
  };

204
  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
205
      : UnallocatedOperand(virtual_register) {
206 207 208 209 210
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

211 212 213 214 215 216 217 218
  UnallocatedOperand(int virtual_register, int input_index)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(SAME_AS_INPUT);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= InputIndexField::encode(input_index);
  }

219
  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
220
      : UnallocatedOperand(virtual_register) {
221
    DCHECK(policy == FIXED_SLOT);
222
    value_ |= BasicPolicyField::encode(policy);
223 224
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << FixedSlotIndexField::kShift;
225
    DCHECK(this->fixed_slot_index() == index);
226 227
  }

228
  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
229
      : UnallocatedOperand(virtual_register) {
230
    DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
231 232 233 234 235 236
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

237 238
  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
                     int virtual_register)
239
      : UnallocatedOperand(virtual_register) {
240 241 242 243 244
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

245 246 247 248 249 250
  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
    value_ |= HasSecondaryStorageField::encode(true);
    value_ |= SecondaryStorageField::encode(slot_id);
  }

251 252 253 254 255 256
  UnallocatedOperand(const UnallocatedOperand& other, int virtual_register) {
    DCHECK_NE(kInvalidVirtualRegister, virtual_register);
    value_ = VirtualRegisterField::update(
        other.value_, static_cast<uint32_t>(virtual_register));
  }

257
  // Predicates for the operand policy.
258 259 260 261 262 263 264
  bool HasRegisterOrSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT;
  }
  bool HasRegisterOrSlotOrConstantPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT_OR_CONSTANT;
265 266 267 268
  }
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
           extended_policy() == FIXED_REGISTER ||
269
           extended_policy() == FIXED_FP_REGISTER;
270 271 272 273 274
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_REGISTER;
  }
275 276 277 278
  bool HasSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_SLOT;
  }
279 280
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
281
           extended_policy() == SAME_AS_INPUT;
282 283 284 285 286 287
  }
  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER;
  }
288
  bool HasFixedFPRegisterPolicy() const {
289
    return basic_policy() == EXTENDED_POLICY &&
290
           extended_policy() == FIXED_FP_REGISTER;
291
  }
292 293 294 295 296 297 298 299 300
  bool HasSecondaryStorage() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER &&
           HasSecondaryStorageField::decode(value_);
  }
  int GetSecondaryStorage() const {
    DCHECK(HasSecondaryStorage());
    return SecondaryStorageField::decode(value_);
  }
301 302

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
303
  BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }
304 305 306

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
307
    DCHECK(basic_policy() == EXTENDED_POLICY);
308 309 310
    return ExtendedPolicyField::decode(value_);
  }

311 312 313 314 315
  int input_index() const {
    DCHECK(HasSameAsInputPolicy());
    return InputIndexField::decode(value_);
  }

316 317
  // [fixed_slot_index]: Only for FIXED_SLOT.
  int fixed_slot_index() const {
318
    DCHECK(HasFixedSlotPolicy());
319
    return static_cast<int>(static_cast<int64_t>(value_) >>
320
                            FixedSlotIndexField::kShift);
321 322
  }

323
  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
324
  int fixed_register_index() const {
325
    DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
326 327 328 329
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
330
  int32_t virtual_register() const {
331
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
332 333
  }

334
  // [lifetime]: Only for non-FIXED_SLOT.
335
  bool IsUsedAtStart() const {
336 337
    return basic_policy() == EXTENDED_POLICY &&
           LifetimeField::decode(value_) == USED_AT_START;
338
  }
339

340
  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED)
341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359

  // The encoding used for UnallocatedOperand operands depends on the policy
  // that is
  // stored within the operand. The FIXED_SLOT policy uses a compact encoding
  // because it accommodates a larger pay-load.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------------+
  //     |      slot_index   | 0 | virtual_register | 001 |
  //     +------------------------------------------------+
  //
  // For all other (extended) policies:
  //     +-----------------------------------------------------+
  //     |  reg_index  | L | PPP |  1 | virtual_register | 001 |
  //     +-----------------------------------------------------+
  //     L ... Lifetime
  //     P ... Policy
  //
  // The slot index is a signed value which requires us to decode it manually
360
  // instead of using the base::BitField utility class.
361 362 363

  STATIC_ASSERT(KindField::kSize == 3);

364
  using VirtualRegisterField = base::BitField64<uint32_t, 3, 32>;
365

366 367
  // base::BitFields for all unallocated operands.
  using BasicPolicyField = base::BitField64<BasicPolicy, 35, 1>;
368 369

  // BitFields specific to BasicPolicy::FIXED_SLOT.
370
  using FixedSlotIndexField = base::BitField64<int, 36, 28>;
371 372

  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
373 374 375 376 377
  using ExtendedPolicyField = base::BitField64<ExtendedPolicy, 36, 3>;
  using LifetimeField = base::BitField64<Lifetime, 39, 1>;
  using HasSecondaryStorageField = base::BitField64<bool, 40, 1>;
  using FixedRegisterField = base::BitField64<int, 41, 6>;
  using SecondaryStorageField = base::BitField64<int, 47, 3>;
378
  using InputIndexField = base::BitField64<int, 50, 3>;
379 380 381 382 383 384 385

 private:
  explicit UnallocatedOperand(int virtual_register)
      : InstructionOperand(UNALLOCATED) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
386 387
};

388 389 390
class ConstantOperand : public InstructionOperand {
 public:
  explicit ConstantOperand(int virtual_register)
391 392 393 394
      : InstructionOperand(CONSTANT) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
395 396 397 398 399 400 401 402 403

  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  static ConstantOperand* New(Zone* zone, int virtual_register) {
    return InstructionOperand::New(zone, ConstantOperand(virtual_register));
  }

404
  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT)
405 406

  STATIC_ASSERT(KindField::kSize == 3);
407
  using VirtualRegisterField = base::BitField64<uint32_t, 3, 32>;
408 409 410 411
};

class ImmediateOperand : public InstructionOperand {
 public:
412
  enum ImmediateType { INLINE_INT32, INLINE_INT64, INDEXED_RPO, INDEXED_IMM };
413 414 415 416

  explicit ImmediateOperand(ImmediateType type, int32_t value)
      : InstructionOperand(IMMEDIATE) {
    value_ |= TypeField::encode(type);
417 418
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(value))
              << ValueField::kShift;
419
  }
420

421 422
  ImmediateType type() const { return TypeField::decode(value_); }

423 424 425 426 427 428 429
  int32_t inline_int32_value() const {
    DCHECK_EQ(INLINE_INT32, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int64_t inline_int64_value() const {
    DCHECK_EQ(INLINE_INT64, type());
430 431 432 433
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int32_t indexed_value() const {
434
    DCHECK(type() == INDEXED_IMM || type() == INDEXED_RPO);
435
    return static_cast<int64_t>(value_) >> ValueField::kShift;
436 437
  }

438 439
  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
    return InstructionOperand::New(zone, ImmediateOperand(type, value));
440 441
  }

442
  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE)
443 444

  STATIC_ASSERT(KindField::kSize == 3);
445
  using TypeField = base::BitField64<ImmediateType, 3, 2>;
446
  using ValueField = base::BitField64<int32_t, 32, 32>;
447 448
};

449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486
class PendingOperand : public InstructionOperand {
 public:
  PendingOperand() : InstructionOperand(PENDING) {}
  explicit PendingOperand(PendingOperand* next_operand) : PendingOperand() {
    set_next(next_operand);
  }

  void set_next(PendingOperand* next) {
    DCHECK_NULL(this->next());
    uintptr_t shifted_value =
        reinterpret_cast<uintptr_t>(next) >> kPointerShift;
    DCHECK_EQ(reinterpret_cast<uintptr_t>(next),
              shifted_value << kPointerShift);
    value_ |= NextOperandField::encode(static_cast<uint64_t>(shifted_value));
  }

  PendingOperand* next() const {
    uintptr_t shifted_value =
        static_cast<uint64_t>(NextOperandField::decode(value_));
    return reinterpret_cast<PendingOperand*>(shifted_value << kPointerShift);
  }

  static PendingOperand* New(Zone* zone, PendingOperand* previous_operand) {
    return InstructionOperand::New(zone, PendingOperand(previous_operand));
  }

  INSTRUCTION_OPERAND_CASTS(PendingOperand, PENDING)

 private:
  // Operands are uint64_t values and so are aligned to 8 byte boundaries,
  // therefore we can shift off the bottom three zeros without losing data.
  static const uint64_t kPointerShift = 3;
  STATIC_ASSERT(alignof(InstructionOperand) >= (1 << kPointerShift));

  STATIC_ASSERT(KindField::kSize == 3);
  using NextOperandField = base::BitField64<uint64_t, 3, 61>;
};

487
class LocationOperand : public InstructionOperand {
488
 public:
489
  enum LocationKind { REGISTER, STACK_SLOT };
490

491 492
  LocationOperand(InstructionOperand::Kind operand_kind,
                  LocationOperand::LocationKind location_kind,
493
                  MachineRepresentation rep, int index)
494 495
      : InstructionOperand(operand_kind) {
    DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
496
    DCHECK(IsSupportedRepresentation(rep));
497
    value_ |= LocationKindField::encode(location_kind);
498
    value_ |= RepresentationField::encode(rep);
499 500
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << IndexField::kShift;
501 502
  }

503
  int index() const {
504
    DCHECK(IsStackSlot() || IsFPStackSlot());
505 506 507
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

508 509 510 511 512
  int register_code() const {
    DCHECK(IsRegister() || IsFPRegister());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

513
  Register GetRegister() const {
514
    DCHECK(IsRegister());
515
    return Register::from_code(register_code());
516 517
  }

518 519
  FloatRegister GetFloatRegister() const {
    DCHECK(IsFloatRegister());
520
    return FloatRegister::from_code(register_code());
521 522
  }

523
  DoubleRegister GetDoubleRegister() const {
524 525 526
    // On platforms where FloatRegister, DoubleRegister, and Simd128Register
    // are all the same type, it's convenient to treat everything as a
    // DoubleRegister, so be lax about type checking here.
527
    DCHECK(IsFPRegister());
528
    return DoubleRegister::from_code(register_code());
529 530
  }

531 532
  Simd128Register GetSimd128Register() const {
    DCHECK(IsSimd128Register());
533
    return Simd128Register::from_code(register_code());
534 535
  }

536 537
  LocationKind location_kind() const {
    return LocationKindField::decode(value_);
538 539
  }

540 541 542
  MachineRepresentation representation() const {
    return RepresentationField::decode(value_);
  }
543

544 545 546 547 548 549
  static bool IsSupportedRepresentation(MachineRepresentation rep) {
    switch (rep) {
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kFloat32:
      case MachineRepresentation::kFloat64:
550
      case MachineRepresentation::kSimd128:
551 552
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
553
      case MachineRepresentation::kTagged:
554 555
      case MachineRepresentation::kCompressedPointer:
      case MachineRepresentation::kCompressed:
556
        return true;
557 558 559 560
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8:
      case MachineRepresentation::kWord16:
      case MachineRepresentation::kNone:
561
        return false;
562 563
      case MachineRepresentation::kMapWord:
        break;
564
    }
565
    UNREACHABLE();
566 567
  }

568 569 570
  // Return true if the locations can be moved to one another.
  bool IsCompatible(LocationOperand* op);

571
  static LocationOperand* cast(InstructionOperand* op) {
572
    DCHECK(op->IsAnyLocationOperand());
573 574 575 576
    return static_cast<LocationOperand*>(op);
  }

  static const LocationOperand* cast(const InstructionOperand* op) {
577
    DCHECK(op->IsAnyLocationOperand());
578 579 580 581
    return static_cast<const LocationOperand*>(op);
  }

  static LocationOperand cast(const InstructionOperand& op) {
582
    DCHECK(op.IsAnyLocationOperand());
583 584
    return *static_cast<const LocationOperand*>(&op);
  }
585

586
  STATIC_ASSERT(KindField::kSize == 3);
587 588 589
  using LocationKindField = base::BitField64<LocationKind, 3, 2>;
  using RepresentationField = base::BitField64<MachineRepresentation, 5, 8>;
  using IndexField = base::BitField64<int32_t, 35, 29>;
590
};
591

592 593
class AllocatedOperand : public LocationOperand {
 public:
594 595
  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
      : LocationOperand(ALLOCATED, kind, rep, index) {}
596 597

  static AllocatedOperand* New(Zone* zone, LocationKind kind,
598 599
                               MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
600 601
  }

602
  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED)
603 604
};

#undef INSTRUCTION_OPERAND_CASTS

607 608 609 610 611 612 613 614 615 616 617 618 619
bool InstructionOperand::IsAnyLocationOperand() const {
  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
}

bool InstructionOperand::IsLocationOperand() const {
  return IsAnyLocationOperand() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPLocationOperand() const {
  return IsAnyLocationOperand() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}
620

621
bool InstructionOperand::IsAnyRegister() const {
622
  return IsAnyLocationOperand() &&
623
         LocationOperand::cast(this)->location_kind() ==
624 625 626 627 628
             LocationOperand::REGISTER;
}

bool InstructionOperand::IsRegister() const {
  return IsAnyRegister() &&
629
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
630
}
631

632
bool InstructionOperand::IsFPRegister() const {
633
  return IsAnyRegister() &&
634
         IsFloatingPoint(LocationOperand::cast(this)->representation());
635 636
}

637
bool InstructionOperand::IsFloatRegister() const {
638 639
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kFloat32;
640 641 642
}

bool InstructionOperand::IsDoubleRegister() const {
643 644
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kFloat64;
645 646
}

647
bool InstructionOperand::IsSimd128Register() const {
648 649
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kSimd128;
650 651
}

652 653
bool InstructionOperand::IsAnyStackSlot() const {
  return IsAnyLocationOperand() &&
654
         LocationOperand::cast(this)->location_kind() ==
655 656 657 658 659
             LocationOperand::STACK_SLOT;
}

bool InstructionOperand::IsStackSlot() const {
  return IsAnyStackSlot() &&
660
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
661 662
}

663
bool InstructionOperand::IsFPStackSlot() const {
664
  return IsAnyStackSlot() &&
665
         IsFloatingPoint(LocationOperand::cast(this)->representation());
666
}
667

668
bool InstructionOperand::IsFloatStackSlot() const {
669
  return IsAnyLocationOperand() &&
670 671 672 673 674 675 676
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleStackSlot() const {
677
  return IsAnyLocationOperand() &&
678 679 680 681 682 683
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

684
bool InstructionOperand::IsSimd128StackSlot() const {
685
  return IsAnyLocationOperand() &&
686 687 688 689 690 691
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kSimd128;
}

692
uint64_t InstructionOperand::GetCanonicalizedValue() const {
693
  if (IsAnyLocationOperand()) {
694
    MachineRepresentation canonical = MachineRepresentation::kNone;
695
    if (IsFPRegister()) {
696 697 698 699 700 701 702 703
      if (kSimpleFPAliasing) {
        // We treat all FP register operands the same for simple aliasing.
        canonical = MachineRepresentation::kFloat64;
      } else {
        // We need to distinguish FP register operands of different reps when
        // aliasing is not simple (e.g. ARM).
        canonical = LocationOperand::cast(this)->representation();
      }
704
    }
705
    return InstructionOperand::KindField::update(
706
        LocationOperand::RepresentationField::update(this->value_, canonical),
707
        LocationOperand::ALLOCATED);
708 709 710 711 712 713 714 715
  }
  return this->value_;
}

// Required for maps that don't care about machine type.
struct CompareOperandModuloType {
  bool operator()(const InstructionOperand& a,
                  const InstructionOperand& b) const {
716
    return a.CompareCanonicalized(b);
717 718 719
  }
};

720 721
class V8_EXPORT_PRIVATE MoveOperands final
    : public NON_EXPORTED_BASE(ZoneObject) {
722
 public:
723 724 725 726 727
  MoveOperands(const InstructionOperand& source,
               const InstructionOperand& destination)
      : source_(source), destination_(destination) {
    DCHECK(!source.IsInvalid() && !destination.IsInvalid());
  }
728

729 730 731
  MoveOperands(const MoveOperands&) = delete;
  MoveOperands& operator=(const MoveOperands&) = delete;

732 733 734
  const InstructionOperand& source() const { return source_; }
  InstructionOperand& source() { return source_; }
  void set_source(const InstructionOperand& operand) { source_ = operand; }
735

736 737 738 739 740
  const InstructionOperand& destination() const { return destination_; }
  InstructionOperand& destination() { return destination_; }
  void set_destination(const InstructionOperand& operand) {
    destination_ = operand;
  }
741 742 743

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
744 745 746 747
  bool IsPending() const {
    return destination_.IsInvalid() && !source_.IsInvalid();
  }
  void SetPending() { destination_ = InstructionOperand(); }
748

749 750
  // A move is redundant if it's been eliminated or if its source and
  // destination are the same.
751
  bool IsRedundant() const {
752
    DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
753
    return IsEliminated() || source_.EqualsCanonicalized(destination_);
754 755 756
  }

  // We clear both operands to indicate move that's been eliminated.
757
  void Eliminate() { source_ = destination_ = InstructionOperand(); }
758
  bool IsEliminated() const {
759 760
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
761 762
  }

763
  // APIs to aid debugging. For general-stream APIs, use operator<<.
764
  void Print() const;
765

766
 private:
767 768
  InstructionOperand source_;
  InstructionOperand destination_;
769 770
};

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, const MoveOperands&);

773
class V8_EXPORT_PRIVATE ParallelMove final
774
    : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
775
      public NON_EXPORTED_BASE(ZoneObject) {
776
 public:
777
  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {}
778 779
  ParallelMove(const ParallelMove&) = delete;
  ParallelMove& operator=(const ParallelMove&) = delete;
780

781 782
  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to) {
783 784 785 786 787 788 789
    Zone* zone = get_allocator().zone();
    return AddMove(from, to, zone);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to,
                        Zone* operand_allocation_zone) {
790
    if (from.EqualsCanonicalized(to)) return nullptr;
791
    MoveOperands* move = operand_allocation_zone->New<MoveOperands>(from, to);
792
    if (empty()) reserve(4);
793 794
    push_back(move);
    return move;
795 796 797 798
  }

  bool IsRedundant() const;

dcarney's avatar
dcarney committed
799
  // Prepare this ParallelMove to insert move as if it happened in a subsequent
800 801 802 803
  // ParallelMove.  move->source() may be changed.  Any MoveOperands added to
  // to_eliminate must be Eliminated.
  void PrepareInsertAfter(MoveOperands* move,
                          ZoneVector<MoveOperands*>* to_eliminate) const;
804 805
};

std::ostream& operator<<(std::ostream&, const ParallelMove&);

// Records, for one instruction position, which allocated operands hold
// tagged values that the GC must treat as references.
class ReferenceMap final : public ZoneObject {
 public:
  explicit ReferenceMap(Zone* zone)
      : reference_operands_(8, zone), instruction_position_(-1) {}

  const ZoneVector<InstructionOperand>& reference_operands() const {
    return reference_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  void set_instruction_position(int pos) {
    // The position may only be assigned once; -1 means "not yet set".
    DCHECK_EQ(-1, instruction_position_);
    instruction_position_ = pos;
  }

  void RecordReference(const AllocatedOperand& op);

 private:
  friend std::ostream& operator<<(std::ostream&, const ReferenceMap&);

  ZoneVector<InstructionOperand> reference_operands_;
  int instruction_position_;
};

std::ostream& operator<<(std::ostream&, const ReferenceMap&);

class InstructionBlock;

// A single machine instruction: an architecture-specific opcode together
// with its output, input, and temporary operands, optional gap moves, and
// an optional reference map for GC.
class V8_EXPORT_PRIVATE Instruction final {
 public:
  Instruction(const Instruction&) = delete;
  Instruction& operator=(const Instruction&) = delete;

  // Operands live in one flat trailing array: outputs first, then inputs,
  // then temps. The three counts are packed into bit_field_.
  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  const InstructionOperand* OutputAt(size_t i) const {
    DCHECK_LT(i, OutputCount());
    return &operands_[i];
  }
  InstructionOperand* OutputAt(size_t i) {
    DCHECK_LT(i, OutputCount());
    return &operands_[i];
  }

  bool HasOutput() const { return OutputCount() > 0; }
  const InstructionOperand* Output() const { return OutputAt(0); }
  InstructionOperand* Output() { return OutputAt(0); }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  const InstructionOperand* InputAt(size_t i) const {
    DCHECK_LT(i, InputCount());
    return &operands_[OutputCount() + i];
  }
  InstructionOperand* InputAt(size_t i) {
    DCHECK_LT(i, InputCount());
    return &operands_[OutputCount() + i];
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  const InstructionOperand* TempAt(size_t i) const {
    DCHECK_LT(i, TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }
  InstructionOperand* TempAt(size_t i) {
    DCHECK_LT(i, TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }

  // Sub-fields decoded from the InstructionCode (see instruction-codes.h).
  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }
  int misc() const { return MiscField::decode(opcode()); }

  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
  }

  // Allocates an Instruction in |zone| with enough trailing storage for all
  // operands, then copies the given operands into place.
  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand* outputs,
                          size_t input_count, InstructionOperand* inputs,
                          size_t temp_count, InstructionOperand* temps) {
    DCHECK(output_count == 0 || outputs != nullptr);
    DCHECK(input_count == 0 || inputs != nullptr);
    DCHECK(temp_count == 0 || temps != nullptr);
    // TODO(turbofan): Handle this gracefully. See crbug.com/582702.
    CHECK(InputCountField::is_valid(input_count));

    // One operand slot is already embedded in the object (operands_[1]),
    // hence the decrement below.
    size_t total_extra_ops = output_count + input_count + temp_count;
    if (total_extra_ops != 0) total_extra_ops--;
    int size = static_cast<int>(
        RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
        total_extra_ops * sizeof(InstructionOperand));
    return new (zone->Allocate<Instruction>(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }

  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsReferenceMap() const { return IsCall(); }
  bool HasReferenceMap() const { return reference_map_ != nullptr; }

  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  ReferenceMap* reference_map() const { return reference_map_; }

  void set_reference_map(ReferenceMap* map) {
    DCHECK(NeedsReferenceMap());
    DCHECK(!reference_map_);
    reference_map_ = map;
  }

  // Turns this instruction into a nop, dropping operands and reference map.
  void OverwriteWithNop() {
    opcode_ = ArchOpcodeField::encode(kArchNop);
    bit_field_ = 0;
    reference_map_ = nullptr;
  }

  bool IsNop() const { return arch_opcode() == kArchNop; }

  bool IsDeoptimizeCall() const {
    return arch_opcode() == ArchOpcode::kArchDeoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize_and_poison;
  }

  bool IsTrap() const {
    return FlagsModeField::decode(opcode()) == kFlags_trap;
  }

  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
  // NOTE: relies on the tail-call opcodes being ordered first in ArchOpcode.
  bool IsTailCall() const {
#if V8_ENABLE_WEBASSEMBLY
    return arch_opcode() <= ArchOpcode::kArchTailCallWasm;
#else
    return arch_opcode() <= ArchOpcode::kArchTailCallAddress;
#endif  // V8_ENABLE_WEBASSEMBLY
  }
  bool IsThrow() const {
    return arch_opcode() == ArchOpcode::kArchThrowTerminator;
  }

  static constexpr bool IsCallWithDescriptorFlags(InstructionCode arch_opcode) {
    return arch_opcode <= ArchOpcode::kArchCallBuiltinPointer;
  }
  bool IsCallWithDescriptorFlags() const {
    return IsCallWithDescriptorFlags(arch_opcode());
  }
  // Tests a CallDescriptor flag that was encoded into the MiscField of the
  // instruction code at selection time.
  bool HasCallDescriptorFlag(CallDescriptor::Flag flag) const {
    DCHECK(IsCallWithDescriptorFlags());
    STATIC_ASSERT(CallDescriptor::kFlagsBitsEncodedInInstructionCode == 10);
#ifdef DEBUG
    static constexpr int kInstructionCodeFlagsMask =
        ((1 << CallDescriptor::kFlagsBitsEncodedInInstructionCode) - 1);
    DCHECK_EQ(static_cast<int>(flag) & kInstructionCodeFlagsMask, flag);
#endif
    return MiscField::decode(opcode()) & flag;
  }

  // Positions around the instruction where parallel (gap) moves may be
  // inserted.
  enum GapPosition {
    START,
    END,
    FIRST_GAP_POSITION = START,
    LAST_GAP_POSITION = END
  };

  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == nullptr) {
      parallel_moves_[pos] = zone->New<ParallelMove>(zone);
    }
    return parallel_moves_[pos];
  }

  ParallelMove* GetParallelMove(GapPosition pos) {
    return parallel_moves_[pos];
  }

  const ParallelMove* GetParallelMove(GapPosition pos) const {
    return parallel_moves_[pos];
  }

  bool AreMovesRedundant() const;

  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }

  // The block_id may be invalidated in JumpThreading. It is only important for
  // register allocation, to avoid searching for blocks from instruction
  // indexes.
  InstructionBlock* block() const { return block_; }
  void set_block(InstructionBlock* block) {
    DCHECK_NOT_NULL(block);
    block_ = block;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  // Bit-field layout of bit_field_ (bit 30 holds the is-call flag below).
  using OutputCountField = base::BitField<size_t, 0, 8>;
  using InputCountField = base::BitField<size_t, 8, 16>;
  using TempCountField = base::BitField<size_t, 24, 6>;

  static const size_t kMaxOutputCount = OutputCountField::kMax;
  static const size_t kMaxInputCount = InputCountField::kMax;
  static const size_t kMaxTempCount = TempCountField::kMax;

 private:
  explicit Instruction(InstructionCode opcode);

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand* outputs, size_t input_count,
              InstructionOperand* inputs, size_t temp_count,
              InstructionOperand* temps);

  using IsCallField = base::BitField<bool, 30, 1>;

  InstructionCode opcode_;
  uint32_t bit_field_;
  ParallelMove* parallel_moves_[2];
  ReferenceMap* reference_map_;
  InstructionBlock* block_;
  // Flexible trailing array; the real length is
  // OutputCount() + InputCount() + TempCount() (see New()).
  InstructionOperand operands_[1];
};

std::ostream& operator<<(std::ostream&, const Instruction&);

// A reverse-post-order block index: a thin, validity-checked wrapper around
// an int32_t. -1 denotes "invalid".
class RpoNumber final {
 public:
  static const int kInvalidRpoNumber = -1;
  RpoNumber() : index_(kInvalidRpoNumber) {}

  int ToInt() const {
    DCHECK(IsValid());
    return index_;
  }
  size_t ToSize() const {
    DCHECK(IsValid());
    return static_cast<size_t>(index_);
  }
  bool IsValid() const { return index_ >= 0; }
  static RpoNumber FromInt(int index) { return RpoNumber(index); }
  static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }

  // True when |other| immediately follows this number in RPO order.
  bool IsNext(const RpoNumber other) const {
    DCHECK(IsValid());
    return other.index_ == this->index_ + 1;
  }

  RpoNumber Next() const {
    DCHECK(IsValid());
    return RpoNumber(index_ + 1);
  }

  // Comparison operators.
  bool operator==(RpoNumber other) const { return index_ == other.index_; }
  bool operator!=(RpoNumber other) const { return index_ != other.index_; }
  bool operator>(RpoNumber other) const { return index_ > other.index_; }
  bool operator<(RpoNumber other) const { return index_ < other.index_; }
  bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
  bool operator>=(RpoNumber other) const { return index_ >= other.index_; }

 private:
  explicit RpoNumber(int32_t index) : index_(index) {}
  int32_t index_;
};

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, const RpoNumber&);

// A compile-time constant operand. The 64-bit payload (value_) is
// interpreted according to type_; the To*() accessors check the type in
// debug builds before decoding.
class V8_EXPORT_PRIVATE Constant final {
 public:
  enum Type {
    kInt32,
    kInt64,
    kFloat32,
    kFloat64,
    kExternalReference,
    kCompressedHeapObject,
    kHeapObject,
    kRpoNumber,
    kDelayedStringConstant
  };

  explicit Constant(int32_t v);
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  // Floating-point payloads are stored bit-exact via bit_cast.
  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
  explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref.address())) {}
  explicit Constant(Handle<HeapObject> obj, bool is_compressed = false)
      : type_(is_compressed ? kCompressedHeapObject : kHeapObject),
        value_(bit_cast<intptr_t>(obj)) {}
  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
  explicit Constant(const StringConstantBase* str)
      : type_(kDelayedStringConstant), value_(bit_cast<intptr_t>(str)) {}
  explicit Constant(RelocatablePtrConstantInfo info);

  Type type() const { return type_; }

  RelocInfo::Mode rmode() const { return rmode_; }

  // True when the integral payload (kInt32 or kInt64) is representable as
  // an int32.
  bool FitsInInt32() const {
    if (type() == kInt32) return true;
    DCHECK(type() == kInt64);
    return value_ >= std::numeric_limits<int32_t>::min() &&
           value_ <= std::numeric_limits<int32_t>::max();
  }

  int32_t ToInt32() const {
    DCHECK(FitsInInt32());
    const int32_t value = static_cast<int32_t>(value_);
    DCHECK_EQ(value_, static_cast<int64_t>(value));
    return value;
  }

  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kInt64, type());
    return value_;
  }

  float ToFloat32() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as float can cause
    // the signalling bit to flip, and value_ is returned as a quiet NaN.
    DCHECK_EQ(kFloat32, type());
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  // Returns the raw bit pattern of the float32 payload (NaN-safe).
  uint32_t ToFloat32AsInt() const {
    DCHECK_EQ(kFloat32, type());
    return bit_cast<uint32_t>(static_cast<int32_t>(value_));
  }

  // Returned as Double (a bit-pattern wrapper) rather than double to avoid
  // quietening signalling NaNs.
  Double ToFloat64() const {
    DCHECK_EQ(kFloat64, type());
    return Double(bit_cast<uint64_t>(value_));
  }

  ExternalReference ToExternalReference() const {
    DCHECK_EQ(kExternalReference, type());
    return ExternalReference::FromRawAddress(static_cast<Address>(value_));
  }

  RpoNumber ToRpoNumber() const {
    DCHECK_EQ(kRpoNumber, type());
    return RpoNumber::FromInt(static_cast<int>(value_));
  }

  Handle<HeapObject> ToHeapObject() const;
  Handle<Code> ToCode() const;
  const StringConstantBase* ToDelayedStringConstant() const;

 private:
  Type type_;
  RelocInfo::Mode rmode_ = RelocInfo::NONE;
  int64_t value_;
};

std::ostream& operator<<(std::ostream&, const Constant&);

// Forward declarations.
class FrameStateDescriptor;

// Kinds of entries in a frame-state value list; see StateValueDescriptor.
enum class StateValueKind : uint8_t {
  kArgumentsElements,
  kArgumentsLength,
  kPlain,
  kOptimizedOut,
  kNested,
  kDuplicate
};

class StateValueDescriptor {
 public:
1191
  StateValueDescriptor()
1192
      : kind_(StateValueKind::kPlain), type_(MachineType::AnyTagged()) {}
1193

1194
  static StateValueDescriptor ArgumentsElements(ArgumentsStateType type) {
1195 1196
    StateValueDescriptor descr(StateValueKind::kArgumentsElements,
                               MachineType::AnyTagged());
1197
    descr.args_type_ = type;
1198
    return descr;
1199
  }
1200 1201 1202
  static StateValueDescriptor ArgumentsLength() {
    return StateValueDescriptor(StateValueKind::kArgumentsLength,
                                MachineType::AnyTagged());
1203
  }
1204
  static StateValueDescriptor Plain(MachineType type) {
1205
    return StateValueDescriptor(StateValueKind::kPlain, type);
1206
  }
1207 1208
  static StateValueDescriptor OptimizedOut() {
    return StateValueDescriptor(StateValueKind::kOptimizedOut,
1209
                                MachineType::AnyTagged());
1210 1211
  }
  static StateValueDescriptor Recursive(size_t id) {
1212 1213 1214 1215
    StateValueDescriptor descr(StateValueKind::kNested,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
1216
  }
1217
  static StateValueDescriptor Duplicate(size_t id) {
1218 1219 1220 1221
    StateValueDescriptor descr(StateValueKind::kDuplicate,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
1222 1223
  }

1224 1225 1226
  bool IsArgumentsElements() const {
    return kind_ == StateValueKind::kArgumentsElements;
  }
1227 1228 1229
  bool IsArgumentsLength() const {
    return kind_ == StateValueKind::kArgumentsLength;
  }
1230 1231 1232 1233
  bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
  bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
  bool IsNested() const { return kind_ == StateValueKind::kNested; }
  bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
1234
  MachineType type() const { return type_; }
1235 1236 1237 1238 1239
  size_t id() const {
    DCHECK(kind_ == StateValueKind::kDuplicate ||
           kind_ == StateValueKind::kNested);
    return id_;
  }
1240
  ArgumentsStateType arguments_type() const {
1241
    DCHECK(kind_ == StateValueKind::kArgumentsElements);
1242
    return args_type_;
1243
  }
1244 1245

 private:
1246 1247
  StateValueDescriptor(StateValueKind kind, MachineType type)
      : kind_(kind), type_(type) {}
1248 1249 1250

  StateValueKind kind_;
  MachineType type_;
1251 1252
  union {
    size_t id_;
1253
    ArgumentsStateType args_type_;
1254
  };
1255 1256
};

// An ordered list of StateValueDescriptors, plus side storage for the
// nested lists referenced by kNested entries (nested_ holds one list per
// kNested descriptor, in order).
class StateValueList {
 public:
  explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}

  size_t size() { return fields_.size(); }

  size_t nested_count() { return nested_.size(); }

  // A descriptor together with its nested list (nullptr unless IsNested()).
  struct Value {
    StateValueDescriptor* desc;
    StateValueList* nested;

    Value(StateValueDescriptor* desc, StateValueList* nested)
        : desc(desc), nested(nested) {}
  };

  class iterator {
   public:
    // Bare minimum of operators needed for range iteration.
    bool operator!=(const iterator& other) const {
      return field_iterator != other.field_iterator;
    }
    bool operator==(const iterator& other) const {
      return field_iterator == other.field_iterator;
    }
    iterator& operator++() {
      // Advance the parallel nested-list iterator only when the current
      // field actually consumed a nested list.
      if (field_iterator->IsNested()) {
        nested_iterator++;
      }
      ++field_iterator;
      return *this;
    }
    Value operator*() {
      StateValueDescriptor* desc = &(*field_iterator);
      StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
      return Value(desc, nested);
    }

   private:
    friend class StateValueList;

    iterator(ZoneVector<StateValueDescriptor>::iterator it,
             ZoneVector<StateValueList*>::iterator nested)
        : field_iterator(it), nested_iterator(nested) {}

    ZoneVector<StateValueDescriptor>::iterator field_iterator;
    ZoneVector<StateValueList*>::iterator nested_iterator;
  };

  // A contiguous run of (non-nested) fields, used for caching/reuse.
  struct Slice {
    Slice(ZoneVector<StateValueDescriptor>::iterator start, size_t fields)
        : start_position(start), fields_count(fields) {}

    ZoneVector<StateValueDescriptor>::iterator start_position;
    size_t fields_count;
  };

  void ReserveSize(size_t size) { fields_.reserve(size); }

  // Appends a kNested field and returns the freshly allocated nested list.
  StateValueList* PushRecursiveField(Zone* zone, size_t id) {
    fields_.push_back(StateValueDescriptor::Recursive(id));
    StateValueList* nested = zone->New<StateValueList>(zone);
    nested_.push_back(nested);
    return nested;
  }
  void PushArgumentsElements(ArgumentsStateType type) {
    fields_.push_back(StateValueDescriptor::ArgumentsElements(type));
  }
  void PushArgumentsLength() {
    fields_.push_back(StateValueDescriptor::ArgumentsLength());
  }
  void PushDuplicate(size_t id) {
    fields_.push_back(StateValueDescriptor::Duplicate(id));
  }
  void PushPlain(MachineType type) {
    fields_.push_back(StateValueDescriptor::Plain(type));
  }
  void PushOptimizedOut(size_t num = 1) {
    fields_.insert(fields_.end(), num, StateValueDescriptor::OptimizedOut());
  }
  void PushCachedSlice(const Slice& cached) {
    fields_.insert(fields_.end(), cached.start_position,
                   cached.start_position + cached.fields_count);
  }

  // Returns a Slice representing the (non-nested) fields in StateValueList from
  // values_start to  the current end position.
  Slice MakeSlice(size_t values_start) {
    DCHECK(!HasNestedFieldsAfter(values_start));
    size_t fields_count = fields_.size() - values_start;
    return Slice(fields_.begin() + values_start, fields_count);
  }

  iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
  iterator end() { return iterator(fields_.end(), nested_.end()); }

 private:
  bool HasNestedFieldsAfter(size_t values_start) {
    auto it = fields_.begin() + values_start;
    for (; it != fields_.end(); it++) {
      if (it->IsNested()) return true;
    }
    return false;
  }

  ZoneVector<StateValueDescriptor> fields_;
  ZoneVector<StateValueList*> nested_;
};

// Describes the shape of one (possibly nested) unoptimized frame that the
// deoptimizer must be able to materialize: frame type, bailout point,
// value counts, and a link to the enclosing frame's descriptor.
class FrameStateDescriptor : public ZoneObject {
 public:
  FrameStateDescriptor(Zone* zone, FrameStateType type,
                       BytecodeOffset bailout_id,
                       OutputFrameStateCombine state_combine,
                       size_t parameters_count, size_t locals_count,
                       size_t stack_count,
                       MaybeHandle<SharedFunctionInfo> shared_info,
                       FrameStateDescriptor* outer_state = nullptr);

  FrameStateType type() const { return type_; }
  BytecodeOffset bailout_id() const { return bailout_id_; }
  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
  size_t parameters_count() const { return parameters_count_; }
  size_t locals_count() const { return locals_count_; }
  size_t stack_count() const { return stack_count_; }
  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  // The descriptor of the enclosing frame, or nullptr for the outermost one.
  FrameStateDescriptor* outer_state() const { return outer_state_; }
  bool HasContext() const {
    return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
           type_ == FrameStateType::kBuiltinContinuation ||
#if V8_ENABLE_WEBASSEMBLY
           type_ == FrameStateType::kJSToWasmBuiltinContinuation ||
#endif  // V8_ENABLE_WEBASSEMBLY
           type_ == FrameStateType::kConstructStub;
  }

  // The frame height on the stack, in number of slots, as serialized into a
  // Translation and later used by the deoptimizer. Does *not* include
  // information from the chain of outer states. Unlike |GetSize| this does not
  // always include parameters, locals, and stack slots; instead, the returned
  // slot kinds depend on the frame type.
  size_t GetHeight() const;

  // Returns an overapproximation of the unoptimized stack frame size in bytes,
  // as later produced by the deoptimizer. Considers both this and the chain of
  // outer states.
  size_t total_conservative_frame_size_in_bytes() const {
    return total_conservative_frame_size_in_bytes_;
  }

  size_t GetSize() const;
  size_t GetTotalSize() const;
  size_t GetFrameCount() const;
  size_t GetJSFrameCount() const;

  StateValueList* GetStateValueDescriptors() { return &values_; }

  static const int kImpossibleValue = 0xdead;

 private:
  FrameStateType type_;
  BytecodeOffset bailout_id_;
  OutputFrameStateCombine frame_state_combine_;
  const size_t parameters_count_;
  const size_t locals_count_;
  const size_t stack_count_;
  const size_t total_conservative_frame_size_in_bytes_;
  StateValueList values_;
  MaybeHandle<SharedFunctionInfo> const shared_info_;
  FrameStateDescriptor* const outer_state_;
};

#if V8_ENABLE_WEBASSEMBLY
// FrameStateDescriptor for JS-to-Wasm builtin continuations; additionally
// records the wasm return kind derived from the wasm signature.
class JSToWasmFrameStateDescriptor : public FrameStateDescriptor {
 public:
  JSToWasmFrameStateDescriptor(Zone* zone, FrameStateType type,
                               BytecodeOffset bailout_id,
                               OutputFrameStateCombine state_combine,
                               size_t parameters_count, size_t locals_count,
                               size_t stack_count,
                               MaybeHandle<SharedFunctionInfo> shared_info,
                               FrameStateDescriptor* outer_state,
                               const wasm::FunctionSig* wasm_signature);

  base::Optional<wasm::ValueKind> return_kind() const { return return_kind_; }

 private:
  base::Optional<wasm::ValueKind> return_kind_;
};
#endif  // V8_ENABLE_WEBASSEMBLY

// A deoptimization entry is a pair of the reason why we deoptimize and the
// frame state descriptor that we have to go back to.
class DeoptimizationEntry final {
 public:
  DeoptimizationEntry() = default;
  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
                      DeoptimizeReason reason, FeedbackSource const& feedback)
      : descriptor_(descriptor),
        kind_(kind),
        reason_(reason),
        feedback_(feedback) {}

  FrameStateDescriptor* descriptor() const { return descriptor_; }
  DeoptimizeKind kind() const { return kind_; }
  DeoptimizeReason reason() const { return reason_; }
  FeedbackSource const& feedback() const { return feedback_; }

 private:
  FrameStateDescriptor* descriptor_ = nullptr;
  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
  DeoptimizeReason reason_ = DeoptimizeReason::kUnknown;
  FeedbackSource feedback_ = FeedbackSource();
};

using DeoptimizationVector = ZoneVector<DeoptimizationEntry>;

// A phi for the instruction sequence: holds the phi's own virtual register,
// its (allocated) output operand, and the input virtual-register numbers.
class V8_EXPORT_PRIVATE PhiInstruction final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  using Inputs = ZoneVector<InstructionOperand>;

  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);

  void SetInput(size_t offset, int virtual_register);
  void RenameInput(size_t offset, int virtual_register);

  int virtual_register() const { return virtual_register_; }
  const IntVector& operands() const { return operands_; }

  // TODO(dcarney): this has no real business being here, since it's internal to
  // the register allocator, but putting it here was convenient.
  const InstructionOperand& output() const { return output_; }
  InstructionOperand& output() { return output_; }

 private:
  const int virtual_register_;
  InstructionOperand output_;
  IntVector operands_;
};

// Analogue of BasicBlock for Instructions instead of Nodes.
class V8_EXPORT_PRIVATE InstructionBlock final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
                   RpoNumber loop_end, RpoNumber dominator, bool deferred,
                   bool handler);

  // Instruction indexes (used by the register allocator).
  int first_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_start_;
  }
  int last_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    // code_end_ is exclusive, so the last instruction is one before it.
    return code_end_ - 1;
  }

  int32_t code_start() const { return code_start_; }
  void set_code_start(int32_t start) { code_start_ = start; }

  int32_t code_end() const { return code_end_; }
  void set_code_end(int32_t end) { code_end_ = end; }

  bool IsDeferred() const { return deferred_; }
  bool IsHandler() const { return handler_; }
  void MarkHandler() { handler_ = true; }
  void UnmarkHandler() { handler_ = false; }

  RpoNumber ao_number() const { return ao_number_; }
  RpoNumber rpo_number() const { return rpo_number_; }
  RpoNumber loop_header() const { return loop_header_; }
  RpoNumber loop_end() const {
    DCHECK(IsLoopHeader());
    return loop_end_;
  }
  // A block is a loop header iff its loop_end_ was set to a valid number.
  inline bool IsLoopHeader() const { return loop_end_.IsValid(); }
  inline bool IsSwitchTarget() const { return switch_target_; }
  inline bool ShouldAlign() const { return alignment_; }

  using Predecessors = ZoneVector<RpoNumber>;
  Predecessors& predecessors() { return predecessors_; }
  const Predecessors& predecessors() const { return predecessors_; }
  size_t PredecessorCount() const { return predecessors_.size(); }
  size_t PredecessorIndexOf(RpoNumber rpo_number) const;

  using Successors = ZoneVector<RpoNumber>;
  Successors& successors() { return successors_; }
  const Successors& successors() const { return successors_; }
  size_t SuccessorCount() const { return successors_.size(); }

  RpoNumber dominator() const { return dominator_; }
  void set_dominator(RpoNumber dominator) { dominator_ = dominator; }

  using PhiInstructions = ZoneVector<PhiInstruction*>;
  const PhiInstructions& phis() const { return phis_; }
  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
  void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }

  void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }

  void set_alignment(bool val) { alignment_ = val; }

  void set_switch_target(bool val) { switch_target_ = val; }

  // Frame-related flags set during frame elision/assignment.
  bool needs_frame() const { return needs_frame_; }
  void mark_needs_frame() { needs_frame_ = true; }

  bool must_construct_frame() const { return must_construct_frame_; }
  void mark_must_construct_frame() { must_construct_frame_ = true; }

  bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
  void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }
  void clear_must_deconstruct_frame() { must_deconstruct_frame_ = false; }

 private:
  Successors successors_;
  Predecessors predecessors_;
  PhiInstructions phis_;
  RpoNumber ao_number_;  // Assembly order number.
  const RpoNumber rpo_number_;
  const RpoNumber loop_header_;
  const RpoNumber loop_end_;
  RpoNumber dominator_;
  int32_t code_start_;   // start index of arch-specific code.
  int32_t code_end_ = -1;     // end index of arch-specific code.
  const bool deferred_ : 1;   // Block contains deferred code.
  bool handler_ : 1;          // Block is a handler entry point.
  bool switch_target_ : 1;    // Set via set_switch_target().
  bool alignment_ : 1;  // insert alignment before this block
  bool needs_frame_ : 1;
  bool must_construct_frame_ : 1;
  bool must_deconstruct_frame_ : 1;
};

class InstructionSequence;

// Pairs an InstructionBlock with its owning InstructionSequence so the
// block can be streamed with full context via operator<<.
struct PrintableInstructionBlock {
  const InstructionBlock* block_;
  const InstructionSequence* code_;
};

std::ostream& operator<<(std::ostream&, const PrintableInstructionBlock&);

// Zone-allocated container aliases used throughout the backend.
using ConstantDeque = ZoneDeque<Constant>;
using ConstantMap = std::map<int, Constant, std::less<int>,
                             ZoneAllocator<std::pair<const int, Constant> > >;

using InstructionDeque = ZoneDeque<Instruction*>;
using ReferenceMapDeque = ZoneDeque<ReferenceMap*>;
using InstructionBlocks = ZoneVector<InstructionBlock*>;

// Represents architecture-specific generated code before, during, and after
// register allocation.
class V8_EXPORT_PRIVATE InstructionSequence final
    : public NON_EXPORTED_BASE(ZoneObject) {
1618
 public:
1619 1620
  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
                                                 const Schedule* schedule);
1621 1622
  InstructionSequence(Isolate* isolate, Zone* zone,
                      InstructionBlocks* instruction_blocks);
1623 1624
  InstructionSequence(const InstructionSequence&) = delete;
  InstructionSequence& operator=(const InstructionSequence&) = delete;
1625

1626
  int NextVirtualRegister();
1627 1628
  int VirtualRegisterCount() const { return next_virtual_register_; }

1629
  const InstructionBlocks& instruction_blocks() const {
1630
    return *instruction_blocks_;
1631 1632
  }

1633 1634
  const InstructionBlocks& ao_blocks() const { return *ao_blocks_; }

1635
  int InstructionBlockCount() const {
1636
    return static_cast<int>(instruction_blocks_->size());
1637 1638
  }

1639
  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
1640
    return instruction_blocks_->at(rpo_number.ToSize());
1641 1642
  }

1643
  int LastLoopInstructionIndex(const InstructionBlock* block) {
1644
    return instruction_blocks_->at(block->loop_end().ToSize() - 1)
1645 1646 1647
        ->last_instruction_index();
  }

1648
  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
1649
    return instruction_blocks_->at(rpo_number.ToSize());
1650 1651
  }

1652
  InstructionBlock* GetInstructionBlock(int instruction_index) const;
1653

1654 1655
  static MachineRepresentation DefaultRepresentation() {
    return MachineType::PointerRepresentation();
1656
  }
1657 1658
  MachineRepresentation GetRepresentation(int virtual_register) const;
  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);
1659

1660
  bool IsReference(int virtual_register) const {
1661
    return CanBeTaggedOrCompressedPointer(GetRepresentation(virtual_register));
1662
  }
1663
  bool IsFP(int virtual_register) const {
1664
    return IsFloatingPoint(GetRepresentation(virtual_register));
1665
  }
1666 1667 1668 1669 1670 1671 1672 1673
  int representation_mask() const { return representation_mask_; }
  bool HasFPVirtualRegisters() const {
    constexpr int kFPRepMask =
        RepresentationBit(MachineRepresentation::kFloat32) |
        RepresentationBit(MachineRepresentation::kFloat64) |
        RepresentationBit(MachineRepresentation::kSimd128);
    return (representation_mask() & kFPRepMask) != 0;
  }
1674

1675
  Instruction* GetBlockStart(RpoNumber rpo) const;
1676

1677
  using const_iterator = InstructionDeque::const_iterator;
1678 1679
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }
1680
  const InstructionDeque& instructions() const { return instructions_; }
1681 1682 1683
  int LastInstructionIndex() const {
    return static_cast<int>(instructions().size()) - 1;
  }
1684 1685

  Instruction* InstructionAt(int index) const {
1686 1687
    DCHECK_LE(0, index);
    DCHECK_GT(instructions_.size(), index);
1688 1689 1690
    return instructions_[index];
  }

1691
  Isolate* isolate() const { return isolate_; }
1692
  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
1693
  Zone* zone() const { return zone_; }
1694

1695 1696
  // Used by the instruction selector while adding instructions.
  int AddInstruction(Instruction* instr);
1697 1698
  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);
1699

1700
  int AddConstant(int virtual_register, Constant constant) {
1701
    // TODO(titzer): allow RPO numbers as constants?
1702
    DCHECK_NE(Constant::kRpoNumber, constant.type());
1703
    DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
1704
    DCHECK(constants_.find(virtual_register) == constants_.end());
1705
    constants_.insert(std::make_pair(virtual_register, constant));
1706
    return virtual_register;
1707 1708
  }
  Constant GetConstant(int virtual_register) const {
1709
    auto it = constants_.find(virtual_register);
1710 1711
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
1712 1713 1714
    return it->second;
  }

1715
  using Immediates = ZoneVector<Constant>;
1716
  Immediates& immediates() { return immediates_; }
1717

1718 1719 1720
  using RpoImmediates = ZoneVector<RpoNumber>;
  RpoImmediates& rpo_immediates() { return rpo_immediates_; }

1721
  ImmediateOperand AddImmediate(const Constant& constant) {
1722 1723 1724 1725 1726 1727 1728 1729 1730 1731 1732 1733 1734 1735 1736 1737 1738 1739 1740 1741 1742
    if (RelocInfo::IsNone(constant.rmode())) {
      if (constant.type() == Constant::kRpoNumber) {
        // Ideally we would inline RPO numbers into the operand, however jump-
        // threading modifies RPO values and so we indirect through a vector
        // of rpo_immediates to enable rewriting. We keep this seperate from the
        // immediates vector so that we don't repeatedly push the same rpo
        // number.
        RpoNumber rpo_number = constant.ToRpoNumber();
        DCHECK(!rpo_immediates().at(rpo_number.ToSize()).IsValid() ||
               rpo_immediates().at(rpo_number.ToSize()) == rpo_number);
        rpo_immediates()[rpo_number.ToSize()] = rpo_number;
        return ImmediateOperand(ImmediateOperand::INDEXED_RPO,
                                rpo_number.ToInt());
      } else if (constant.type() == Constant::kInt32) {
        return ImmediateOperand(ImmediateOperand::INLINE_INT32,
                                constant.ToInt32());
      } else if (constant.type() == Constant::kInt64 &&
                 constant.FitsInInt32()) {
        return ImmediateOperand(ImmediateOperand::INLINE_INT64,
                                constant.ToInt32());
      }
1743
    }
1744
    int index = static_cast<int>(immediates_.size());
1745
    immediates_.push_back(constant);
1746
    return ImmediateOperand(ImmediateOperand::INDEXED_IMM, index);
1747 1748 1749 1750
  }

  Constant GetImmediate(const ImmediateOperand* op) const {
    switch (op->type()) {
1751 1752 1753 1754 1755 1756 1757 1758 1759 1760 1761
      case ImmediateOperand::INLINE_INT32:
        return Constant(op->inline_int32_value());
      case ImmediateOperand::INLINE_INT64:
        return Constant(op->inline_int64_value());
      case ImmediateOperand::INDEXED_RPO: {
        int index = op->indexed_value();
        DCHECK_LE(0, index);
        DCHECK_GT(rpo_immediates_.size(), index);
        return Constant(rpo_immediates_[index]);
      }
      case ImmediateOperand::INDEXED_IMM: {
1762
        int index = op->indexed_value();
1763 1764
        DCHECK_LE(0, index);
        DCHECK_GT(immediates_.size(), index);
1765 1766 1767 1768
        return immediates_[index];
      }
    }
    UNREACHABLE();
1769 1770
  }

1771
  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
1772
                             DeoptimizeKind kind, DeoptimizeReason reason,
1773
                             FeedbackSource const& feedback);
1774 1775 1776
  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
  int GetDeoptimizationEntryCount() const {
    return static_cast<int>(deoptimization_entries_.size());
1777
  }
1778

1779
  RpoNumber InputRpo(Instruction* instr, size_t index);
1780

1781 1782 1783 1784
  bool GetSourcePosition(const Instruction* instr,
                         SourcePosition* result) const;
  void SetSourcePosition(const Instruction* instr, SourcePosition value);

1785 1786 1787 1788 1789 1790 1791
  bool ContainsCall() const {
    for (Instruction* instr : instructions_) {
      if (instr->IsCall()) return true;
    }
    return false;
  }

1792
  // APIs to aid debugging. For general-stream APIs, use operator<<.
1793 1794 1795
  void Print() const;

  void PrintBlock(int block_id) const;
1796

1797 1798 1799 1800
  void ValidateEdgeSplitForm() const;
  void ValidateDeferredBlockExitPaths() const;
  void ValidateDeferredBlockEntryPaths() const;
  void ValidateSSA() const;
1801

1802 1803 1804 1805
  static void SetRegisterConfigurationForTesting(
      const RegisterConfiguration* regConfig);
  static void ClearRegisterConfigurationForTesting();

1806 1807
  void RecomputeAssemblyOrderForTesting();

1808 1809 1810 1811 1812
  void IncreaseRpoForTesting(size_t rpo_count) {
    DCHECK_GE(rpo_count, rpo_immediates().size());
    rpo_immediates().resize(rpo_count);
  }

1813
 private:
1814 1815
  friend V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                                    const InstructionSequence&);
1816

1817
  using SourcePositionMap = ZoneMap<const Instruction*, SourcePosition>;
1818

1819 1820 1821
  static const RegisterConfiguration* RegisterConfigurationForTesting();
  static const RegisterConfiguration* registerConfigurationForTesting_;

1822 1823 1824
  // Puts the deferred blocks last and may rotate loops.
  void ComputeAssemblyOrder();

1825
  Isolate* isolate_;
1826
  Zone* const zone_;
1827
  InstructionBlocks* const instruction_blocks_;
1828
  InstructionBlocks* ao_blocks_;
1829
  SourcePositionMap source_positions_;
1830
  ConstantMap constants_;
1831
  Immediates immediates_;
1832
  RpoImmediates rpo_immediates_;
1833 1834
  InstructionDeque instructions_;
  int next_virtual_register_;
1835
  ReferenceMapDeque reference_maps_;
1836
  ZoneVector<MachineRepresentation> representations_;
1837
  int representation_mask_;
1838
  DeoptimizationVector deoptimization_entries_;
1839

1840 1841
  // Used at construction time
  InstructionBlock* current_block_;
1842 1843
};

1844 1845
V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                           const InstructionSequence&);
1846
#undef INSTRUCTION_OPERAND_ALIGN
1847 1848 1849 1850 1851

}  // namespace compiler
}  // namespace internal
}  // namespace v8

1852
#endif  // V8_COMPILER_BACKEND_INSTRUCTION_H_