// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_INSTRUCTION_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_H_

#include <deque>
#include <iosfwd>
#include <map>
#include <set>

#include "src/base/compiler-specific.h"
#include "src/codegen/external-reference.h"
#include "src/codegen/register-arch.h"
#include "src/codegen/source-position.h"
#include "src/common/globals.h"
#include "src/compiler/backend/instruction-codes.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/feedback-source.h"
#include "src/compiler/frame.h"
#include "src/compiler/opcodes.h"
#include "src/numbers/double.h"
#include "src/zone/zone-allocator.h"

namespace v8 {
namespace internal {

class RegisterConfiguration;

namespace compiler {

class Schedule;
class SourcePositionTable;

class V8_EXPORT_PRIVATE InstructionOperand {
 public:
  static const int kInvalidVirtualRegister = -1;

  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    // Location operand kinds.
    ALLOCATED,
    FIRST_LOCATION_OPERAND_KIND = ALLOCATED
    // Location operand kinds must be last.
  };

  InstructionOperand() : InstructionOperand(INVALID) {}

  Kind kind() const { return KindField::decode(value_); }

#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
  // UnallocatedOperands are placeholder operands created before register
  // allocation. They are later assigned registers and become
  // AllocatedOperands.
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  // Constant operands participate in register allocation. They are allocated
  // to registers but have a special "spilling" behavior. When a
  // ConstantOperand value must be rematerialized, it is loaded from an
  // immediate constant rather than from a spill slot.
  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
  // ImmediateOperands do not participate in register allocation and are only
  // embedded directly in instructions, e.g. small integers and, on some
  // platforms, Objects.
  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
  // AllocatedOperands are registers or stack slots that are assigned by the
  // register allocator and are always associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE

  inline bool IsAnyLocationOperand() const;
  inline bool IsLocationOperand() const;
  inline bool IsFPLocationOperand() const;
  inline bool IsAnyRegister() const;
  inline bool IsRegister() const;
  inline bool IsFPRegister() const;
  inline bool IsFloatRegister() const;
  inline bool IsDoubleRegister() const;
  inline bool IsSimd128Register() const;
  inline bool IsAnyStackSlot() const;
  inline bool IsStackSlot() const;
  inline bool IsFPStackSlot() const;
  inline bool IsFloatStackSlot() const;
  inline bool IsDoubleStackSlot() const;
  inline bool IsSimd128StackSlot() const;

  template <typename SubKindOperand>
  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
    void* buffer = zone->New(sizeof(op));
    return new (buffer) SubKindOperand(op);
  }

  static void ReplaceWith(InstructionOperand* dest,
                          const InstructionOperand* src) {
    *dest = *src;
  }

  bool Equals(const InstructionOperand& that) const {
    return this->value_ == that.value_;
  }

  bool Compare(const InstructionOperand& that) const {
    return this->value_ < that.value_;
  }

  bool EqualsCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
  }

  bool CompareCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
  }

  bool InterferesWith(const InstructionOperand& other) const;

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 protected:
  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}

  inline uint64_t GetCanonicalizedValue() const;

  using KindField = base::BitField64<Kind, 0, 3>;

  uint64_t value_;
};

using InstructionOperandVector = ZoneVector<InstructionOperand>;

std::ostream& operator<<(std::ostream&, const InstructionOperand&);

#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind)      \
                                                                 \
  static OperandType* cast(InstructionOperand* op) {             \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<OperandType*>(op);                        \
  }                                                              \
                                                                 \
  static const OperandType* cast(const InstructionOperand* op) { \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<const OperandType*>(op);                  \
  }                                                              \
                                                                 \
  static OperandType cast(const InstructionOperand& op) {        \
    DCHECK_EQ(OperandKind, op.kind());                           \
    return *static_cast<const OperandType*>(&op);                \
  }

class UnallocatedOperand final : public InstructionOperand {
 public:
  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

  enum ExtendedPolicy {
    NONE,
    REGISTER_OR_SLOT,
    REGISTER_OR_SLOT_OR_CONSTANT,
    FIXED_REGISTER,
    FIXED_FP_REGISTER,
    MUST_HAVE_REGISTER,
    MUST_HAVE_SLOT,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at instruction start.
    // The register allocator is free to assign the same register to some other
    // operand used inside instruction (i.e. temporary or output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of instruction.
    // This means that register allocator will not reuse its register for any
    // other operand inside instruction.
    USED_AT_END
  };

  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_SLOT);
    value_ |= BasicPolicyField::encode(policy);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << FixedSlotIndexField::kShift;
    DCHECK(this->fixed_slot_index() == index);
  }

  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
                     int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
    value_ |= HasSecondaryStorageField::encode(true);
    value_ |= SecondaryStorageField::encode(slot_id);
  }

  UnallocatedOperand(const UnallocatedOperand& other, int virtual_register) {
    DCHECK_NE(kInvalidVirtualRegister, virtual_register);
    value_ = VirtualRegisterField::update(
        other.value_, static_cast<uint32_t>(virtual_register));
  }

  // Predicates for the operand policy.
  bool HasRegisterOrSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT;
  }
  bool HasRegisterOrSlotOrConstantPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT_OR_CONSTANT;
  }
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
           extended_policy() == FIXED_REGISTER ||
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_REGISTER;
  }
  bool HasSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_SLOT;
  }
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == SAME_AS_FIRST_INPUT;
  }
  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER;
  }
  bool HasFixedFPRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasSecondaryStorage() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER &&
           HasSecondaryStorageField::decode(value_);
  }
  int GetSecondaryStorage() const {
    DCHECK(HasSecondaryStorage());
    return SecondaryStorageField::decode(value_);
  }

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
  BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return ExtendedPolicyField::decode(value_);
  }

  // [fixed_slot_index]: Only for FIXED_SLOT.
  int fixed_slot_index() const {
    DCHECK(HasFixedSlotPolicy());
    return static_cast<int>(static_cast<int64_t>(value_) >>
                            FixedSlotIndexField::kShift);
  }

  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
  int fixed_register_index() const {
    DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  // [lifetime]: Only for non-FIXED_SLOT.
  bool IsUsedAtStart() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return LifetimeField::decode(value_) == USED_AT_START;
  }

  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED)

  // The encoding used for UnallocatedOperand operands depends on the policy
  // that is stored within the operand. The FIXED_SLOT policy uses a compact
  // encoding because it accommodates a larger payload.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------------+
  //     |      slot_index   | 0 | virtual_register | 001 |
  //     +------------------------------------------------+
  //
  // For all other (extended) policies:
  //     +-----------------------------------------------------+
  //     |  reg_index  | L | PPP |  1 | virtual_register | 001 |
  //     +-----------------------------------------------------+
  //     L ... Lifetime
  //     P ... Policy
  //
  // The slot index is a signed value, which requires us to decode it manually
  // instead of using the base::BitField utility class.

  STATIC_ASSERT(KindField::kSize == 3);

  using VirtualRegisterField = base::BitField64<uint32_t, 3, 32>;

  // base::BitFields for all unallocated operands.
  using BasicPolicyField = base::BitField64<BasicPolicy, 35, 1>;

  // BitFields specific to BasicPolicy::FIXED_SLOT.
  using FixedSlotIndexField = base::BitField64<int, 36, 28>;

  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
  using ExtendedPolicyField = base::BitField64<ExtendedPolicy, 36, 3>;
  using LifetimeField = base::BitField64<Lifetime, 39, 1>;
  using HasSecondaryStorageField = base::BitField64<bool, 40, 1>;
  using FixedRegisterField = base::BitField64<int, 41, 6>;
  using SecondaryStorageField = base::BitField64<int, 47, 3>;

 private:
  explicit UnallocatedOperand(int virtual_register)
      : InstructionOperand(UNALLOCATED) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
};
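// Illustrative sketch (editor's addition, not part of the original header):
// a few common ways a virtual register `vreg` can be constrained before
// register allocation. `vreg`, `kRegCode`, and `slot` are hypothetical values.
//
//   UnallocatedOperand any(UnallocatedOperand::REGISTER_OR_SLOT, vreg);
//   UnallocatedOperand reg(UnallocatedOperand::MUST_HAVE_REGISTER, vreg);
//   UnallocatedOperand fixed_reg(UnallocatedOperand::FIXED_REGISTER, kRegCode,
//                                vreg);
//   UnallocatedOperand fixed_slot(UnallocatedOperand::FIXED_SLOT, slot, vreg);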

class ConstantOperand : public InstructionOperand {
 public:
  explicit ConstantOperand(int virtual_register)
      : InstructionOperand(CONSTANT) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }

  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  static ConstantOperand* New(Zone* zone, int virtual_register) {
    return InstructionOperand::New(zone, ConstantOperand(virtual_register));
  }

  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT)

  STATIC_ASSERT(KindField::kSize == 3);
  using VirtualRegisterField = base::BitField64<uint32_t, 3, 32>;
};

class ImmediateOperand : public InstructionOperand {
 public:
  enum ImmediateType { INLINE, INDEXED };

  explicit ImmediateOperand(ImmediateType type, int32_t value)
      : InstructionOperand(IMMEDIATE) {
    value_ |= TypeField::encode(type);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(value))
              << ValueField::kShift;
  }

  ImmediateType type() const { return TypeField::decode(value_); }

  int32_t inline_value() const {
    DCHECK_EQ(INLINE, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int32_t indexed_value() const {
    DCHECK_EQ(INDEXED, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
    return InstructionOperand::New(zone, ImmediateOperand(type, value));
  }

  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE)

  STATIC_ASSERT(KindField::kSize == 3);
  using TypeField = base::BitField64<ImmediateType, 3, 1>;
  using ValueField = base::BitField64<int32_t, 32, 32>;
};
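// Illustrative sketch (editor's addition): an INLINE immediate carries its
// 32-bit payload directly in the operand, while an INDEXED immediate stores
// only an index that is resolved against a constant table elsewhere (an
// assumption based on the accessors in this header).
//
//   ImmediateOperand imm(ImmediateOperand::INLINE, 42);
//   int32_t v = imm.inline_value();  // 42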

class LocationOperand : public InstructionOperand {
 public:
  enum LocationKind { REGISTER, STACK_SLOT };

  LocationOperand(InstructionOperand::Kind operand_kind,
                  LocationOperand::LocationKind location_kind,
                  MachineRepresentation rep, int index)
      : InstructionOperand(operand_kind) {
    DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
    DCHECK(IsSupportedRepresentation(rep));
    value_ |= LocationKindField::encode(location_kind);
    value_ |= RepresentationField::encode(rep);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << IndexField::kShift;
  }

  int index() const {
    DCHECK(IsStackSlot() || IsFPStackSlot());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  int register_code() const {
    DCHECK(IsRegister() || IsFPRegister());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  Register GetRegister() const {
    DCHECK(IsRegister());
    return Register::from_code(register_code());
  }

  FloatRegister GetFloatRegister() const {
    DCHECK(IsFloatRegister());
    return FloatRegister::from_code(register_code());
  }

  DoubleRegister GetDoubleRegister() const {
    // On platforms where FloatRegister, DoubleRegister, and Simd128Register
    // are all the same type, it's convenient to treat everything as a
    // DoubleRegister, so be lax about type checking here.
    DCHECK(IsFPRegister());
    return DoubleRegister::from_code(register_code());
  }

  Simd128Register GetSimd128Register() const {
    DCHECK(IsSimd128Register());
    return Simd128Register::from_code(register_code());
  }

  LocationKind location_kind() const {
    return LocationKindField::decode(value_);
  }

  MachineRepresentation representation() const {
    return RepresentationField::decode(value_);
  }

  static bool IsSupportedRepresentation(MachineRepresentation rep) {
    switch (rep) {
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kFloat32:
      case MachineRepresentation::kFloat64:
      case MachineRepresentation::kSimd128:
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
      case MachineRepresentation::kTagged:
      case MachineRepresentation::kCompressedPointer:
      case MachineRepresentation::kCompressed:
        return true;
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8:
      case MachineRepresentation::kWord16:
      case MachineRepresentation::kNone:
        return false;
    }
    UNREACHABLE();
  }

  // Return true if the locations can be moved to one another.
  bool IsCompatible(LocationOperand* op);

  static LocationOperand* cast(InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<LocationOperand*>(op);
  }

  static const LocationOperand* cast(const InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<const LocationOperand*>(op);
  }

  static LocationOperand cast(const InstructionOperand& op) {
    DCHECK(op.IsAnyLocationOperand());
    return *static_cast<const LocationOperand*>(&op);
  }

  STATIC_ASSERT(KindField::kSize == 3);
  using LocationKindField = base::BitField64<LocationKind, 3, 2>;
  using RepresentationField = base::BitField64<MachineRepresentation, 5, 8>;
  using IndexField = base::BitField64<int32_t, 35, 29>;
};

class AllocatedOperand : public LocationOperand {
 public:
  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
      : LocationOperand(ALLOCATED, kind, rep, index) {}

  static AllocatedOperand* New(Zone* zone, LocationKind kind,
                               MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED)
};
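// Illustrative sketch (editor's addition): after allocation, a value lives in
// a concrete register or stack slot. The register code 3 and slot index -1
// below are hypothetical.
//
//   AllocatedOperand in_reg(LocationOperand::REGISTER,
//                           MachineRepresentation::kTagged, 3);
//   AllocatedOperand spilled(LocationOperand::STACK_SLOT,
//                            MachineRepresentation::kFloat64, -1);
//   // in_reg.IsRegister() and spilled.IsDoubleStackSlot() both hold.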

#undef INSTRUCTION_OPERAND_CASTS

bool InstructionOperand::IsAnyLocationOperand() const {
  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
}

bool InstructionOperand::IsLocationOperand() const {
  return IsAnyLocationOperand() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPLocationOperand() const {
  return IsAnyLocationOperand() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsAnyRegister() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::REGISTER;
}

bool InstructionOperand::IsRegister() const {
  return IsAnyRegister() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPRegister() const {
  return IsAnyRegister() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatRegister() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleRegister() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128Register() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kSimd128;
}

bool InstructionOperand::IsAnyStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT;
}

bool InstructionOperand::IsStackSlot() const {
  return IsAnyStackSlot() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPStackSlot() const {
  return IsAnyStackSlot() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128StackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kSimd128;
}

uint64_t InstructionOperand::GetCanonicalizedValue() const {
  if (IsAnyLocationOperand()) {
    MachineRepresentation canonical = MachineRepresentation::kNone;
    if (IsFPRegister()) {
      if (kSimpleFPAliasing) {
        // We treat all FP register operands the same for simple aliasing.
        canonical = MachineRepresentation::kFloat64;
      } else {
        // We need to distinguish FP register operands of different reps when
        // aliasing is not simple (e.g. ARM).
        canonical = LocationOperand::cast(this)->representation();
      }
    }
    return InstructionOperand::KindField::update(
        LocationOperand::RepresentationField::update(this->value_, canonical),
        LocationOperand::ALLOCATED);
  }
  return this->value_;
}

// Required for maps that don't care about machine type.
struct CompareOperandModuloType {
  bool operator()(const InstructionOperand& a,
                  const InstructionOperand& b) const {
    return a.CompareCanonicalized(b);
  }
};
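// Illustrative sketch (editor's addition): CompareOperandModuloType orders
// operands by their canonicalized value, so allocated operands that differ
// only in machine representation (for FP registers, subject to
// kSimpleFPAliasing) collapse to the same map key. `some_operand` is
// hypothetical.
//
//   std::map<InstructionOperand, int, CompareOperandModuloType> use_counts;
//   use_counts[some_operand]++;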

class V8_EXPORT_PRIVATE MoveOperands final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  MoveOperands(const InstructionOperand& source,
               const InstructionOperand& destination)
      : source_(source), destination_(destination) {
    DCHECK(!source.IsInvalid() && !destination.IsInvalid());
  }

  const InstructionOperand& source() const { return source_; }
  InstructionOperand& source() { return source_; }
  void set_source(const InstructionOperand& operand) { source_ = operand; }

  const InstructionOperand& destination() const { return destination_; }
  InstructionOperand& destination() { return destination_; }
  void set_destination(const InstructionOperand& operand) {
    destination_ = operand;
  }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_.IsInvalid() && !source_.IsInvalid();
  }
  void SetPending() { destination_ = InstructionOperand(); }

  // A move is redundant if it's been eliminated or if its source and
  // destination are the same.
  bool IsRedundant() const {
    DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
    return IsEliminated() || source_.EqualsCanonicalized(destination_);
  }

  // We clear both operands to indicate a move that has been eliminated.
  void Eliminate() { source_ = destination_ = InstructionOperand(); }
  bool IsEliminated() const {
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 private:
  InstructionOperand source_;
  InstructionOperand destination_;

  DISALLOW_COPY_AND_ASSIGN(MoveOperands);
};

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, const MoveOperands&);

class V8_EXPORT_PRIVATE ParallelMove final
    : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
      public NON_EXPORTED_BASE(ZoneObject) {
 public:
  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {}

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to) {
    Zone* zone = get_allocator().zone();
    return AddMove(from, to, zone);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to,
                        Zone* operand_allocation_zone) {
    if (from.EqualsCanonicalized(to)) return nullptr;
    MoveOperands* move = new (operand_allocation_zone) MoveOperands(from, to);
    if (empty()) reserve(4);
    push_back(move);
    return move;
  }

  bool IsRedundant() const;

  // Prepare this ParallelMove to insert move as if it happened in a subsequent
  // ParallelMove.  move->source() may be changed.  Any MoveOperands added to
  // to_eliminate must be Eliminated.
  void PrepareInsertAfter(MoveOperands* move,
                          ZoneVector<MoveOperands*>* to_eliminate) const;

 private:
  DISALLOW_COPY_AND_ASSIGN(ParallelMove);
};
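// Illustrative sketch (editor's addition): AddMove silently drops moves whose
// source and destination canonicalize to the same location, so a nullptr
// result is expected. `zone`, `from`, and `to` are hypothetical.
//
//   ParallelMove* moves = new (zone) ParallelMove(zone);
//   if (MoveOperands* move = moves->AddMove(from, to)) {
//     // A non-redundant move was recorded.
//   }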

std::ostream& operator<<(std::ostream&, const ParallelMove&);

class ReferenceMap final : public ZoneObject {
 public:
  explicit ReferenceMap(Zone* zone)
      : reference_operands_(8, zone), instruction_position_(-1) {}

  const ZoneVector<InstructionOperand>& reference_operands() const {
    return reference_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  void set_instruction_position(int pos) {
    DCHECK_EQ(-1, instruction_position_);
    instruction_position_ = pos;
  }

  void RecordReference(const AllocatedOperand& op);

 private:
  friend std::ostream& operator<<(std::ostream&, const ReferenceMap&);

  ZoneVector<InstructionOperand> reference_operands_;
  int instruction_position_;
};

std::ostream& operator<<(std::ostream&, const ReferenceMap&);

class InstructionBlock;

class V8_EXPORT_PRIVATE Instruction final {
 public:
  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  const InstructionOperand* OutputAt(size_t i) const {
    DCHECK_LT(i, OutputCount());
    return &operands_[i];
  }
  InstructionOperand* OutputAt(size_t i) {
    DCHECK_LT(i, OutputCount());
    return &operands_[i];
  }

  bool HasOutput() const { return OutputCount() > 0; }
  const InstructionOperand* Output() const { return OutputAt(0); }
  InstructionOperand* Output() { return OutputAt(0); }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  const InstructionOperand* InputAt(size_t i) const {
    DCHECK_LT(i, InputCount());
    return &operands_[OutputCount() + i];
  }
  InstructionOperand* InputAt(size_t i) {
    DCHECK_LT(i, InputCount());
    return &operands_[OutputCount() + i];
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  const InstructionOperand* TempAt(size_t i) const {
    DCHECK_LT(i, TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }
  InstructionOperand* TempAt(size_t i) {
    DCHECK_LT(i, TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }

  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }

  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
  }

  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand* outputs,
                          size_t input_count, InstructionOperand* inputs,
                          size_t temp_count, InstructionOperand* temps) {
    DCHECK(output_count == 0 || outputs != nullptr);
    DCHECK(input_count == 0 || inputs != nullptr);
    DCHECK(temp_count == 0 || temps != nullptr);
    // TODO(turbofan): Handle this gracefully. See crbug.com/582702.
    CHECK(InputCountField::is_valid(input_count));

    size_t total_extra_ops = output_count + input_count + temp_count;
    if (total_extra_ops != 0) total_extra_ops--;
    int size = static_cast<int>(
        RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
        total_extra_ops * sizeof(InstructionOperand));
    return new (zone->New(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }

  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsReferenceMap() const { return IsCall(); }
  bool HasReferenceMap() const { return reference_map_ != nullptr; }

  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  ReferenceMap* reference_map() const { return reference_map_; }

  void set_reference_map(ReferenceMap* map) {
    DCHECK(NeedsReferenceMap());
    DCHECK(!reference_map_);
    reference_map_ = map;
  }

  void OverwriteWithNop() {
    opcode_ = ArchOpcodeField::encode(kArchNop);
    bit_field_ = 0;
    reference_map_ = nullptr;
  }

  bool IsNop() const { return arch_opcode() == kArchNop; }

  bool IsDeoptimizeCall() const {
    return arch_opcode() == ArchOpcode::kArchDeoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize_and_poison;
  }

  bool IsTrap() const {
    return FlagsModeField::decode(opcode()) == kFlags_trap;
  }

  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
  bool IsTailCall() const {
    return arch_opcode() <= ArchOpcode::kArchTailCallWasm;
  }
  bool IsThrow() const {
    return arch_opcode() == ArchOpcode::kArchThrowTerminator;
  }

  enum GapPosition {
    START,
    END,
    FIRST_GAP_POSITION = START,
    LAST_GAP_POSITION = END
  };

  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == nullptr) {
      parallel_moves_[pos] = new (zone) ParallelMove(zone);
    }
    return parallel_moves_[pos];
  }

  ParallelMove* GetParallelMove(GapPosition pos) {
    return parallel_moves_[pos];
  }

  const ParallelMove* GetParallelMove(GapPosition pos) const {
    return parallel_moves_[pos];
  }

  bool AreMovesRedundant() const;

  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }

  // The block_id may be invalidated in JumpThreading. It is only important for
  // register allocation, to avoid searching for blocks from instruction
  // indexes.
  InstructionBlock* block() const { return block_; }
  void set_block(InstructionBlock* block) {
    DCHECK_NOT_NULL(block);
    block_ = block;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  using OutputCountField = base::BitField<size_t, 0, 8>;
  using InputCountField = base::BitField<size_t, 8, 16>;
  using TempCountField = base::BitField<size_t, 24, 6>;

  static const size_t kMaxOutputCount = OutputCountField::kMax;
  static const size_t kMaxInputCount = InputCountField::kMax;
  static const size_t kMaxTempCount = TempCountField::kMax;

 private:
  explicit Instruction(InstructionCode opcode);

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand* outputs, size_t input_count,
              InstructionOperand* inputs, size_t temp_count,
              InstructionOperand* temps);

  using IsCallField = base::BitField<bool, 30, 1>;

  InstructionCode opcode_;
  uint32_t bit_field_;
  ParallelMove* parallel_moves_[2];
  ReferenceMap* reference_map_;
  InstructionBlock* block_;
  InstructionOperand operands_[1];

  DISALLOW_COPY_AND_ASSIGN(Instruction);
};
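// Illustrative sketch (editor's addition): building an instruction with one
// output, two inputs, and no temps. The opcode and virtual registers are
// hypothetical; kArchNop merely stands in for a real target opcode.
//
//   InstructionOperand outputs[] = {
//       UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER, dst_vreg)};
//   InstructionOperand inputs[] = {
//       UnallocatedOperand(UnallocatedOperand::REGISTER_OR_SLOT, lhs_vreg),
//       UnallocatedOperand(UnallocatedOperand::REGISTER_OR_SLOT, rhs_vreg)};
//   Instruction* instr =
//       Instruction::New(zone, kArchNop, 1, outputs, 2, inputs, 0, nullptr);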

std::ostream& operator<<(std::ostream&, const Instruction&);

class RpoNumber final {
 public:
  static const int kInvalidRpoNumber = -1;
  int ToInt() const {
    DCHECK(IsValid());
    return index_;
  }
  size_t ToSize() const {
    DCHECK(IsValid());
    return static_cast<size_t>(index_);
  }
  bool IsValid() const { return index_ >= 0; }
  static RpoNumber FromInt(int index) { return RpoNumber(index); }
  static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }

  bool IsNext(const RpoNumber other) const {
    DCHECK(IsValid());
    return other.index_ == this->index_ + 1;
  }

  RpoNumber Next() const {
    DCHECK(IsValid());
    return RpoNumber(index_ + 1);
  }

  // Comparison operators.
  bool operator==(RpoNumber other) const { return index_ == other.index_; }
  bool operator!=(RpoNumber other) const { return index_ != other.index_; }
  bool operator>(RpoNumber other) const { return index_ > other.index_; }
  bool operator<(RpoNumber other) const { return index_ < other.index_; }
  bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
  bool operator>=(RpoNumber other) const { return index_ >= other.index_; }

 private:
  explicit RpoNumber(int32_t index) : index_(index) {}
  int32_t index_;
};

std::ostream& operator<<(std::ostream&, const RpoNumber&);

class V8_EXPORT_PRIVATE Constant final {
 public:
  enum Type {
    kInt32,
    kInt64,
    kFloat32,
    kFloat64,
    kExternalReference,
    kCompressedHeapObject,
    kHeapObject,
    kRpoNumber,
    kDelayedStringConstant
  };

  explicit Constant(int32_t v);
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
  explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref.address())) {}
  explicit Constant(Handle<HeapObject> obj, bool is_compressed = false)
      : type_(is_compressed ? kCompressedHeapObject : kHeapObject),
        value_(bit_cast<intptr_t>(obj)) {}
  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
  explicit Constant(const StringConstantBase* str)
      : type_(kDelayedStringConstant), value_(bit_cast<intptr_t>(str)) {}
  explicit Constant(RelocatablePtrConstantInfo info);

  Type type() const { return type_; }

  RelocInfo::Mode rmode() const { return rmode_; }

  int32_t ToInt32() const {
    DCHECK(type() == kInt32 || type() == kInt64);
    const int32_t value = static_cast<int32_t>(value_);
    DCHECK_EQ(value_, static_cast<int64_t>(value));
    return value;
  }

  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kInt64, type());
    return value_;
  }

  float ToFloat32() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as float can cause
    // the signalling bit to flip, and value_ is returned as a quiet NaN.
    DCHECK_EQ(kFloat32, type());
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  uint32_t ToFloat32AsInt() const {
    DCHECK_EQ(kFloat32, type());
    return bit_cast<uint32_t>(static_cast<int32_t>(value_));
  }

  Double ToFloat64() const {
    DCHECK_EQ(kFloat64, type());
    return Double(bit_cast<uint64_t>(value_));
  }

  ExternalReference ToExternalReference() const {
    DCHECK_EQ(kExternalReference, type());
    return ExternalReference::FromRawAddress(static_cast<Address>(value_));
  }

  RpoNumber ToRpoNumber() const {
    DCHECK_EQ(kRpoNumber, type());
    return RpoNumber::FromInt(static_cast<int>(value_));
  }

  Handle<HeapObject> ToHeapObject() const;
  Handle<Code> ToCode() const;
  const StringConstantBase* ToDelayedStringConstant() const;

 private:
  Type type_;
  RelocInfo::Mode rmode_ = RelocInfo::NONE;
  int64_t value_;
};
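// Illustrative sketch (editor's addition): a Constant pairs a 64-bit payload
// with a Type tag, and the To*() accessors must match the constructor used.
//
//   Constant c(int32_t{42});     // type() == Constant::kInt32
//   int64_t wide = c.ToInt64();  // kInt32 values widen transparently.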

std::ostream& operator<<(std::ostream&, const Constant&);

// Forward declarations.
class FrameStateDescriptor;

enum class StateValueKind : uint8_t {
  kArgumentsElements,
  kArgumentsLength,
  kPlain,
  kOptimizedOut,
  kNested,
  kDuplicate
};

class StateValueDescriptor {
 public:
  StateValueDescriptor()
      : kind_(StateValueKind::kPlain), type_(MachineType::AnyTagged()) {}

  static StateValueDescriptor ArgumentsElements(ArgumentsStateType type) {
    StateValueDescriptor descr(StateValueKind::kArgumentsElements,
                               MachineType::AnyTagged());
    descr.args_type_ = type;
    return descr;
  }
  static StateValueDescriptor ArgumentsLength(ArgumentsStateType type) {
    StateValueDescriptor descr(StateValueKind::kArgumentsLength,
                               MachineType::AnyTagged());
    descr.args_type_ = type;
    return descr;
  }
  static StateValueDescriptor Plain(MachineType type) {
    return StateValueDescriptor(StateValueKind::kPlain, type);
  }
  static StateValueDescriptor OptimizedOut() {
    return StateValueDescriptor(StateValueKind::kOptimizedOut,
                                MachineType::AnyTagged());
  }
  static StateValueDescriptor Recursive(size_t id) {
    StateValueDescriptor descr(StateValueKind::kNested,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
  }
  static StateValueDescriptor Duplicate(size_t id) {
    StateValueDescriptor descr(StateValueKind::kDuplicate,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
  }

  bool IsArgumentsElements() const {
    return kind_ == StateValueKind::kArgumentsElements;
  }
  bool IsArgumentsLength() const {
    return kind_ == StateValueKind::kArgumentsLength;
  }
  bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
  bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
  bool IsNested() const { return kind_ == StateValueKind::kNested; }
  bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
  MachineType type() const { return type_; }
  size_t id() const {
    DCHECK(kind_ == StateValueKind::kDuplicate ||
           kind_ == StateValueKind::kNested);
    return id_;
  }
  ArgumentsStateType arguments_type() const {
    DCHECK(kind_ == StateValueKind::kArgumentsElements ||
           kind_ == StateValueKind::kArgumentsLength);
    return args_type_;
  }

 private:
  StateValueDescriptor(StateValueKind kind, MachineType type)
      : kind_(kind), type_(type) {}

  StateValueKind kind_;
  MachineType type_;
  union {
    size_t id_;
    ArgumentsStateType args_type_;
  };
};

class StateValueList {
 public:
  explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}

  size_t size() { return fields_.size(); }

  struct Value {
    StateValueDescriptor* desc;
    StateValueList* nested;

    Value(StateValueDescriptor* desc, StateValueList* nested)
        : desc(desc), nested(nested) {}
  };

  class iterator {
   public:
    // Bare minimum of operators needed for range iteration.
    bool operator!=(const iterator& other) const {
      return field_iterator != other.field_iterator;
    }
    bool operator==(const iterator& other) const {
      return field_iterator == other.field_iterator;
    }
    iterator& operator++() {
      if (field_iterator->IsNested()) {
        nested_iterator++;
      }
      ++field_iterator;
      return *this;
    }
    Value operator*() {
      StateValueDescriptor* desc = &(*field_iterator);
      StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
      return Value(desc, nested);
    }

   private:
    friend class StateValueList;

    iterator(ZoneVector<StateValueDescriptor>::iterator it,
             ZoneVector<StateValueList*>::iterator nested)
        : field_iterator(it), nested_iterator(nested) {}

    ZoneVector<StateValueDescriptor>::iterator field_iterator;
    ZoneVector<StateValueList*>::iterator nested_iterator;
  };

  void ReserveSize(size_t size) { fields_.reserve(size); }

  StateValueList* PushRecursiveField(Zone* zone, size_t id) {
    fields_.push_back(StateValueDescriptor::Recursive(id));
    StateValueList* nested =
        new (zone->New(sizeof(StateValueList))) StateValueList(zone);
    nested_.push_back(nested);
    return nested;
  }
  void PushArgumentsElements(ArgumentsStateType type) {
    fields_.push_back(StateValueDescriptor::ArgumentsElements(type));
  }
  void PushArgumentsLength(ArgumentsStateType type) {
    fields_.push_back(StateValueDescriptor::ArgumentsLength(type));
  }
  void PushDuplicate(size_t id) {
    fields_.push_back(StateValueDescriptor::Duplicate(id));
  }
  void PushPlain(MachineType type) {
    fields_.push_back(StateValueDescriptor::Plain(type));
  }
  void PushOptimizedOut() {
    fields_.push_back(StateValueDescriptor::OptimizedOut());
  }

  iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
  iterator end() { return iterator(fields_.end(), nested_.end()); }

 private:
  ZoneVector<StateValueDescriptor> fields_;
  ZoneVector<StateValueList*> nested_;
};
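// Illustrative sketch (editor's addition): describing deopt state values,
// including one nested object. `zone` and the MachineType choices are
// hypothetical.
//
//   StateValueList values(zone);
//   values.PushPlain(MachineType::AnyTagged());
//   values.PushOptimizedOut();
//   StateValueList* nested = values.PushRecursiveField(zone, /*id=*/0);
//   nested->PushPlain(MachineType::Int32());
//   for (StateValueList::Value entry : values) {
//     // entry.desc describes the field; entry.nested is set for kNested.
//   }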

class FrameStateDescriptor : public ZoneObject {
 public:
  FrameStateDescriptor(Zone* zone, FrameStateType type, BailoutId bailout_id,
                       OutputFrameStateCombine state_combine,
                       size_t parameters_count, size_t locals_count,
                       size_t stack_count,
                       MaybeHandle<SharedFunctionInfo> shared_info,
                       FrameStateDescriptor* outer_state = nullptr);

  FrameStateType type() const { return type_; }
  BailoutId bailout_id() const { return bailout_id_; }
  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
  size_t parameters_count() const { return parameters_count_; }
  size_t locals_count() const { return locals_count_; }
  size_t stack_count() const { return stack_count_; }
  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  FrameStateDescriptor* outer_state() const { return outer_state_; }
  bool HasContext() const {
    return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
           type_ == FrameStateType::kBuiltinContinuation ||
           type_ == FrameStateType::kConstructStub;
  }

  // The frame height on the stack, in number of slots, as serialized into a
  // Translation and later used by the deoptimizer. Does *not* include
  // information from the chain of outer states. Unlike |GetSize| this does not
  // always include parameters, locals, and stack slots; instead, the returned
  // slot kinds depend on the frame type.
  size_t GetHeight() const;

  // Returns an overapproximation of the unoptimized stack frame size in bytes,
  // as later produced by the deoptimizer. Considers both this and the chain of
  // outer states.
  size_t total_conservative_frame_size_in_bytes() const {
    return total_conservative_frame_size_in_bytes_;
  }

  size_t GetSize() const;
  size_t GetTotalSize() const;
  size_t GetFrameCount() const;
  size_t GetJSFrameCount() const;

  StateValueList* GetStateValueDescriptors() { return &values_; }

  static const int kImpossibleValue = 0xdead;

 private:
  FrameStateType type_;
  BailoutId bailout_id_;
  OutputFrameStateCombine frame_state_combine_;
  const size_t parameters_count_;
  const size_t locals_count_;
  const size_t stack_count_;
  const size_t total_conservative_frame_size_in_bytes_;
  StateValueList values_;
  MaybeHandle<SharedFunctionInfo> const shared_info_;
  FrameStateDescriptor* const outer_state_;
};

// A deoptimization entry is a pair of the reason why we deoptimize and the
// frame state descriptor that we have to go back to.
class DeoptimizationEntry final {
 public:
  DeoptimizationEntry() = default;
  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
                      DeoptimizeReason reason, FeedbackSource const& feedback)
      : descriptor_(descriptor),
        kind_(kind),
        reason_(reason),
        feedback_(feedback) {}

  FrameStateDescriptor* descriptor() const { return descriptor_; }
  DeoptimizeKind kind() const { return kind_; }
  DeoptimizeReason reason() const { return reason_; }
  FeedbackSource const& feedback() const { return feedback_; }

 private:
  FrameStateDescriptor* descriptor_ = nullptr;
  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
  DeoptimizeReason reason_ = DeoptimizeReason::kUnknown;
  FeedbackSource feedback_ = FeedbackSource();
};

using DeoptimizationVector = ZoneVector<DeoptimizationEntry>;

class V8_EXPORT_PRIVATE PhiInstruction final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  using Inputs = ZoneVector<InstructionOperand>;

  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);

  void SetInput(size_t offset, int virtual_register);
  void RenameInput(size_t offset, int virtual_register);

  int virtual_register() const { return virtual_register_; }
  const IntVector& operands() const { return operands_; }

  // TODO(dcarney): this has no real business being here, since it's internal to
  // the register allocator, but putting it here was convenient.
  const InstructionOperand& output() const { return output_; }
  InstructionOperand& output() { return output_; }

 private:
  const int virtual_register_;
  InstructionOperand output_;
  IntVector operands_;
};

// Analogue of BasicBlock for Instructions instead of Nodes.
class V8_EXPORT_PRIVATE InstructionBlock final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
                   RpoNumber loop_end, bool deferred, bool handler);

  // Instruction indexes (used by the register allocator).
  int first_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_start_;
  }
  int last_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_end_ - 1;
  }

  int32_t code_start() const { return code_start_; }
  void set_code_start(int32_t start) { code_start_ = start; }

  int32_t code_end() const { return code_end_; }
  void set_code_end(int32_t end) { code_end_ = end; }

  bool IsDeferred() const { return deferred_; }
  bool IsHandler() const { return handler_; }

  RpoNumber ao_number() const { return ao_number_; }
  RpoNumber rpo_number() const { return rpo_number_; }
  RpoNumber loop_header() const { return loop_header_; }
  RpoNumber loop_end() const {
    DCHECK(IsLoopHeader());
    return loop_end_;
  }
  inline bool IsLoopHeader() const { return loop_end_.IsValid(); }
  inline bool IsSwitchTarget() const { return switch_target_; }
  inline bool ShouldAlign() const { return alignment_; }

  using Predecessors = ZoneVector<RpoNumber>;
  Predecessors& predecessors() { return predecessors_; }
  const Predecessors& predecessors() const { return predecessors_; }
  size_t PredecessorCount() const { return predecessors_.size(); }
  size_t PredecessorIndexOf(RpoNumber rpo_number) const;

  using Successors = ZoneVector<RpoNumber>;
  Successors& successors() { return successors_; }
  const Successors& successors() const { return successors_; }
  size_t SuccessorCount() const { return successors_.size(); }

  using PhiInstructions = ZoneVector<PhiInstruction*>;
  const PhiInstructions& phis() const { return phis_; }
  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
  void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }

  void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }

  void set_alignment(bool val) { alignment_ = val; }

  void set_switch_target(bool val) { switch_target_ = val; }

  bool needs_frame() const { return needs_frame_; }
  void mark_needs_frame() { needs_frame_ = true; }

  bool must_construct_frame() const { return must_construct_frame_; }
  void mark_must_construct_frame() { must_construct_frame_ = true; }

  bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
  void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }

 private:
  Successors successors_;
  Predecessors predecessors_;
  PhiInstructions phis_;
  RpoNumber ao_number_;     // Assembly order number.
  const RpoNumber rpo_number_;
  const RpoNumber loop_header_;
  const RpoNumber loop_end_;
  int32_t code_start_;      // Start index of arch-specific code.
  int32_t code_end_ = -1;   // End index of arch-specific code.
  const bool deferred_;     // Block contains deferred code.
  const bool handler_;      // Block is a handler entry point.
  bool switch_target_ = false;
  bool alignment_ = false;  // Insert alignment before this block.
  bool needs_frame_ = false;
  bool must_construct_frame_ = false;
  bool must_deconstruct_frame_ = false;
};
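// Illustrative sketch (editor's addition): iterating the instructions of a
// block via its index range in the owning InstructionSequence. `sequence` and
// `block` are hypothetical.
//
//   for (int i = block->first_instruction_index();
//        i <= block->last_instruction_index(); ++i) {
//     Instruction* instr = sequence->InstructionAt(i);
//     if (instr->IsCall()) { /* e.g. record a reference map position */ }
//   }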

class InstructionSequence;

struct PrintableInstructionBlock {
  const InstructionBlock* block_;
  const InstructionSequence* code_;
};

std::ostream& operator<<(std::ostream&, const PrintableInstructionBlock&);

using ConstantDeque = ZoneDeque<Constant>;
using ConstantMap = std::map<int, Constant, std::less<int>,
                             ZoneAllocator<std::pair<const int, Constant> > >;

using InstructionDeque = ZoneDeque<Instruction*>;
using ReferenceMapDeque = ZoneDeque<ReferenceMap*>;
using InstructionBlocks = ZoneVector<InstructionBlock*>;

// Represents architecture-specific generated code before, during, and after
// register allocation.
class V8_EXPORT_PRIVATE InstructionSequence final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
1449 1450
  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
                                                 const Schedule* schedule);
  InstructionSequence(Isolate* isolate, Zone* zone,
                      InstructionBlocks* instruction_blocks);

  int NextVirtualRegister();
  int VirtualRegisterCount() const { return next_virtual_register_; }

  const InstructionBlocks& instruction_blocks() const {
    return *instruction_blocks_;
  }

  const InstructionBlocks& ao_blocks() const { return *ao_blocks_; }

  int InstructionBlockCount() const {
    return static_cast<int>(instruction_blocks_->size());
  }

  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

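  // Returns the index of the last instruction in the given loop, i.e. the
  // last instruction of the block just before the loop's end block. The
  // argument is expected to be a loop header, so that loop_end() is valid.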
  int LastLoopInstructionIndex(const InstructionBlock* block) {
    return instruction_blocks_->at(block->loop_end().ToSize() - 1)
        ->last_instruction_index();
  }

  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  InstructionBlock* GetInstructionBlock(int instruction_index) const;

  static MachineRepresentation DefaultRepresentation() {
    return MachineType::PointerRepresentation();
  }
  MachineRepresentation GetRepresentation(int virtual_register) const;
  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);

  bool IsReference(int virtual_register) const {
    return CanBeTaggedOrCompressedPointer(GetRepresentation(virtual_register));
  }
  bool IsFP(int virtual_register) const {
    return IsFloatingPoint(GetRepresentation(virtual_register));
  }
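  // representation_mask() has one bit set (see RepresentationBit) for every
  // MachineRepresentation that occurs among the virtual registers, so queries
  // like HasFPVirtualRegisters() need not scan the representations table.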
  int representation_mask() const { return representation_mask_; }
  bool HasFPVirtualRegisters() const {
    constexpr int kFPRepMask =
        RepresentationBit(MachineRepresentation::kFloat32) |
        RepresentationBit(MachineRepresentation::kFloat64) |
        RepresentationBit(MachineRepresentation::kSimd128);
    return (representation_mask() & kFPRepMask) != 0;
  }

  Instruction* GetBlockStart(RpoNumber rpo) const;

  using const_iterator = InstructionDeque::const_iterator;
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }
  const InstructionDeque& instructions() const { return instructions_; }
  int LastInstructionIndex() const {
    return static_cast<int>(instructions().size()) - 1;
  }

  Instruction* InstructionAt(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(instructions_.size(), index);
    return instructions_[index];
  }

  Isolate* isolate() const { return isolate_; }
  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
  Zone* zone() const { return zone_; }

  // Used by the instruction selector while adding instructions.
  int AddInstruction(Instruction* instr);
  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);

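  // Binds |virtual_register| to the constant that a ConstantOperand using
  // that register rematerializes to. Each virtual register may be bound to at
  // most one constant.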
  int AddConstant(int virtual_register, Constant constant) {
    // TODO(titzer): allow RPO numbers as constants?
    DCHECK_NE(Constant::kRpoNumber, constant.type());
    DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
    return virtual_register;
  }
  Constant GetConstant(int virtual_register) const {
    ConstantMap::const_iterator it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
    return it->second;
  }

  using Immediates = ZoneVector<Constant>;
  Immediates& immediates() { return immediates_; }

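  // Int32 constants without relocation info are encoded directly in the
  // returned operand (INLINE); everything else is appended to immediates_ and
  // referenced by its index (INDEXED). GetImmediate() below reverses this.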
  ImmediateOperand AddImmediate(const Constant& constant) {
    if (constant.type() == Constant::kInt32 &&
        RelocInfo::IsNone(constant.rmode())) {
      return ImmediateOperand(ImmediateOperand::INLINE, constant.ToInt32());
    }
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return ImmediateOperand(ImmediateOperand::INDEXED, index);
  }

  Constant GetImmediate(const ImmediateOperand* op) const {
    switch (op->type()) {
      case ImmediateOperand::INLINE:
        return Constant(op->inline_value());
      case ImmediateOperand::INDEXED: {
        int index = op->indexed_value();
        DCHECK_LE(0, index);
        DCHECK_GT(immediates_.size(), index);
        return immediates_[index];
      }
    }
    UNREACHABLE();
  }

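  // Records a deoptimization entry (frame state plus kind, reason and
  // feedback) and returns its id; presumably the deoptimizing instruction
  // carries that id as an operand so the entry can be looked up again.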
  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
                             DeoptimizeKind kind, DeoptimizeReason reason,
                             FeedbackSource const& feedback);
  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
  int GetDeoptimizationEntryCount() const {
    return static_cast<int>(deoptimization_entries_.size());
  }

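  // Decodes the RpoNumber carried by input |index| of |instr| (inputs that
  // name block targets, e.g. branch destinations); this is a best-guess
  // summary, see the definition for details.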
  RpoNumber InputRpo(Instruction* instr, size_t index);

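  // Source positions are kept in a side table keyed by instruction;
  // GetSourcePosition() returns false when no position was recorded for the
  // given instruction.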
  bool GetSourcePosition(const Instruction* instr,
                         SourcePosition* result) const;
  void SetSourcePosition(const Instruction* instr, SourcePosition value);

  bool ContainsCall() const {
    for (Instruction* instr : instructions_) {
      if (instr->IsCall()) return true;
    }
    return false;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  void PrintBlock(int block_id) const;

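  // Consistency checks over the whole sequence: judging by their names, they
  // verify edge-split form, the exit/entry paths of deferred blocks, and that
  // each virtual register has a single definition (SSA). Presumably meant for
  // verifier/debugging passes rather than production code paths.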
  void ValidateEdgeSplitForm() const;
  void ValidateDeferredBlockExitPaths() const;
  void ValidateDeferredBlockEntryPaths() const;
  void ValidateSSA() const;

  static void SetRegisterConfigurationForTesting(
      const RegisterConfiguration* regConfig);
  static void ClearRegisterConfigurationForTesting();

  void RecomputeAssemblyOrderForTesting();

 private:
  friend V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                                    const InstructionSequence&);

  using SourcePositionMap = ZoneMap<const Instruction*, SourcePosition>;

  static const RegisterConfiguration* RegisterConfigurationForTesting();
  static const RegisterConfiguration* registerConfigurationForTesting_;

  // Puts the deferred blocks last and may rotate loops.
  void ComputeAssemblyOrder();

  Isolate* isolate_;
  Zone* const zone_;
  InstructionBlocks* const instruction_blocks_;
  InstructionBlocks* ao_blocks_;
  SourcePositionMap source_positions_;
  ConstantMap constants_;
  Immediates immediates_;
  InstructionDeque instructions_;
  int next_virtual_register_;
  ReferenceMapDeque reference_maps_;
  ZoneVector<MachineRepresentation> representations_;
  int representation_mask_;
  DeoptimizationVector deoptimization_entries_;

  // Used at construction time.
  InstructionBlock* current_block_;

  DISALLOW_COPY_AND_ASSIGN(InstructionSequence);
};

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                           const InstructionSequence&);

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_INSTRUCTION_H_