// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/graph-assembler.h"

#include "src/codegen/code-factory.h"
#include "src/compiler/linkage.h"
#include "src/compiler/schedule.h"
// For TNode types.
#include "src/objects/heap-number.h"
#include "src/objects/oddball.h"
#include "src/objects/smi.h"
#include "src/objects/string.h"

namespace v8 {
namespace internal {
namespace compiler {

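// Helper that keeps an existing Schedule consistent while the graph assembler
// lowers the nodes of a basic block. It stays in the 'unchanged' state while
// the emitted nodes match the originally scheduled ones, and switches to the
// 'changed' state (via CopyForChange) once new nodes or control flow are
// introduced.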
class GraphAssembler::BasicBlockUpdater {
 public:
  BasicBlockUpdater(Schedule* schedule, Graph* graph,
                    CommonOperatorBuilder* common, Zone* temp_zone);

  Node* AddNode(Node* node);
  Node* AddNode(Node* node, BasicBlock* to);
  Node* AddClonedNode(Node* node);

  BasicBlock* NewBasicBlock(bool deferred);
  BasicBlock* SplitBasicBlock();
  void AddBind(BasicBlock* block);
  void AddBranch(Node* branch, BasicBlock* tblock, BasicBlock* fblock);
  void AddGoto(BasicBlock* to);
  void AddGoto(BasicBlock* from, BasicBlock* to);
  void AddTailCall(Node* node);

  void StartBlock(BasicBlock* block);
  BasicBlock* Finalize(BasicBlock* original);

  BasicBlock* original_block() { return original_block_; }
  BasicBlock::Control original_control() { return original_control_; }
  Node* original_control_input() { return original_control_input_; }

 private:
  enum State { kUnchanged, kChanged };

  Zone* temp_zone() { return temp_zone_; }

  bool IsOriginalNode(Node* node);
  void UpdateSuccessors(BasicBlock* block);
  void SetBlockDeferredFromPredecessors();
  void RemoveSuccessorsFromSchedule();
  void CopyForChange();

  Zone* temp_zone_;

  // Current basic block we are scheduling.
  BasicBlock* current_block_;

  // The original block that we are lowering.
  BasicBlock* original_block_;

  // Position in the current block, only applicable in the 'unchanged' state.
  BasicBlock::iterator node_it_;
  BasicBlock::iterator end_it_;

  Schedule* schedule_;
  Graph* graph_;
  CommonOperatorBuilder* common_;

  // The nodes in the original block if we are in 'changed' state. Retained to
  // avoid invalidating iterators that are iterating over the original nodes of
  // the block.
  NodeVector saved_nodes_;

  // The original control, control input and successors, saved so that they
  // can be restored when we finalize the block.
  struct SuccessorInfo {
    BasicBlock* block;
    size_t index;
  };
  ZoneVector<SuccessorInfo> saved_successors_;
  BasicBlock::Control original_control_;
  Node* original_control_input_;
  bool original_deferred_;
  size_t original_node_count_;

  State state_;
};

GraphAssembler::BasicBlockUpdater::BasicBlockUpdater(
    Schedule* schedule, Graph* graph, CommonOperatorBuilder* common,
    Zone* temp_zone)
    : temp_zone_(temp_zone),
      current_block_(nullptr),
      original_block_(nullptr),
      schedule_(schedule),
      graph_(graph),
      common_(common),
      saved_nodes_(schedule->zone()),
      saved_successors_(schedule->zone()),
      original_control_(BasicBlock::kNone),
      original_control_input_(nullptr),
      original_deferred_(false),
      original_node_count_(graph->NodeCount()),
      state_(kUnchanged) {}

Node* GraphAssembler::BasicBlockUpdater::AddNode(Node* node) {
  return AddNode(node, current_block_);
}

Node* GraphAssembler::BasicBlockUpdater::AddNode(Node* node, BasicBlock* to) {
  if (state_ == kUnchanged) {
    DCHECK_EQ(to, original_block());

    if (node_it_ != end_it_ && *node_it_ == node) {
      node_it_++;
      return node;
    }

    CopyForChange();
  }

  // Add the node to the basic block.
  DCHECK(!schedule_->IsScheduled(node));
  schedule_->AddNode(to, node);
  return node;
}

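// Adds a pure node that may already be placed elsewhere in the schedule,
// cloning it when re-adding the original node would make the schedule
// inconsistent.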
Node* GraphAssembler::BasicBlockUpdater::AddClonedNode(Node* node) {
  DCHECK(node->op()->HasProperty(Operator::kPure));
  if (state_ == kUnchanged) {
    CopyForChange();
  }

  if (schedule_->IsScheduled(node) &&
      schedule_->block(node) == current_block_) {
    // Node is already scheduled for the current block, don't add it again.
    return node;
  } else if (!schedule_->IsScheduled(node) && !IsOriginalNode(node)) {
    // Node is not scheduled yet, so we can add it directly.
    return AddNode(node);
  } else {
    // TODO(9684): Potentially add some per-block caching so we can avoid
    // cloning if we've already cloned for this block.
    return AddNode(graph_->CloneNode(node));
  }
}

bool GraphAssembler::BasicBlockUpdater::IsOriginalNode(Node* node) {
  // Return true if node was part of the original schedule and might currently
  // be re-added to the schedule after a CopyForChange.
  return node->id() < original_node_count_;
}

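// Transitions from the 'unchanged' to the 'changed' state: saves the original
// block's successors, control and not-yet-emitted nodes so that the block can
// be rebuilt from the nodes the graph assembler emits.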
void GraphAssembler::BasicBlockUpdater::CopyForChange() {
  DCHECK_EQ(kUnchanged, state_);

  // Save successor.
  DCHECK(saved_successors_.empty());
  for (BasicBlock* successor : original_block()->successors()) {
    for (size_t i = 0; i < successor->PredecessorCount(); i++) {
      if (successor->PredecessorAt(i) == original_block()) {
        saved_successors_.push_back({successor, i});
        break;
      }
    }
  }
  DCHECK_EQ(saved_successors_.size(), original_block()->SuccessorCount());

  // Save control.
  original_control_ = original_block()->control();
  original_control_input_ = original_block()->control_input();

  // Save original nodes (to allow them to continue to be iterated by the user
  // of graph assembler).
  original_block()->nodes()->swap(saved_nodes_);
  DCHECK(original_block()->nodes()->empty());

  // Re-insert the nodes from the front of the block.
  original_block()->InsertNodes(original_block()->begin(), saved_nodes_.begin(),
                                node_it_);

  // Remove the tail from the schedule.
  for (; node_it_ != end_it_; node_it_++) {
    schedule_->SetBlockForNode(nullptr, *node_it_);
  }

  // Reset the control.
  if (original_block()->control() != BasicBlock::kGoto) {
    schedule_->SetBlockForNode(nullptr, original_block()->control_input());
  }
  original_block()->set_control_input(nullptr);
  original_block()->set_control(BasicBlock::kNone);
  original_block()->ClearSuccessors();

  state_ = kChanged;
  end_it_ = {};
  node_it_ = {};
}

BasicBlock* GraphAssembler::BasicBlockUpdater::NewBasicBlock(bool deferred) {
  BasicBlock* block = schedule_->NewBasicBlock();
  block->set_deferred(deferred || original_deferred_);
  return block;
}

BasicBlock* GraphAssembler::BasicBlockUpdater::SplitBasicBlock() {
  return NewBasicBlock(current_block_->deferred());
}

void GraphAssembler::BasicBlockUpdater::AddBind(BasicBlock* to) {
  DCHECK_NOT_NULL(to);
  current_block_ = to;
  // Basic block should only have the control node, if any.
  DCHECK_LE(current_block_->NodeCount(), 1);
  SetBlockDeferredFromPredecessors();
}

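// Marks the current block as deferred if all of its predecessors are deferred.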
void GraphAssembler::BasicBlockUpdater::SetBlockDeferredFromPredecessors() {
  if (!current_block_->deferred()) {
    bool deferred = true;
    for (BasicBlock* pred : current_block_->predecessors()) {
      if (!pred->deferred()) {
        deferred = false;
        break;
      }
    }
    current_block_->set_deferred(deferred);
  }
}

void GraphAssembler::BasicBlockUpdater::AddBranch(Node* node,
                                                  BasicBlock* tblock,
                                                  BasicBlock* fblock) {
  if (state_ == kUnchanged) {
    DCHECK_EQ(current_block_, original_block());
    CopyForChange();
  }

  DCHECK_EQ(state_, kChanged);
  schedule_->AddBranch(current_block_, node, tblock, fblock);
  current_block_ = nullptr;
}

void GraphAssembler::BasicBlockUpdater::AddGoto(BasicBlock* to) {
  DCHECK_NOT_NULL(current_block_);
  AddGoto(current_block_, to);
}

void GraphAssembler::BasicBlockUpdater::AddGoto(BasicBlock* from,
                                                BasicBlock* to) {
  if (state_ == kUnchanged) {
    CopyForChange();
  }

  if (to->deferred() && !from->deferred()) {
    // Add a new block with the correct deferred hint to avoid merges into the
    // target block with different deferred hints.
    // TODO(9684): Only split the current basic block if the label's target
    // block has multiple merges.
    BasicBlock* new_block = NewBasicBlock(to->deferred());
    schedule_->AddGoto(from, new_block);
    from = new_block;
  }

  schedule_->AddGoto(from, to);
  current_block_ = nullptr;
}

void GraphAssembler::BasicBlockUpdater::AddTailCall(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kTailCall);
  DCHECK_NOT_NULL(current_block_);

  if (state_ == kUnchanged) {
    CopyForChange();
  }

  schedule_->AddTailCall(current_block_, node);
  current_block_ = nullptr;
}

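// Reattaches the successors and control saved by CopyForChange to |block| when
// a changed block is finalized.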
void GraphAssembler::BasicBlockUpdater::UpdateSuccessors(BasicBlock* block) {
  for (SuccessorInfo succ : saved_successors_) {
    (succ.block->predecessors())[succ.index] = block;
    block->AddSuccessor(succ.block);
  }
  saved_successors_.clear();
  block->set_control(original_control_);
  block->set_control_input(original_control_input_);
  if (original_control_input_ != nullptr) {
    schedule_->SetBlockForNode(block, original_control_input_);
  } else {
    DCHECK_EQ(BasicBlock::kGoto, original_control_);
  }
}

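// Starts lowering |block|: records the range of its original nodes and its
// deferred flag, and resets the updater to the 'unchanged' state.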
void GraphAssembler::BasicBlockUpdater::StartBlock(BasicBlock* block) {
  DCHECK_NULL(current_block_);
  DCHECK_NULL(original_block_);
  DCHECK(saved_nodes_.empty());
  block->ResetRPOInfo();
  current_block_ = block;
  original_block_ = block;
  original_deferred_ = block->deferred();
  node_it_ = block->begin();
  end_it_ = block->end();
  state_ = kUnchanged;
}

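// Finishes lowering of the original block. For a changed block the saved
// successors and control are attached to the last emitted block; otherwise any
// original nodes that were not re-emitted are trimmed. Returns the block that
// now ends the lowered code.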
BasicBlock* GraphAssembler::BasicBlockUpdater::Finalize(BasicBlock* original) {
  DCHECK_EQ(original, original_block());
  BasicBlock* block = current_block_;
  if (state_ == kChanged) {
    UpdateSuccessors(block);
  } else {
    DCHECK_EQ(block, original_block());
    if (node_it_ != end_it_) {
      // We have not reached the end of the node list, so trim the remaining
      // nodes.
      block->TrimNodes(node_it_);
    }
  }
  original_control_ = BasicBlock::kNone;
  saved_nodes_.clear();
  original_deferred_ = false;
  original_control_input_ = nullptr;
  original_block_ = nullptr;
  current_block_ = nullptr;
  return block;
}

GraphAssembler::GraphAssembler(
    MachineGraph* mcgraph, Zone* zone,
    base::Optional<NodeChangedCallback> node_changed_callback,
    Schedule* schedule, bool mark_loop_exits)
    : temp_zone_(zone),
      mcgraph_(mcgraph),
      effect_(nullptr),
      control_(nullptr),
      node_changed_callback_(node_changed_callback),
      block_updater_(schedule != nullptr
                         ? new BasicBlockUpdater(schedule, mcgraph->graph(),
                                                 mcgraph->common(), zone)
                         : nullptr),
      loop_headers_(zone),
      mark_loop_exits_(mark_loop_exits) {}

GraphAssembler::~GraphAssembler() { DCHECK_EQ(loop_nesting_level_, 0); }

Node* GraphAssembler::IntPtrConstant(intptr_t value) {
  return AddClonedNode(mcgraph()->IntPtrConstant(value));
}

Node* GraphAssembler::UintPtrConstant(uintptr_t value) {
  return AddClonedNode(mcgraph()->UintPtrConstant(value));
}

Node* GraphAssembler::Int32Constant(int32_t value) {
  return AddClonedNode(mcgraph()->Int32Constant(value));
}

Node* GraphAssembler::Int64Constant(int64_t value) {
  return AddClonedNode(mcgraph()->Int64Constant(value));
}

Node* GraphAssembler::UniqueIntPtrConstant(intptr_t value) {
  return AddNode(graph()->NewNode(
      machine()->Is64()
          ? common()->Int64Constant(value)
          : common()->Int32Constant(static_cast<int32_t>(value))));
}

Node* JSGraphAssembler::SmiConstant(int32_t value) {
  return AddClonedNode(jsgraph()->SmiConstant(value));
}

Node* GraphAssembler::Uint32Constant(uint32_t value) {
  return AddClonedNode(mcgraph()->Uint32Constant(value));
}

Node* GraphAssembler::Float64Constant(double value) {
  return AddClonedNode(mcgraph()->Float64Constant(value));
}

TNode<HeapObject> JSGraphAssembler::HeapConstant(Handle<HeapObject> object) {
  return TNode<HeapObject>::UncheckedCast(
      AddClonedNode(jsgraph()->HeapConstant(object)));
}

TNode<Object> JSGraphAssembler::Constant(const ObjectRef& ref) {
  return TNode<Object>::UncheckedCast(AddClonedNode(jsgraph()->Constant(ref)));
}

TNode<Number> JSGraphAssembler::NumberConstant(double value) {
  return TNode<Number>::UncheckedCast(
      AddClonedNode(jsgraph()->Constant(value)));
}

Node* GraphAssembler::ExternalConstant(ExternalReference ref) {
  return AddClonedNode(mcgraph()->ExternalConstant(ref));
}

Node* GraphAssembler::Parameter(int index) {
  return AddNode(
      graph()->NewNode(common()->Parameter(index), graph()->start()));
}

Node* JSGraphAssembler::CEntryStubConstant(int result_size) {
  return AddClonedNode(jsgraph()->CEntryStubConstant(result_size));
}

Node* GraphAssembler::LoadFramePointer() {
  return AddNode(graph()->NewNode(machine()->LoadFramePointer()));
}

Node* GraphAssembler::LoadHeapNumberValue(Node* heap_number) {
  return Load(MachineType::Float64(), heap_number,
              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
}

#define SINGLETON_CONST_DEF(Name, Type)              \
  TNode<Type> JSGraphAssembler::Name##Constant() {   \
    return TNode<Type>::UncheckedCast(               \
        AddClonedNode(jsgraph()->Name##Constant())); \
  }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_DEF)
#undef SINGLETON_CONST_DEF

#define SINGLETON_CONST_TEST_DEF(Name, ...)                        \
  TNode<Boolean> JSGraphAssembler::Is##Name(TNode<Object> value) { \
    return TNode<Boolean>::UncheckedCast(                          \
        ReferenceEqual(value, Name##Constant()));                  \
  }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_TEST_DEF)
#undef SINGLETON_CONST_TEST_DEF

#define PURE_UNOP_DEF(Name)                                     \
  Node* GraphAssembler::Name(Node* input) {                     \
    return AddNode(graph()->NewNode(machine()->Name(), input)); \
  }
PURE_ASSEMBLER_MACH_UNOP_LIST(PURE_UNOP_DEF)
#undef PURE_UNOP_DEF

#define PURE_BINOP_DEF(Name)                                          \
  Node* GraphAssembler::Name(Node* left, Node* right) {               \
    return AddNode(graph()->NewNode(machine()->Name(), left, right)); \
  }
PURE_ASSEMBLER_MACH_BINOP_LIST(PURE_BINOP_DEF)
#undef PURE_BINOP_DEF

#define CHECKED_BINOP_DEF(Name)                                       \
  Node* GraphAssembler::Name(Node* left, Node* right) {               \
    return AddNode(                                                   \
        graph()->NewNode(machine()->Name(), left, right, control())); \
  }
CHECKED_ASSEMBLER_MACH_BINOP_LIST(CHECKED_BINOP_DEF)
#undef CHECKED_BINOP_DEF

Node* GraphAssembler::IntPtrEqual(Node* left, Node* right) {
  return WordEqual(left, right);
}

Node* GraphAssembler::TaggedEqual(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Word32Equal(left, right);
  } else {
    return WordEqual(left, right);
  }
}

Node* GraphAssembler::SmiSub(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Int32Sub(left, right);
  } else {
    return IntSub(left, right);
  }
}

Node* GraphAssembler::SmiLessThan(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Int32LessThan(left, right);
  } else {
    return IntLessThan(left, right);
  }
}

Node* GraphAssembler::Float64RoundDown(Node* value) {
  CHECK(machine()->Float64RoundDown().IsSupported());
  return AddNode(graph()->NewNode(machine()->Float64RoundDown().op(), value));
}

Node* GraphAssembler::Float64RoundTruncate(Node* value) {
  CHECK(machine()->Float64RoundTruncate().IsSupported());
  return AddNode(
      graph()->NewNode(machine()->Float64RoundTruncate().op(), value));
}

Node* GraphAssembler::TruncateFloat64ToInt64(Node* value, TruncateKind kind) {
  return AddNode(
      graph()->NewNode(machine()->TruncateFloat64ToInt64(kind), value));
}

Node* GraphAssembler::Projection(int index, Node* value) {
  return AddNode(
      graph()->NewNode(common()->Projection(index), value, control()));
}

Node* JSGraphAssembler::Allocate(AllocationType allocation, Node* size) {
  return AddNode(
      graph()->NewNode(simplified()->AllocateRaw(Type::Any(), allocation), size,
                       effect(), control()));
}

Node* JSGraphAssembler::LoadField(FieldAccess const& access, Node* object) {
  Node* value = AddNode(graph()->NewNode(simplified()->LoadField(access),
                                         object, effect(), control()));
  return value;
}

Node* JSGraphAssembler::LoadElement(ElementAccess const& access, Node* object,
                                    Node* index) {
  Node* value = AddNode(graph()->NewNode(simplified()->LoadElement(access),
                                         object, index, effect(), control()));
  return value;
}

Node* JSGraphAssembler::StoreField(FieldAccess const& access, Node* object,
                                   Node* value) {
  return AddNode(graph()->NewNode(simplified()->StoreField(access), object,
                                  value, effect(), control()));
}

Node* JSGraphAssembler::StoreElement(ElementAccess const& access, Node* object,
                                     Node* index, Node* value) {
  return AddNode(graph()->NewNode(simplified()->StoreElement(access), object,
                                  index, value, effect(), control()));
}

void JSGraphAssembler::TransitionAndStoreElement(MapRef double_map,
                                                 MapRef fast_map,
                                                 TNode<HeapObject> object,
                                                 TNode<Number> index,
                                                 TNode<Object> value) {
  AddNode(graph()->NewNode(simplified()->TransitionAndStoreElement(
                               double_map.object(), fast_map.object()),
                           object, index, value, effect(), control()));
}

TNode<Number> JSGraphAssembler::StringLength(TNode<String> string) {
  return AddNode<Number>(
      graph()->NewNode(simplified()->StringLength(), string));
}

TNode<Boolean> JSGraphAssembler::ReferenceEqual(TNode<Object> lhs,
                                                TNode<Object> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->ReferenceEqual(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberMin(TNode<Number> lhs,
                                          TNode<Number> rhs) {
  return AddNode<Number>(graph()->NewNode(simplified()->NumberMin(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberMax(TNode<Number> lhs,
                                          TNode<Number> rhs) {
  return AddNode<Number>(graph()->NewNode(simplified()->NumberMax(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberAdd(TNode<Number> lhs,
                                          TNode<Number> rhs) {
  return AddNode<Number>(graph()->NewNode(simplified()->NumberAdd(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberSubtract(TNode<Number> lhs,
                                               TNode<Number> rhs) {
  return AddNode<Number>(
      graph()->NewNode(simplified()->NumberSubtract(), lhs, rhs));
}

TNode<Boolean> JSGraphAssembler::NumberLessThan(TNode<Number> lhs,
                                                TNode<Number> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberLessThan(), lhs, rhs));
}

TNode<Boolean> JSGraphAssembler::NumberLessThanOrEqual(TNode<Number> lhs,
                                                       TNode<Number> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberLessThanOrEqual(), lhs, rhs));
}

TNode<String> JSGraphAssembler::StringSubstring(TNode<String> string,
                                                TNode<Number> from,
                                                TNode<Number> to) {
  return AddNode<String>(graph()->NewNode(
      simplified()->StringSubstring(), string, from, to, effect(), control()));
}

TNode<Boolean> JSGraphAssembler::ObjectIsCallable(TNode<Object> value) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->ObjectIsCallable(), value));
}

TNode<Boolean> JSGraphAssembler::ObjectIsUndetectable(TNode<Object> value) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->ObjectIsUndetectable(), value));
}

Node* JSGraphAssembler::CheckIf(Node* cond, DeoptimizeReason reason) {
  return AddNode(graph()->NewNode(simplified()->CheckIf(reason), cond, effect(),
                                  control()));
}

TNode<Boolean> JSGraphAssembler::NumberIsFloat64Hole(TNode<Number> value) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberIsFloat64Hole(), value));
}

TNode<Boolean> JSGraphAssembler::ToBoolean(TNode<Object> value) {
  return AddNode<Boolean>(graph()->NewNode(simplified()->ToBoolean(), value));
}

TNode<Object> JSGraphAssembler::ConvertTaggedHoleToUndefined(
    TNode<Object> value) {
  return AddNode<Object>(
      graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(), value));
}

TNode<FixedArrayBase> JSGraphAssembler::MaybeGrowFastElements(
    ElementsKind kind, const FeedbackSource& feedback, TNode<JSArray> array,
    TNode<FixedArrayBase> elements, TNode<Number> new_length,
    TNode<Number> old_length) {
  GrowFastElementsMode mode = IsDoubleElementsKind(kind)
                                  ? GrowFastElementsMode::kDoubleElements
                                  : GrowFastElementsMode::kSmiOrObjectElements;
  return AddNode<FixedArrayBase>(graph()->NewNode(
      simplified()->MaybeGrowFastElements(mode, feedback), array, elements,
      new_length, old_length, effect(), control()));
}

Node* GraphAssembler::TypeGuard(Type type, Node* value) {
  return AddNode(
      graph()->NewNode(common()->TypeGuard(type), value, effect(), control()));
}

Node* GraphAssembler::Checkpoint(FrameState frame_state) {
  return AddNode(graph()->NewNode(common()->Checkpoint(), frame_state, effect(),
                                  control()));
}

Node* GraphAssembler::DebugBreak() {
  return AddNode(
      graph()->NewNode(machine()->DebugBreak(), effect(), control()));
}

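// Adds an Unreachable node. Without a block updater it is connected to End and
// the current effect/control are cleared; with a block updater we instead jump
// to |block_updater_successor| so the schedule stays well-formed.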
Node* GraphAssembler::Unreachable(
    GraphAssemblerLabel<0u>* block_updater_successor) {
  Node* result = UnreachableWithoutConnectToEnd();
  if (block_updater_ == nullptr) {
    ConnectUnreachableToEnd();
    InitializeEffectControl(nullptr, nullptr);
  } else {
    DCHECK_NOT_NULL(block_updater_successor);
    Goto(block_updater_successor);
  }
  return result;
}

Node* GraphAssembler::UnreachableWithoutConnectToEnd() {
  return AddNode(
      graph()->NewNode(common()->Unreachable(), effect(), control()));
}

TNode<RawPtrT> GraphAssembler::StackSlot(int size, int alignment) {
  return AddNode<RawPtrT>(
      graph()->NewNode(machine()->StackSlot(size, alignment)));
}

Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, Node* offset,
                            Node* value) {
  return AddNode(graph()->NewNode(machine()->Store(rep), object, offset, value,
                                  effect(), control()));
}

Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, int offset,
                            Node* value) {
  return Store(rep, object, Int32Constant(offset), value);
}

Node* GraphAssembler::Load(MachineType type, Node* object, Node* offset) {
  return AddNode(graph()->NewNode(machine()->Load(type), object, offset,
                                  effect(), control()));
}

Node* GraphAssembler::Load(MachineType type, Node* object, int offset) {
  return Load(type, object, Int32Constant(offset));
}

Node* GraphAssembler::StoreUnaligned(MachineRepresentation rep, Node* object,
                                     Node* offset, Node* value) {
  Operator const* const op =
      (rep == MachineRepresentation::kWord8 ||
       machine()->UnalignedStoreSupported(rep))
          ? machine()->Store(StoreRepresentation(rep, kNoWriteBarrier))
          : machine()->UnalignedStore(rep);
  return AddNode(
      graph()->NewNode(op, object, offset, value, effect(), control()));
}

Node* GraphAssembler::LoadUnaligned(MachineType type, Node* object,
                                    Node* offset) {
  Operator const* const op =
      (type.representation() == MachineRepresentation::kWord8 ||
       machine()->UnalignedLoadSupported(type.representation()))
          ? machine()->Load(type)
          : machine()->UnalignedLoad(type);
  return AddNode(graph()->NewNode(op, object, offset, effect(), control()));
}

Node* GraphAssembler::ProtectedStore(MachineRepresentation rep, Node* object,
                                     Node* offset, Node* value) {
  return AddNode(graph()->NewNode(machine()->ProtectedStore(rep), object,
                                  offset, value, effect(), control()));
}

Node* GraphAssembler::ProtectedLoad(MachineType type, Node* object,
                                    Node* offset) {
  return AddNode(graph()->NewNode(machine()->ProtectedLoad(type), object,
                                  offset, effect(), control()));
}

Node* GraphAssembler::Retain(Node* buffer) {
  return AddNode(graph()->NewNode(common()->Retain(), buffer, effect()));
}

Node* GraphAssembler::UnsafePointerAdd(Node* base, Node* external) {
  return AddNode(graph()->NewNode(machine()->UnsafePointerAdd(), base, external,
                                  effect(), control()));
}

TNode<Number> JSGraphAssembler::PlainPrimitiveToNumber(TNode<Object> value) {
  return AddNode<Number>(graph()->NewNode(
      PlainPrimitiveToNumberOperator(), PlainPrimitiveToNumberBuiltinConstant(),
      value, effect()));
}

Node* GraphAssembler::BitcastWordToTaggedSigned(Node* value) {
  return AddNode(
      graph()->NewNode(machine()->BitcastWordToTaggedSigned(), value));
}

Node* GraphAssembler::BitcastWordToTagged(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastWordToTagged(), value,
                                  effect(), control()));
}

Node* GraphAssembler::BitcastTaggedToWord(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastTaggedToWord(), value,
                                  effect(), control()));
}

Node* GraphAssembler::BitcastTaggedToWordForTagAndSmiBits(Node* value) {
  return AddNode(graph()->NewNode(
      machine()->BitcastTaggedToWordForTagAndSmiBits(), value));
}

Node* GraphAssembler::BitcastMaybeObjectToWord(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastMaybeObjectToWord(), value,
                                  effect(), control()));
}

Node* GraphAssembler::Word32PoisonOnSpeculation(Node* value) {
  return AddNode(graph()->NewNode(machine()->Word32PoisonOnSpeculation(), value,
                                  effect(), control()));
}

Node* GraphAssembler::DeoptimizeIf(DeoptimizeReason reason,
                                   FeedbackSource const& feedback,
                                   Node* condition, Node* frame_state,
                                   IsSafetyCheck is_safety_check) {
  return AddNode(
      graph()->NewNode(common()->DeoptimizeIf(DeoptimizeKind::kEager, reason,
                                              feedback, is_safety_check),
                       condition, frame_state, effect(), control()));
}

Node* GraphAssembler::DeoptimizeIf(DeoptimizeKind kind, DeoptimizeReason reason,
                                   FeedbackSource const& feedback,
                                   Node* condition, Node* frame_state,
                                   IsSafetyCheck is_safety_check) {
  return AddNode(graph()->NewNode(
      common()->DeoptimizeIf(kind, reason, feedback, is_safety_check),
      condition, frame_state, effect(), control()));
}

Node* GraphAssembler::DeoptimizeIfNot(DeoptimizeKind kind,
                                      DeoptimizeReason reason,
                                      FeedbackSource const& feedback,
                                      Node* condition, Node* frame_state,
                                      IsSafetyCheck is_safety_check) {
  return AddNode(graph()->NewNode(
      common()->DeoptimizeUnless(kind, reason, feedback, is_safety_check),
      condition, frame_state, effect(), control()));
}

Node* GraphAssembler::DeoptimizeIfNot(DeoptimizeReason reason,
                                      FeedbackSource const& feedback,
                                      Node* condition, Node* frame_state,
                                      IsSafetyCheck is_safety_check) {
  return DeoptimizeIfNot(DeoptimizeKind::kEager, reason, feedback, condition,
                         frame_state, is_safety_check);
}

Node* GraphAssembler::DynamicCheckMapsWithDeoptUnless(Node* condition,
                                                      Node* slot_index,
                                                      Node* value, Node* map,
                                                      Node* frame_state) {
  return AddNode(graph()->NewNode(common()->DynamicCheckMapsWithDeoptUnless(),
                                  condition, slot_index, value, map,
                                  frame_state, effect(), control()));
}

TNode<Object> GraphAssembler::Call(const CallDescriptor* call_descriptor,
                                   int inputs_size, Node** inputs) {
  return Call(common()->Call(call_descriptor), inputs_size, inputs);
}

TNode<Object> GraphAssembler::Call(const Operator* op, int inputs_size,
                                   Node** inputs) {
  DCHECK_EQ(IrOpcode::kCall, op->opcode());
  return AddNode<Object>(graph()->NewNode(op, inputs_size, inputs));
}

void GraphAssembler::TailCall(const CallDescriptor* call_descriptor,
                              int inputs_size, Node** inputs) {
#ifdef DEBUG
  static constexpr int kTargetEffectControl = 3;
  DCHECK_EQ(inputs_size,
            call_descriptor->ParameterCount() + kTargetEffectControl);
#endif  // DEBUG

  Node* node = AddNode(graph()->NewNode(common()->TailCall(call_descriptor),
                                        inputs_size, inputs));

  if (block_updater_) block_updater_->AddTailCall(node);

  // Unlike ConnectUnreachableToEnd, the TailCall node terminates a block; to
  // keep it live, it *must* be connected to End (also in Turboprop schedules).
  NodeProperties::MergeControlToEnd(graph(), common(), node);

  // Setting effect, control to nullptr effectively terminates the current block
  // by disallowing the addition of new nodes until a new label has been bound.
  InitializeEffectControl(nullptr, nullptr);
}

void GraphAssembler::BranchWithCriticalSafetyCheck(
    Node* condition, GraphAssemblerLabel<0u>* if_true,
    GraphAssemblerLabel<0u>* if_false) {
  BranchHint hint = BranchHint::kNone;
  if (if_true->IsDeferred() != if_false->IsDeferred()) {
    hint = if_false->IsDeferred() ? BranchHint::kTrue : BranchHint::kFalse;
  }

  BranchImpl(condition, if_true, if_false, hint,
             IsSafetyCheck::kCriticalSafetyCheck);
}

void GraphAssembler::RecordBranchInBlockUpdater(Node* branch,
                                                Node* if_true_control,
                                                Node* if_false_control,
                                                BasicBlock* if_true_block,
                                                BasicBlock* if_false_block) {
  DCHECK_NOT_NULL(block_updater_);
  // TODO(9684): Only split the current basic block if the label's target
  // block has multiple merges.
  BasicBlock* if_true_target = block_updater_->SplitBasicBlock();
  BasicBlock* if_false_target = block_updater_->SplitBasicBlock();

  block_updater_->AddBranch(branch, if_true_target, if_false_target);

  block_updater_->AddNode(if_true_control, if_true_target);
  block_updater_->AddGoto(if_true_target, if_true_block);

  block_updater_->AddNode(if_false_control, if_false_target);
  block_updater_->AddGoto(if_false_target, if_false_block);
}

void GraphAssembler::BindBasicBlock(BasicBlock* block) {
  if (block_updater_) {
    block_updater_->AddBind(block);
  }
}

BasicBlock* GraphAssembler::NewBasicBlock(bool deferred) {
  if (!block_updater_) return nullptr;
  return block_updater_->NewBasicBlock(deferred);
}

void GraphAssembler::GotoBasicBlock(BasicBlock* block) {
  if (block_updater_) {
    block_updater_->AddGoto(block);
  }
}

void GraphAssembler::GotoIfBasicBlock(BasicBlock* block, Node* branch,
                                      IrOpcode::Value goto_if) {
  if (block_updater_) {
    // TODO(9684): Only split the current basic block for the goto_target
    // if block has multiple merges.
    BasicBlock* goto_target = block_updater_->SplitBasicBlock();
    BasicBlock* fallthrough_target = block_updater_->SplitBasicBlock();

    if (goto_if == IrOpcode::kIfTrue) {
      block_updater_->AddBranch(branch, goto_target, fallthrough_target);
    } else {
      DCHECK_EQ(goto_if, IrOpcode::kIfFalse);
      block_updater_->AddBranch(branch, fallthrough_target, goto_target);
    }

    block_updater_->AddNode(control(), goto_target);
    block_updater_->AddGoto(goto_target, block);

    block_updater_->AddBind(fallthrough_target);
  }
}

BasicBlock* GraphAssembler::FinalizeCurrentBlock(BasicBlock* block) {
  if (block_updater_) {
    block = block_updater_->Finalize(block);
    if (control() == mcgraph()->Dead()) {
      // If the block's end is unreachable, then reset current effect and
      // control to that of the block's throw control node.
      DCHECK(block->control() == BasicBlock::kThrow);
      Node* throw_node = block->control_input();
      control_ = NodeProperties::GetControlInput(throw_node);
      effect_ = NodeProperties::GetEffectInput(throw_node);
    }
  }
  return block;
}

void GraphAssembler::ConnectUnreachableToEnd() {
  DCHECK_EQ(effect()->opcode(), IrOpcode::kUnreachable);
  // When maintaining the schedule we can't easily rewire the successor blocks
  // to disconnect them from the graph, so we just leave the unreachable nodes
  // in the schedule.
  // TODO(9684): Add a scheduled dead-code elimination phase to remove all the
  // subsequent unreachable code from the schedule.
  if (!block_updater_) {
    Node* throw_node = graph()->NewNode(common()->Throw(), effect(), control());
    NodeProperties::MergeControlToEnd(graph(), common(), throw_node);
    if (node_changed_callback_.has_value()) {
      (*node_changed_callback_)(graph()->end());
    }
    effect_ = control_ = mcgraph()->Dead();
  }
}

Node* GraphAssembler::AddClonedNode(Node* node) {
  DCHECK(node->op()->HasProperty(Operator::kPure));
  if (block_updater_) {
    node = block_updater_->AddClonedNode(node);
  }

  UpdateEffectControlWith(node);
  return node;
}

Node* GraphAssembler::AddNode(Node* node) {
  if (block_updater_) {
    block_updater_->AddNode(node);
  }

  if (node->opcode() == IrOpcode::kTerminate) {
    return node;
  }

  UpdateEffectControlWith(node);
  return node;
}

void GraphAssembler::Reset(BasicBlock* block) {
  effect_ = nullptr;
  control_ = nullptr;
  if (block_updater_) {
    block_updater_->StartBlock(block);
  }
}

void GraphAssembler::InitializeEffectControl(Node* effect, Node* control) {
  effect_ = effect;
  control_ = control;
}

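// Lazily creates and caches the Call operator for the PlainPrimitiveToNumber
// builtin.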
Operator const* JSGraphAssembler::PlainPrimitiveToNumberOperator() {
  if (!to_number_operator_.is_set()) {
    Callable callable =
        Builtins::CallableFor(isolate(), Builtins::kPlainPrimitiveToNumber);
    CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        graph()->zone(), callable.descriptor(),
        callable.descriptor().GetStackParameterCount(), flags,
        Operator::kEliminatable);
    to_number_operator_.set(common()->Call(call_descriptor));
  }
  return to_number_operator_.get();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8