// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

#include <limits>

#include "src/base/flags.h"
#include "src/bootstrapper.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/node.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/type-cache.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

27
typedef NodeId Alias;
28

29 30 31 32 33 34 35 36 37
#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121
// EscapeStatusAnalysis determines for each allocation whether it escapes.
class EscapeStatusAnalysis : public ZoneObject {
 public:
  enum Status {
    kUnknown = 0u,
    kTracked = 1u << 0,
    kEscaped = 1u << 1,
    kOnStack = 1u << 2,
    kVisited = 1u << 3,
    // A node is dangling, if it is a load of some kind, and does not have
    // an effect successor.
    kDanglingComputed = 1u << 4,
    kDangling = 1u << 5,
    // A node is is an effect branch point, if it has more than 2 non-dangling
    // effect successors.
    kBranchPointComputed = 1u << 6,
    kBranchPoint = 1u << 7,
    kInQueue = 1u << 8
  };
  typedef base::Flags<Status, uint16_t> StatusFlags;

  void RunStatusAnalysis();

  bool IsVirtual(Node* node);
  bool IsEscaped(Node* node);
  bool IsAllocation(Node* node);

  bool IsInQueue(NodeId id);
  void SetInQueue(NodeId id, bool on_stack);

  void DebugPrint();

  EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
                       Zone* zone);
  void EnqueueForStatusAnalysis(Node* node);
  bool SetEscaped(Node* node);
  bool IsEffectBranchPoint(Node* node);
  bool IsDanglingEffectNode(Node* node);
  void ResizeStatusVector();
  size_t GetStatusVectorSize();
  bool IsVirtual(NodeId id);

  Graph* graph() const { return graph_; }
  void AssignAliases();
  Alias GetAlias(NodeId id) const { return aliases_[id]; }
  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
  Alias AliasCount() const { return next_free_alias_; }
  static const Alias kNotReachable;
  static const Alias kUntrackable;

  bool IsNotReachable(Node* node);

 private:
  void Process(Node* node);
  void ProcessAllocate(Node* node);
  void ProcessFinishRegion(Node* node);
  void ProcessStoreField(Node* node);
  void ProcessStoreElement(Node* node);
  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
    return CheckUsesForEscape(node, node, phi_escaping);
  }
  bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
  void RevisitUses(Node* node);
  void RevisitInputs(Node* node);

  Alias NextAlias() { return next_free_alias_++; }

  bool HasEntry(Node* node);

  bool IsAllocationPhi(Node* node);

  ZoneVector<Node*> stack_;
  EscapeAnalysis* object_analysis_;
  Graph* const graph_;
  ZoneVector<StatusFlags> status_;
  Alias next_free_alias_;
  ZoneVector<Node*> status_stack_;
  ZoneVector<Alias> aliases_;

  DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
};

DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)

122
const Alias EscapeStatusAnalysis::kNotReachable =
123
    std::numeric_limits<Alias>::max();
124
const Alias EscapeStatusAnalysis::kUntrackable =
125 126
    std::numeric_limits<Alias>::max() - 1;

127 128
class VirtualObject : public ZoneObject {
 public:
129 130 131 132 133 134 135 136 137
  enum Status {
    kInitial = 0,
    kTracked = 1u << 0,
    kInitialized = 1u << 1,
    kCopyRequired = 1u << 2,
  };
  typedef base::Flags<Status, unsigned char> StatusFlags;

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
138
      : id_(id),
139
        status_(kInitial),
140 141
        fields_(zone),
        phi_(zone),
142 143
        object_state_(nullptr),
        owner_(owner) {}
144

145
  VirtualObject(VirtualState* owner, const VirtualObject& other)
146
      : id_(other.id_),
147
        status_(other.status_ & ~kCopyRequired),
148
        fields_(other.fields_),
149
        phi_(other.phi_),
150 151
        object_state_(other.object_state_),
        owner_(owner) {}
152

153 154
  VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
                bool initialized)
155
      : id_(id),
156
        status_(kTracked | (initialized ? kInitialized : kInitial)),
157 158
        fields_(zone),
        phi_(zone),
159 160
        object_state_(nullptr),
        owner_(owner) {
161
    fields_.resize(field_number);
162
    phi_.resize(field_number, false);
163 164
  }

165
  Node* GetField(size_t offset) { return fields_[offset]; }
166

167
  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }
168

169
  void SetField(size_t offset, Node* node, bool created_phi = false) {
170
    fields_[offset] = node;
171
    phi_[offset] = created_phi;
172
  }
173 174 175 176
  bool IsTracked() const { return status_ & kTracked; }
  bool IsInitialized() const { return status_ & kInitialized; }
  bool SetInitialized() { return status_ |= kInitialized; }
  VirtualState* owner() const { return owner_; }
177

178 179 180
  Node** fields_array() { return &fields_.front(); }
  size_t field_count() { return fields_.size(); }
  bool ResizeFields(size_t field_count) {
181
    if (field_count > fields_.size()) {
182
      fields_.resize(field_count);
183
      phi_.resize(field_count);
184 185 186 187
      return true;
    }
    return false;
  }
188 189 190 191 192 193 194
  void ClearAllFields() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      fields_[i] = nullptr;
      phi_[i] = false;
    }
  }
  bool AllFieldsClear() {
195 196
    for (size_t i = 0; i < fields_.size(); ++i) {
      if (fields_[i] != nullptr) {
197
        return false;
198 199
      }
    }
200
    return true;
201
  }
202
  bool UpdateFrom(const VirtualObject& other);
203 204
  bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                 CommonOperatorBuilder* common);
205
  void SetObjectState(Node* node) { object_state_ = node; }
206
  Node* GetObjectState() const { return object_state_; }
207 208 209 210 211 212 213 214
  bool IsCopyRequired() const { return status_ & kCopyRequired; }
  void SetCopyRequired() { status_ |= kCopyRequired; }
  bool NeedCopyForModification() {
    if (!IsCopyRequired() || !IsInitialized()) {
      return false;
    }
    return true;
  }
215

216
  NodeId id() const { return id_; }
217 218 219
  void id(NodeId id) { id_ = id; }

 private:
220 221 222
  bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
                   CommonOperatorBuilder* common);

223
  NodeId id_;
224
  StatusFlags status_;
225
  ZoneVector<Node*> fields_;
226
  ZoneVector<bool> phi_;
227
  Node* object_state_;
228 229 230
  VirtualState* owner_;

  DISALLOW_COPY_AND_ASSIGN(VirtualObject);
231 232
};

233 234
DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)

235 236 237
bool VirtualObject::UpdateFrom(const VirtualObject& other) {
  bool changed = status_ != other.status_;
  status_ = other.status_;
238
  phi_ = other.phi_;
239 240 241 242 243 244 245 246 247 248 249 250 251
  if (fields_.size() != other.fields_.size()) {
    fields_ = other.fields_;
    return true;
  }
  for (size_t i = 0; i < fields_.size(); ++i) {
    if (fields_[i] != other.fields_[i]) {
      changed = true;
      fields_[i] = other.fields_[i];
    }
  }
  return changed;
}

252 253
class VirtualState : public ZoneObject {
 public:
254 255 256 257 258 259 260 261 262 263 264 265
  VirtualState(Node* owner, Zone* zone, size_t size)
      : info_(size, nullptr, zone), owner_(owner) {}

  VirtualState(Node* owner, const VirtualState& state)
      : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
        owner_(owner) {
    for (size_t i = 0; i < info_.size(); ++i) {
      if (state.info_[i]) {
        info_[i] = state.info_[i];
      }
    }
  }
266

267
  VirtualObject* VirtualObjectFromAlias(size_t alias);
268
  void SetVirtualObject(Alias alias, VirtualObject* state);
269
  bool UpdateFrom(VirtualState* state, Zone* zone);
270
  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
271
                 CommonOperatorBuilder* common, Node* at);
272
  size_t size() const { return info_.size(); }
273
  Node* owner() const { return owner_; }
274 275 276 277 278 279
  VirtualObject* Copy(VirtualObject* obj, Alias alias);
  void SetCopyRequired() {
    for (VirtualObject* obj : info_) {
      if (obj) obj->SetCopyRequired();
    }
  }
280 281 282

 private:
  ZoneVector<VirtualObject*> info_;
283
  Node* owner_;
284 285

  DISALLOW_COPY_AND_ASSIGN(VirtualState);
286 287
};

288 289 290
class MergeCache : public ZoneObject {
 public:
  explicit MergeCache(Zone* zone)
291
      : states_(zone), objects_(zone), fields_(zone) {
292 293 294
    states_.reserve(5);
    objects_.reserve(5);
    fields_.reserve(5);
295 296 297 298 299 300 301 302 303
  }
  ZoneVector<VirtualState*>& states() { return states_; }
  ZoneVector<VirtualObject*>& objects() { return objects_; }
  ZoneVector<Node*>& fields() { return fields_; }
  void Clear() {
    states_.clear();
    objects_.clear();
    fields_.clear();
  }
304 305 306
  size_t LoadVirtualObjectsFromStatesFor(Alias alias);
  void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
                                       const ZoneVector<Alias>& aliases);
307 308 309 310 311 312
  Node* GetFields(size_t pos);

 private:
  ZoneVector<VirtualState*> states_;
  ZoneVector<VirtualObject*> objects_;
  ZoneVector<Node*> fields_;
313 314

  DISALLOW_COPY_AND_ASSIGN(MergeCache);
315 316
};

317
size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
318 319
  objects_.clear();
  DCHECK_GT(states_.size(), 0u);
320
  size_t min = std::numeric_limits<size_t>::max();
321
  for (VirtualState* state : states_) {
322
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
323 324 325 326 327 328 329
      objects_.push_back(obj);
      min = std::min(obj->field_count(), min);
    }
  }
  return min;
}

330
void MergeCache::LoadVirtualObjectsForFieldsFrom(
331
    VirtualState* state, const ZoneVector<Alias>& aliases) {
332
  objects_.clear();
333
  size_t max_alias = state->size();
334
  for (Node* field : fields_) {
335
    Alias alias = aliases[field->id()];
336 337
    if (alias >= max_alias) continue;
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
338 339 340 341 342 343 344
      objects_.push_back(obj);
    }
  }
}

// Gathers the field values at position |pos| from all cached objects into
// fields_. Returns the common representative node if every object agrees on
// the same value, otherwise nullptr.
Node* MergeCache::GetFields(size_t pos) {
  fields_.clear();
  Node* rep = pos >= objects_.front()->field_count()
                  ? nullptr
                  : objects_.front()->GetField(pos);
  for (VirtualObject* obj : objects_) {
    if (pos >= obj->field_count()) continue;
    Node* field = obj->GetField(pos);
    if (field) {
      fields_.push_back(field);
    }
    if (field != rep) {
      rep = nullptr;
    }
  }
  return rep;
}

361 362 363 364 365 366 367 368 369 370 371
VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
  if (obj->owner() == this) return obj;
  VirtualObject* new_obj =
      new (info_.get_allocator().zone()) VirtualObject(this, *obj);
  TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
        static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
        static_cast<void*>(new_obj));
  info_[alias] = new_obj;
  return new_obj;
}

372 373
VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
  return info_[alias];
374 375
}

376
void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
377
  info_[alias] = obj;
378 379 380
}

// Folds the information of |from| into this state, copying objects that are
// present only in |from| and updating the fields of shared aliases.
bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
  if (from == this) return false;
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    VirtualObject* ls = VirtualObjectFromAlias(alias);
    VirtualObject* rs = from->VirtualObjectFromAlias(alias);

    if (ls == rs || rs == nullptr) continue;

    if (ls == nullptr) {
      ls = new (zone) VirtualObject(this, *rs);
      SetVirtualObject(alias, ls);
      changed = true;
      continue;
    }

    TRACE("  Updating fields of @%d\n", alias);

    changed = ls->UpdateFrom(*rs) || changed;
  }
  // NOTE(review): |changed| is computed above but discarded; this returns
  // false unconditionally. Behavior preserved here — confirm against callers
  // whether returning |changed| was intended before altering it.
  return false;
}

403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436
namespace {

bool IsEquivalentPhi(Node* node1, Node* node2) {
  if (node1 == node2) return true;
  if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
      node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
    return false;
  }
  for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
    Node* input1 = NodeProperties::GetValueInput(node1, i);
    Node* input2 = NodeProperties::GetValueInput(node2, i);
    if (!IsEquivalentPhi(input1, input2)) {
      return false;
    }
  }
  return true;
}

bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
  if (phi->opcode() != IrOpcode::kPhi) return false;
  if (phi->op()->ValueInputCount() != inputs.size()) {
    return false;
  }
  for (size_t i = 0; i < inputs.size(); ++i) {
    Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
    if (!IsEquivalentPhi(input, inputs[i])) {
      return false;
    }
  }
  return true;
}

}  // namespace

437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467
bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
                                Graph* graph, CommonOperatorBuilder* common) {
  bool changed = false;
  int value_input_count = static_cast<int>(cache->fields().size());
  Node* rep = GetField(i);
  if (!rep || !IsCreatedPhi(i)) {
    Node* control = NodeProperties::GetControlInput(at);
    cache->fields().push_back(control);
    Node* phi = graph->NewNode(
        common->Phi(MachineRepresentation::kTagged, value_input_count),
        value_input_count + 1, &cache->fields().front());
    SetField(i, phi, true);
#ifdef DEBUG
    if (FLAG_trace_turbo_escape) {
      PrintF("    Creating Phi #%d as merge of", phi->id());
      for (int i = 0; i < value_input_count; i++) {
        PrintF(" #%d (%s)", cache->fields()[i]->id(),
               cache->fields()[i]->op()->mnemonic());
      }
      PrintF("\n");
    }
#endif
    changed = true;
  } else {
    DCHECK(rep->opcode() == IrOpcode::kPhi);
    for (int n = 0; n < value_input_count; ++n) {
      Node* old = NodeProperties::GetValueInput(rep, n);
      if (old != cache->fields()[n]) {
        changed = true;
        NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
      }
468 469
    }
  }
470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498
  return changed;
}

// Merges this object's fields with the objects collected in |cache| at merge
// point |at| (an EffectPhi or Phi). Fields that cannot be merged are cleared.
bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                              CommonOperatorBuilder* common) {
  DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
         at->opcode() == IrOpcode::kPhi);
  bool changed = false;
  for (size_t i = 0; i < field_count(); ++i) {
    if (Node* field = cache->GetFields(i)) {
      // All predecessors agree on the same representative node.
      changed = changed || GetField(i) != field;
      SetField(i, field);
      TRACE("    Field %zu agree on rep #%d\n", i, field->id());
    } else {
      int arity = at->opcode() == IrOpcode::kEffectPhi
                      ? at->op()->EffectInputCount()
                      : at->op()->ValueInputCount();
      // A phi can only be built when every predecessor supplied a value.
      if (cache->fields().size() == static_cast<size_t>(arity)) {
        changed = MergeFields(i, at, cache, graph, common) || changed;
      } else {
        if (GetField(i) != nullptr) {
          TRACE("    Field %zu cleared\n", i);
          changed = true;
        }
        SetField(i, nullptr);
      }
    }
  }
  return changed;
}

501
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
502
                             CommonOperatorBuilder* common, Node* at) {
503
  DCHECK_GT(cache->states().size(), 0u);
504
  bool changed = false;
505 506 507 508 509 510 511 512 513 514 515 516 517 518
  for (Alias alias = 0; alias < size(); ++alias) {
    cache->objects().clear();
    VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
    bool copy_merge_object = false;
    size_t fields = std::numeric_limits<size_t>::max();
    for (VirtualState* state : cache->states()) {
      if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
        cache->objects().push_back(obj);
        if (mergeObject == obj) {
          copy_merge_object = true;
        }
        fields = std::min(obj->field_count(), fields);
      }
    }
519
    if (cache->objects().size() == cache->states().size()) {
520 521 522 523 524 525 526 527 528 529 530 531 532 533 534
      if (!mergeObject) {
        VirtualObject* obj = new (zone)
            VirtualObject(cache->objects().front()->id(), this, zone, fields,
                          cache->objects().front()->IsInitialized());
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else if (copy_merge_object) {
        VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else {
        changed = mergeObject->ResizeFields(fields) || changed;
      }
535 536 537 538 539 540 541 542 543 544
#ifdef DEBUG
      if (FLAG_trace_turbo_escape) {
        PrintF("  Alias @%d, merging into %p virtual objects", alias,
               static_cast<void*>(mergeObject));
        for (size_t i = 0; i < cache->objects().size(); i++) {
          PrintF(" %p", static_cast<void*>(cache->objects()[i]));
        }
        PrintF("\n");
      }
#endif  // DEBUG
545
      changed = mergeObject->MergeFrom(cache, at, graph, common) || changed;
546
    } else {
547 548 549 550
      if (mergeObject) {
        TRACE("  Alias %d, virtual object removed\n", alias);
        changed = true;
      }
551
      SetVirtualObject(alias, nullptr);
552 553 554 555 556
    }
  }
  return changed;
}

557 558
EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                           Graph* graph, Zone* zone)
559 560
    : stack_(zone),
      object_analysis_(object_analysis),
561
      graph_(graph),
562
      status_(zone),
563 564 565
      next_free_alias_(0),
      status_stack_(zone),
      aliases_(zone) {}
566 567

// True if the node has been registered in the status table at all.
bool EscapeStatusAnalysis::HasEntry(Node* node) {
  return status_[node->id()] & (kTracked | kEscaped);
}

bool EscapeStatusAnalysis::IsVirtual(Node* node) {
  return IsVirtual(node->id());
}

// Virtual = tracked allocation that has not (yet) been found to escape.
bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
  return (status_[id] & kTracked) && !(status_[id] & kEscaped);
}

bool EscapeStatusAnalysis::IsEscaped(Node* node) {
  return status_[node->id()] & kEscaped;
}

bool EscapeStatusAnalysis::IsAllocation(Node* node) {
  return node->opcode() == IrOpcode::kAllocate ||
         node->opcode() == IrOpcode::kFinishRegion;
}

// Marks |node| escaped; returns true if it was not escaped before.
bool EscapeStatusAnalysis::SetEscaped(Node* node) {
  bool changed = !(status_[node->id()] & kEscaped);
  status_[node->id()] |= kEscaped | kTracked;
  return changed;
}

bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
  return status_[id] & kInQueue;
}

void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
  if (on_stack) {
    status_[id] |= kInQueue;
  } else {
    status_[id] &= ~kInQueue;
  }
}
605

606 607 608 609
void EscapeStatusAnalysis::ResizeStatusVector() {
  if (status_.size() <= graph()->NodeCount()) {
    status_.resize(graph()->NodeCount() * 1.1, kUnknown);
  }
610 611
}

612
size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
613

614 615 616 617 618
void EscapeStatusAnalysis::RunStatusAnalysis() {
  ResizeStatusVector();
  while (!status_stack_.empty()) {
    Node* node = status_stack_.back();
    status_stack_.pop_back();
619
    status_[node->id()] &= ~kOnStack;
620
    Process(node);
621
    status_[node->id()] |= kVisited;
622 623 624 625 626 627 628 629
  }
}

// Pushes |node| onto the worklist unless it is already queued.
void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
  DCHECK_NOT_NULL(node);
  if (!(status_[node->id()] & kOnStack)) {
    status_stack_.push_back(node);
    status_[node->id()] |= kOnStack;
  }
}

// Re-queues all inputs of |node| for (re)processing.
void EscapeStatusAnalysis::RevisitInputs(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (!(status_[input->id()] & kOnStack)) {
      status_stack_.push_back(input);
      status_[input->id()] |= kOnStack;
    }
  }
}

// Re-queues all reachable uses of |node| for (re)processing.
void EscapeStatusAnalysis::RevisitUses(Node* node) {
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
      status_stack_.push_back(use);
      status_[use->id()] |= kOnStack;
    }
  }
}

// Dispatches one worklist node to the opcode-specific escape handler.
void EscapeStatusAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocate(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadField:
    case IrOpcode::kLoadElement: {
      // If the load already has a replacement that is an allocation, escaping
      // uses of the load make that allocation escape too.
      if (Node* rep = object_analysis_->GetReplacement(node)) {
        if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
          RevisitInputs(rep);
          RevisitUses(rep);
        }
      }
      RevisitUses(node);
      break;
    }
    case IrOpcode::kPhi:
      if (!HasEntry(node)) {
        status_[node->id()] |= kTracked;
        RevisitUses(node);
      }
      if (!IsAllocationPhi(node) && SetEscaped(node)) {
        RevisitInputs(node);
        RevisitUses(node);
      }
      CheckUsesForEscape(node);
    // Falls through to default, which is a no-op.
    default:
      break;
  }
}

// Returns true if every input of |node| is either a non-escaping phi or an
// allocation node.
bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    const bool non_escaping_phi =
        input->opcode() == IrOpcode::kPhi && !IsEscaped(input);
    if (!non_escaping_phi && !IsAllocation(input)) {
      return false;
    }
  }
  return true;
}

703 704 705 706
void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 1);
707
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
708
    RevisitUses(val);
709
    RevisitInputs(val);
710 711
    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
712 713 714
  }
}

715 716 717 718
void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 2);
719
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
720
    RevisitUses(val);
721
    RevisitInputs(val);
722 723
    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
724 725 726
  }
}

727 728 729
void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  if (!HasEntry(node)) {
730
    status_[node->id()] |= kTracked;
731 732
    TRACE("Created status entry for node #%d (%s)\n", node->id(),
          node->op()->mnemonic());
733
    NumberMatcher size(node->InputAt(0));
734 735 736 737
    DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
738
    RevisitUses(node);
739
    if (!size.HasValue() && SetEscaped(node)) {
740
      TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
741 742
      // This node is already known to escape, uses do not have to be checked
      // for escape.
743 744 745
      return;
    }
  }
746
  if (CheckUsesForEscape(node, true)) {
747 748 749 750
    RevisitUses(node);
  }
}

751 752
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
753 754
  for (Edge edge : uses->use_edges()) {
    Node* use = edge.from();
755
    if (IsNotReachable(use)) continue;
756 757
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
758
      continue;
759 760
    switch (use->opcode()) {
      case IrOpcode::kPhi:
761
        if (phi_escaping && SetEscaped(rep)) {
762 763 764 765 766
          TRACE(
              "Setting #%d (%s) to escaped because of use by phi node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
767 768
          return true;
        }
769 770 771 772 773 774 775 776 777 778
      // Fallthrough.
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
        if (IsEscaped(use) && SetEscaped(rep)) {
779 780 781 782 783
          TRACE(
              "Setting #%d (%s) to escaped because of use by escaping node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
784 785
          return true;
        }
786
        break;
787 788
      case IrOpcode::kObjectIsSmi:
        if (!IsAllocation(rep) && SetEscaped(rep)) {
789 790 791
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
792 793 794
          return true;
        }
        break;
795
      case IrOpcode::kSelect:
796 797
      // TODO(mstarzinger): The following list of operators will eventually be
      // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
798 799 800 801 802 803
      case IrOpcode::kStringEqual:
      case IrOpcode::kStringLessThan:
      case IrOpcode::kStringLessThanOrEqual:
      case IrOpcode::kPlainPrimitiveToNumber:
      case IrOpcode::kPlainPrimitiveToWord32:
      case IrOpcode::kPlainPrimitiveToFloat64:
804
      case IrOpcode::kStringCharCodeAt:
805 806
      case IrOpcode::kObjectIsCallable:
      case IrOpcode::kObjectIsNumber:
807
      case IrOpcode::kObjectIsReceiver:
808 809
      case IrOpcode::kObjectIsString:
      case IrOpcode::kObjectIsUndetectable:
810
        if (SetEscaped(rep)) {
811 812 813
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
814 815 816
          return true;
        }
        break;
817
      default:
818
        if (use->op()->EffectInputCount() == 0 &&
819 820
            uses->op()->EffectInputCount() > 0 &&
            !IrOpcode::IsJsOpcode(use->opcode())) {
821 822
          TRACE("Encountered unaccounted use by #%d (%s)\n", use->id(),
                use->op()->mnemonic());
823 824
          UNREACHABLE();
        }
825
        if (SetEscaped(rep)) {
826 827 828
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
829 830 831 832 833 834 835 836 837 838
          return true;
        }
    }
  }
  return false;
}

// FinishRegion nodes act as proxies for their allocation: track them and
// propagate escapes back to their inputs.
void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
  }
}

void EscapeStatusAnalysis::DebugPrint() {
848 849
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
850
      PrintF("Node #%d is %s\n", id,
851
             (status_[id] & kEscaped) ? "escaping" : "virtual");
852 853 854 855
    }
  }
}

856 857
EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
858
    : zone_(zone),
859
      slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
860
      common_(common),
861
      status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
862
      virtual_states_(zone),
863
      replacements_(zone),
864
      cycle_detection_(zone),
865
      cache_(nullptr) {}
866

867 868 869
EscapeAnalysis::~EscapeAnalysis() {}

void EscapeAnalysis::Run() {
870
  replacements_.resize(graph()->NodeCount());
871 872
  status_analysis_->AssignAliases();
  if (status_analysis_->AliasCount() > 0) {
873 874
    cache_ = new (zone()) MergeCache(zone());
    replacements_.resize(graph()->NodeCount());
875
    status_analysis_->ResizeStatusVector();
876
    RunObjectAnalysis();
877
    status_analysis_->RunStatusAnalysis();
878
  }
879
}
880

881
void EscapeStatusAnalysis::AssignAliases() {
882 883
  size_t max_size = 1024;
  size_t min_size = 32;
884 885
  size_t stack_size =
      std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
886
  stack_.reserve(stack_size);
887 888
  ResizeStatusVector();
  stack_.push_back(graph()->end());
889 890 891
  CHECK_LT(graph()->NodeCount(), kUntrackable);
  aliases_.resize(graph()->NodeCount(), kNotReachable);
  aliases_[graph()->end()->id()] = kUntrackable;
892
  status_stack_.reserve(8);
893
  TRACE("Discovering trackable nodes");
894 895 896
  while (!stack_.empty()) {
    Node* node = stack_.back();
    stack_.pop_back();
897 898 899 900
    switch (node->opcode()) {
      case IrOpcode::kAllocate:
        if (aliases_[node->id()] >= kUntrackable) {
          aliases_[node->id()] = NextAlias();
901 902
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
903
          EnqueueForStatusAnalysis(node);
904 905 906 907
        }
        break;
      case IrOpcode::kFinishRegion: {
        Node* allocate = NodeProperties::GetValueInput(node, 0);
908
        DCHECK_NOT_NULL(allocate);
909 910 911
        if (allocate->opcode() == IrOpcode::kAllocate) {
          if (aliases_[allocate->id()] >= kUntrackable) {
            if (aliases_[allocate->id()] == kNotReachable) {
912
              stack_.push_back(allocate);
913 914
            }
            aliases_[allocate->id()] = NextAlias();
915 916
            TRACE(" @%d:%s#%u", aliases_[allocate->id()],
                  allocate->op()->mnemonic(), allocate->id());
917
            EnqueueForStatusAnalysis(allocate);
918 919
          }
          aliases_[node->id()] = aliases_[allocate->id()];
920 921
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
922 923 924 925 926 927 928 929 930 931
        }
        break;
      }
      default:
        DCHECK_EQ(aliases_[node->id()], kUntrackable);
        break;
    }
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (aliases_[input->id()] == kNotReachable) {
932
        stack_.push_back(input);
933 934 935 936
        aliases_[input->id()] = kUntrackable;
      }
    }
  }
937
  TRACE("\n");
938 939
}

940 941 942 943 944 945 946
// Returns true if alias discovery never reached |node|. Nodes whose id lies
// beyond the alias table are conservatively treated as reachable.
bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
  if (node->id() >= aliases_.size()) return false;
  return aliases_[node->id()] == kNotReachable;
}

947
void EscapeAnalysis::RunObjectAnalysis() {
948
  virtual_states_.resize(graph()->NodeCount());
949 950
  ZoneDeque<Node*> queue(zone());
  queue.push_back(graph()->start());
951
  ZoneVector<Node*> danglers(zone());
952 953 954
  while (!queue.empty()) {
    Node* node = queue.back();
    queue.pop_back();
955
    status_analysis_->SetInQueue(node->id(), false);
956
    if (Process(node)) {
957
      for (Edge edge : node->use_edges()) {
958
        Node* use = edge.from();
959
        if (status_analysis_->IsNotReachable(use)) {
960 961
          continue;
        }
962
        if (NodeProperties::IsEffectEdge(edge)) {
963 964 965 966
          // Iteration order: depth first, but delay phis.
          // We need DFS do avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
          if (use->opcode() == IrOpcode::kEffectPhi) {
967
            if (!status_analysis_->IsInQueue(use->id())) {
968 969 970 971
              queue.push_front(use);
            }
          } else if ((use->opcode() != IrOpcode::kLoadField &&
                      use->opcode() != IrOpcode::kLoadElement) ||
972 973 974
                     !status_analysis_->IsDanglingEffectNode(use)) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
975 976
              queue.push_back(use);
            }
977 978
          } else {
            danglers.push_back(use);
979 980 981
          }
        }
      }
982 983 984 985
      // Danglers need to be processed immediately, even if they are
      // on the stack. Since they do not have effect outputs,
      // we don't have to track whether they are on the stack.
      queue.insert(queue.end(), danglers.begin(), danglers.end());
986
      danglers.clear();
987 988
    }
  }
989
#ifdef DEBUG
990 991 992
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
993
#endif
994 995
}

996 997 998 999
// A node is "dangling" if it is effectful but no reachable node consumes its
// effect output. The result is memoized in the status bits.
bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
  if (status_[node->id()] & kDanglingComputed) {
    return status_[node->id()] & kDangling;
  }
  if (node->op()->EffectInputCount() == 0 ||
      node->op()->EffectOutputCount() == 0 ||
      (node->op()->EffectInputCount() == 1 &&
       NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
    // The start node is used as sentinel for nodes that are in general
    // effectful, but of which an analysis has determined that they do not
    // produce effects in this instance. We don't consider these nodes dangling.
    status_[node->id()] |= kDanglingComputed;
    return false;
  }
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      // Found a reachable effect consumer -> not dangling.
      status_[node->id()] |= kDanglingComputed;
      return false;
    }
  }
  status_[node->id()] |= kDanglingComputed | kDangling;
  return true;
}

1022 1023 1024 1025 1026 1027 1028 1029 1030
// A node is an effect branch point if more than one reachable, non-dangling
// node consumes its effect output. The result is memoized in the status bits.
bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
  if (status_[node->id()] & kBranchPointComputed) {
    return status_[node->id()] & kBranchPoint;
  }
  int count = 0;
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      // Dangling loads do not count: they never extend the effect chain.
      if ((use->opcode() == IrOpcode::kLoadField ||
           use->opcode() == IrOpcode::kLoadElement ||
           use->opcode() == IrOpcode::kLoad) &&
          IsDanglingEffectNode(use))
        continue;
      if (++count > 1) {
        status_[node->id()] |= kBranchPointComputed | kBranchPoint;
        return true;
      }
    }
  }
  status_[node->id()] |= kBranchPointComputed;
  return false;
}

1046
// Dispatches one node of the effect chain to its handler. Returns true if
// the uses of |node| should be (re)visited by the worklist.
bool EscapeAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocation(node);
      break;
    case IrOpcode::kBeginRegion:
      ForwardVirtualState(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kLoadField:
      ProcessLoadField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadElement:
      ProcessLoadElement(node);
      break;
    case IrOpcode::kStart:
      ProcessStart(node);
      break;
    case IrOpcode::kEffectPhi:
      // Phis only re-enqueue uses when their merged state changed.
      return ProcessEffectPhi(node);
    default:
      if (node->op()->EffectInputCount() > 0) {
        ForwardVirtualState(node);
      }
      ProcessAllocationUsers(node);
      break;
  }
  return true;
}

1085 1086 1087
// Conservatively invalidates field information of any virtual object passed
// as a value input to a node the analysis does not understand.
void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    Node* use = edge.from();
    // Only value/context inputs can leak an allocation.
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (node->opcode()) {
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kObjectIsSmi:
        // These uses are handled precisely elsewhere.
        break;
      default:
        VirtualState* state = virtual_states_[node->id()];
        if (VirtualObject* obj =
                GetVirtualObject(state, ResolveReplacement(input))) {
          if (!obj->AllFieldsClear()) {
            obj = CopyForModificationAt(obj, state, node);
            obj->ClearAllFields();
            TRACE("Cleared all fields of @%d:#%d\n",
                  status_analysis_->GetAlias(obj->id()), obj->id());
          }
        }
        break;
    }
  }
}

1119 1120
// Returns a virtual state that |node| may mutate: the state itself if |node|
// already owns it, otherwise a fresh copy registered for |node|.
VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
  if (state->owner() != node) {
    VirtualState* new_state = new (zone()) VirtualState(node, *state);
    virtual_states_[node->id()] = new_state;
    TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
          static_cast<void*>(state), static_cast<void*>(new_state),
          node->op()->mnemonic(), node->id());
    return new_state;
  }
  return state;
}

// Returns a virtual object that may be mutated at |node|, copying both the
// owning state and the object itself when copy-on-write is required.
VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
  if (obj->NeedCopyForModification()) {
    state = CopyForModificationAt(state, node);
    return state->Copy(obj, status_analysis_->GetAlias(obj->id()));
  }
  return obj;
}

1142
// Propagates the virtual state along the (single) effect input of |node|,
// either merging into an existing state or sharing the predecessor's state.
void EscapeAnalysis::ForwardVirtualState(Node* node) {
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
#ifdef DEBUG
  if (node->opcode() != IrOpcode::kLoadField &&
      node->opcode() != IrOpcode::kLoadElement &&
      node->opcode() != IrOpcode::kLoad &&
      status_analysis_->IsDanglingEffectNode(node)) {
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
    UNREACHABLE();
  }
#endif  // DEBUG
  Node* effect = NodeProperties::GetEffectInput(node);
  DCHECK_NOT_NULL(virtual_states_[effect->id()]);
  if (virtual_states_[node->id()]) {
    virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
                                            zone());
  } else {
    virtual_states_[node->id()] = virtual_states_[effect->id()];
    TRACE("Forwarding object state %p from %s#%d to %s#%d",
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
          node->id());
    // A shared state reached via a branch point (or needed by a frame state)
    // must be copied before any mutation.
    if (status_analysis_->IsEffectBranchPoint(effect) ||
        OperatorProperties::HasFrameStateInput(node->op())) {
      virtual_states_[node->id()]->SetCopyRequired();
      TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
            effect->id());
    }
    TRACE("\n");
  }
}

1175
// Seeds the start node with an empty virtual state covering all aliases.
void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  virtual_states_[node->id()] =
      new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
}

1181
// Merges the virtual states of all effect inputs of an effect phi into the
// phi's own state. Returns true if the merged state changed.
bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    mergeState =
        new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
          static_cast<void*>(mergeState));
  }

  cache_->Clear();

  TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
        static_cast<void*>(mergeState));

  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
      // An input aliasing our own merge state would self-merge; replace the
      // merge target with a fresh state in that case.
      if (state == mergeState) {
        mergeState = new (zone())
            VirtualState(node, zone(), status_analysis_->AliasCount());
        virtual_states_[node->id()] = mergeState;
        changed = true;
      }
    }
    TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
          input->op()->mnemonic());
  }
  TRACE("\n");

  if (cache_->states().empty()) {
    return changed;
  }

  changed =
      mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;

  TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");

  if (changed) {
    // Merging can create phi nodes; make room for their status entries.
    status_analysis_->ResizeStatusVector();
  }
  return changed;
}

1232
// Registers a virtual object for an allocation node. The object's field
// count is derived from the (constant) allocation size when available.
void EscapeAnalysis::ProcessAllocation(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  ForwardVirtualState(node);
  VirtualState* state = virtual_states_[node->id()];
  Alias alias = status_analysis_->GetAlias(node->id());

  // Check if we have already processed this node.
  if (state->VirtualObjectFromAlias(alias)) {
    return;
  }

  if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
    state = CopyForModificationAt(state, node);
  }

  NumberMatcher size(node->InputAt(0));
  // Allocation sizes must be untagged number constants, never raw machine
  // constants.
  DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
  if (size.HasValue()) {
    VirtualObject* obj = new (zone()) VirtualObject(
        node->id(), state, zone(), size.Value() / kPointerSize, false);
    state->SetVirtualObject(alias, obj);
  } else {
    // Unknown size: track the object without field information.
    state->SetVirtualObject(
        alias, new (zone()) VirtualObject(node->id(), state, zone()));
  }
}

1262
// Marks the virtual object of a finished allocation region as fully
// initialized (FinishRegion shares its alias with the allocation).
void EscapeAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  ForwardVirtualState(node);
  Node* allocation = NodeProperties::GetValueInput(node, 0);
  if (allocation->opcode() == IrOpcode::kAllocate) {
    VirtualState* state = virtual_states_[node->id()];
    VirtualObject* obj =
        state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
    DCHECK_NOT_NULL(obj);
    obj->SetInitialized();
  }
}

1275
// Returns the recorded replacement of |node|, or nullptr if none exists
// (including ids beyond the replacement table).
Node* EscapeAnalysis::replacement(Node* node) {
  if (node->id() >= replacements_.size()) return nullptr;
  return replacements_[node->id()];
}

// Installs |rep| as the replacement of |node|; returns whether the stored
// replacement actually changed.
bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
  Node*& slot = replacements_[node->id()];
  if (slot == rep) return false;
  slot = rep;
  return true;
}

// Updates the replacement of |node| and traces the change. Returns true if
// the replacement changed. |state| is unused here but kept for the caller's
// uniform signature.
bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
                                       Node* rep) {
  if (SetReplacement(node, rep)) {
    if (rep) {
      TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
            rep->op()->mnemonic());
    } else {
      TRACE("Replacement of #%d cleared\n", node->id());
    }
    return true;
  }
  return false;
}

// Follows the replacement chain of |node| to its final representative;
// returns |node| itself if it has no replacement.
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
  for (Node* next = replacement(node); next != nullptr;
       next = replacement(node)) {
    node = next;
  }
  return node;
}

// Like ResolveReplacement, but returns nullptr when |node| has no
// replacement at all (instead of returning |node| itself).
Node* EscapeAnalysis::GetReplacement(Node* node) {
  Node* result = nullptr;
  while (replacement(node)) {
    node = result = replacement(node);
  }
  return result;
}

1315
// Returns whether |node| is tracked as a virtual (non-escaping) object.
// Nodes beyond the status vector are conservatively not virtual.
bool EscapeAnalysis::IsVirtual(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsVirtual(node);
}

// Returns whether |node| is known to escape. Nodes beyond the status vector
// are conservatively reported as not escaped.
bool EscapeAnalysis::IsEscaped(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsEscaped(node);
}

1329 1330 1331 1332 1333 1334 1335 1336 1337 1338
// Compares two virtual objects after resolving their replacements; they are
// considered equal when their representatives are equivalent phis.
bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
  DCHECK(IsVirtual(left) && IsVirtual(right));
  Node* left_rep = ResolveReplacement(left);
  Node* right_rep = ResolveReplacement(right);
  return IsEquivalentPhi(left_rep, right_rep);
}

1339 1340 1341 1342 1343 1344 1345 1346 1347 1348 1349 1350 1351 1352
namespace {

// Converts the byte offset of a field access into a pointer-sized slot
// index within the virtual object.
int OffsetForFieldAccess(Node* node) {
  FieldAccess access = FieldAccessOf(node->op());
  DCHECK_EQ(access.offset % kPointerSize, 0);
  return access.offset / kPointerSize;
}

// Converts an element index into a pointer-sized slot index, accounting for
// the pointer-aligned object header.
int OffsetForElementAccess(Node* node, int index) {
  ElementAccess access = ElementAccessOf(node->op());
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  DCHECK_EQ(access.header_size % kPointerSize, 0);
  return access.header_size / kPointerSize + index;
}

}  // namespace

1357
void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
1358
                                        VirtualState* state) {
1359
  TRACE("Load #%d from phi #%d", load->id(), from->id());
1360

1361
  cache_->fields().clear();
1362 1363
  for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
    Node* input = NodeProperties::GetValueInput(load, i);
1364
    cache_->fields().push_back(input);
1365 1366
  }

1367
  cache_->LoadVirtualObjectsForFieldsFrom(state,
1368
                                          status_analysis_->GetAliasMap());
1369 1370 1371
  if (cache_->objects().size() == cache_->fields().size()) {
    cache_->GetFields(offset);
    if (cache_->fields().size() == cache_->objects().size()) {
1372
      Node* rep = replacement(load);
1373 1374 1375
      if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
        int value_input_count = static_cast<int>(cache_->fields().size());
        cache_->fields().push_back(NodeProperties::GetControlInput(from));
1376
        Node* phi = graph()->NewNode(
1377 1378
            common()->Phi(MachineRepresentation::kTagged, value_input_count),
            value_input_count + 1, &cache_->fields().front());
1379
        status_analysis_->ResizeStatusVector();
1380
        SetReplacement(load, phi);
1381 1382 1383
        TRACE(" got phi created.\n");
      } else {
        TRACE(" has already phi #%d.\n", rep->id());
1384
      }
1385 1386
    } else {
      TRACE(" has incomplete field info.\n");
1387
    }
1388 1389
  } else {
    TRACE(" has incomplete virtual object info.\n");
1390 1391 1392
  }
}

1393
// Replaces a field load from a tracked virtual object with the stored field
// value, or builds a phi when loading through a phi of virtual objects.
void EscapeAnalysis::ProcessLoadField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, from)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) return;
    Node* value = object->GetField(offset);
    if (value) {
      value = ResolveReplacement(value);
    }
    // Record that the load has this alias.
    UpdateReplacement(state, node, value);
  } else if (from->opcode() == IrOpcode::kPhi &&
             FieldAccessOf(node->op()).offset % kPointerSize == 0) {
    int offset = OffsetForFieldAccess(node);
    // Only binary phis are supported for now.
    ProcessLoadFromPhi(offset, from, node, state);
  } else {
    UpdateReplacement(state, node, nullptr);
  }
}

// Like ProcessLoadField, but for element loads. A non-constant index defeats
// tracking, so the object is marked as escaped in that case.
void EscapeAnalysis::ProcessLoadElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  // Indices must be untagged number constants, never raw machine constants.
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, from)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* value = object->GetField(offset);
      if (value) {
        value = ResolveReplacement(value);
      }
      // Record that the load has this alias.
      UpdateReplacement(state, node, value);
    } else if (from->opcode() == IrOpcode::kPhi) {
      int offset = OffsetForElementAccess(node, index.Value());
      ProcessLoadFromPhi(offset, from, node, state);
    } else {
      UpdateReplacement(state, node, nullptr);
    }
  } else {
    // We have a load from a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(from)) {
      TRACE(
          "Setting #%d (%s) to escaped because load element #%d from non-const "
          "index #%d (%s)\n",
          from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
  }
}

1458
// Records a field store into the virtual object's field array (copy-on-write
// when the value actually changes).
void EscapeAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, to)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) return;
    Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
    // TODO(mstarzinger): The following is a workaround to not track some well
    // known raw fields. We only ever store default initial values into these
    // fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
    if (val->opcode() == IrOpcode::kInt32Constant ||
        val->opcode() == IrOpcode::kInt64Constant) {
      DCHECK(FieldAccessOf(node->op()).offset == JSFunction::kCodeEntryOffset ||
             FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
      val = slot_not_analyzed_;
    }
    if (object->GetField(offset) != val) {
      object = CopyForModificationAt(object, state, node);
      object->SetField(offset, val);
    }
  }
}

1484 1485 1486
// Records an element store. A non-constant index marks the object escaped
// and clears all its tracked fields (their values can no longer be trusted).
void EscapeAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  // Indices must be untagged number constants, never raw machine constants.
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  VirtualState* state = virtual_states_[node->id()];
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
      if (object->GetField(offset) != val) {
        object = CopyForModificationAt(object, state, node);
        object->SetField(offset, val);
      }
    }
  } else {
    // We have a store to a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(to)) {
      TRACE(
          "Setting #%d (%s) to escaped because store element #%d to non-const "
          "index #%d (%s)\n",
          to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      if (!object->AllFieldsClear()) {
        object = CopyForModificationAt(object, state, node);
        object->ClearAllFields();
        TRACE("Cleared all fields of @%d:#%d\n",
              status_analysis_->GetAlias(object->id()), object->id());
      }
    }
  }
}

1527 1528 1529 1530
// Materializes (or returns the cached) ObjectState node describing the
// fields of the virtual object represented by |node| at |effect|. Nested
// virtual objects are recursively replaced by their own ObjectState nodes.
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
                                               ResolveReplacement(node))) {
      if (Node* object_state = vobj->GetObjectState()) {
        return object_state;
      } else {
        cache_->fields().clear();
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            cache_->fields().push_back(field);
          }
        }
        int input_count = static_cast<int>(cache_->fields().size());
        Node* new_object_state =
            graph()->NewNode(common()->ObjectState(input_count, vobj->id()),
                             input_count, &cache_->fields().front());
        // Cache before recursing so self-references terminate.
        vobj->SetObjectState(new_object_state);
        TRACE(
            "Creating object state #%d for vobj %p (from node #%d) at effect "
            "#%d\n",
            new_object_state->id(), static_cast<void*>(vobj), node->id(),
            effect->id());
        // Now fix uses of other objects.
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            if (Node* field_object_state =
                    GetOrCreateObjectState(effect, field)) {
              NodeProperties::ReplaceValueInput(
                  new_object_state, field_object_state, static_cast<int>(i));
            }
          }
        }
        return new_object_state;
      }
    }
  }
  return nullptr;
}

1569 1570 1571 1572 1573 1574 1575 1576 1577 1578 1579 1580 1581 1582 1583 1584 1585 1586 1587 1588 1589
// Detects whether the virtual object reachable from |node| at |effect|
// (transitively through its fields) contains a reference cycle.
bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
  const bool may_be_object = node->opcode() == IrOpcode::kFinishRegion ||
                             node->opcode() == IrOpcode::kAllocate;
  if (!may_be_object || !IsVirtual(node)) return false;
  VirtualObject* vobj =
      GetVirtualObject(virtual_states_[effect->id()], ResolveReplacement(node));
  if (!vobj) return false;
  // Seeing the same object again on the current DFS path means a cycle.
  if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
  cycle_detection_.insert(vobj);
  bool cycle_detected = false;
  for (size_t i = 0; i < vobj->field_count(); ++i) {
    Node* field = vobj->GetField(i);
    if (field && IsCyclicObjectState(effect, field)) cycle_detected = true;
  }
  cycle_detection_.erase(vobj);
  return cycle_detected;
}

1590
// Dumps all virtual objects (and their tracked fields) of one virtual state.
void EscapeAnalysis::DebugPrintState(VirtualState* state) {
  PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
  for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
    if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
      PrintF("  Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
             object->field_count());
      for (size_t i = 0; i < object->field_count(); ++i) {
        if (Node* f = object->GetField(i)) {
          PrintF("    Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
        }
      }
    }
  }
}

1605
void EscapeAnalysis::DebugPrint() {
1606 1607 1608 1609 1610 1611 1612 1613 1614 1615
  ZoneVector<VirtualState*> object_states(zone());
  for (NodeId id = 0; id < virtual_states_.size(); id++) {
    if (VirtualState* states = virtual_states_[id]) {
      if (std::find(object_states.begin(), object_states.end(), states) ==
          object_states.end()) {
        object_states.push_back(states);
      }
    }
  }
  for (size_t n = 0; n < object_states.size(); n++) {
1616
    DebugPrintState(object_states[n]);
1617 1618 1619
  }
}

1620 1621
// Looks up the virtual object for |node| in |state| via its alias, or
// nullptr if the node is untracked or the alias is outside the state.
VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
                                                Node* node) {
  if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
  Alias alias = status_analysis_->GetAlias(node->id());
  if (alias >= state->size()) return nullptr;
  return state->VirtualObjectFromAlias(alias);
}

1628
bool EscapeAnalysis::ExistsVirtualAllocate() {
1629
  for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
1630
    Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
1631
    if (alias < EscapeStatusAnalysis::kUntrackable) {
1632
      if (status_analysis_->IsVirtual(static_cast<int>(id))) {
1633 1634 1635 1636 1637 1638 1639
        return true;
      }
    }
  }
  return false;
}

1640 1641
// The analyzed graph is owned by the status analysis.
Graph* EscapeAnalysis::graph() const {
  return status_analysis_->graph();
}

1642 1643 1644
}  // namespace compiler
}  // namespace internal
}  // namespace v8