escape-analysis.cc 55.1 KB
Newer Older
1 2 3 4 5 6
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

7 8
#include <limits>

9 10 11 12 13 14 15 16
#include "src/base/flags.h"
#include "src/bootstrapper.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
17
#include "src/compiler/node.h"
18
#include "src/compiler/operator-properties.h"
19
#include "src/compiler/simplified-operator.h"
20
#include "src/compiler/type-cache.h"
21 22 23 24 25 26
#include "src/objects-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

27
typedef NodeId Alias;
28

29 30 31 32 33 34 35 36 37
#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121
// EscapeStatusAnalysis determines for each allocation whether it escapes.
class EscapeStatusAnalysis : public ZoneObject {
 public:
  // Per-node status bits, stored in status_ indexed by NodeId.
  enum Status {
    kUnknown = 0u,
    kTracked = 1u << 0,
    kEscaped = 1u << 1,
    kOnStack = 1u << 2,
    kVisited = 1u << 3,
    // A node is dangling, if it is a load of some kind, and does not have
    // an effect successor.
    kDanglingComputed = 1u << 4,
    kDangling = 1u << 5,
    // A node is an effect branch point, if it has more than 2 non-dangling
    // effect successors.
    kBranchPointComputed = 1u << 6,
    kBranchPoint = 1u << 7,
    kInQueue = 1u << 8
  };
  typedef base::Flags<Status, uint16_t> StatusFlags;

  // Runs the fixpoint worklist iteration over status_stack_.
  void RunStatusAnalysis();

  // A node is virtual if it is tracked and has not (yet) escaped.
  bool IsVirtual(Node* node);
  bool IsEscaped(Node* node);
  bool IsAllocation(Node* node);

  bool IsInQueue(NodeId id);
  void SetInQueue(NodeId id, bool on_stack);

  void DebugPrint();

  EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
                       Zone* zone);
  void EnqueueForStatusAnalysis(Node* node);
  // Returns true if the node's status changed to escaped.
  bool SetEscaped(Node* node);
  bool IsEffectBranchPoint(Node* node);
  bool IsDanglingEffectNode(Node* node);
  void ResizeStatusVector();
  size_t GetStatusVectorSize();
  bool IsVirtual(NodeId id);

  Graph* graph() const { return graph_; }
  // Assigns a dense Alias to every tracked allocation (see aliases_).
  void AssignAliases();
  Alias GetAlias(NodeId id) const { return aliases_[id]; }
  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
  Alias AliasCount() const { return next_free_alias_; }
  static const Alias kNotReachable;
  static const Alias kUntrackable;

  bool IsNotReachable(Node* node);

 private:
  void Process(Node* node);
  void ProcessAllocate(Node* node);
  void ProcessFinishRegion(Node* node);
  void ProcessStoreField(Node* node);
  void ProcessStoreElement(Node* node);
  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
    return CheckUsesForEscape(node, node, phi_escaping);
  }
  bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
  void RevisitUses(Node* node);
  void RevisitInputs(Node* node);

  Alias NextAlias() { return next_free_alias_++; }

  bool HasEntry(Node* node);

  bool IsAllocationPhi(Node* node);

  ZoneVector<Node*> stack_;            // DFS stack used by AssignAliases().
  EscapeAnalysis* object_analysis_;    // Owner; supplies load replacements.
  Graph* const graph_;
  ZoneVector<StatusFlags> status_;     // Indexed by NodeId.
  Alias next_free_alias_;
  ZoneVector<Node*> status_stack_;     // Worklist for RunStatusAnalysis().
  ZoneVector<Alias> aliases_;          // NodeId -> Alias (or sentinel).

  DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
};

DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)

// Sentinel aliases: kNotReachable marks nodes never seen by the reverse DFS,
// kUntrackable marks reachable nodes that carry no tracked allocation.
const Alias EscapeStatusAnalysis::kNotReachable =
    std::numeric_limits<Alias>::max();
const Alias EscapeStatusAnalysis::kUntrackable =
    std::numeric_limits<Alias>::max() - 1;

127 128
// A VirtualObject models one tracked allocation: a vector of field slots,
// each holding the node last known to be stored there (nullptr = unknown),
// plus bookkeeping for phis created during state merges.
class VirtualObject : public ZoneObject {
 public:
  enum Status {
    kInitial = 0,
    kTracked = 1u << 0,
    kInitialized = 1u << 1,
    kCopyRequired = 1u << 2,
  };
  typedef base::Flags<Status, unsigned char> StatusFlags;

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
      : id_(id),
        status_(kInitial),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {}

  // Clone into a new owning state; the clone drops the kCopyRequired bit.
  VirtualObject(VirtualState* owner, const VirtualObject& other)
      : id_(other.id_),
        status_(other.status_ & ~kCopyRequired),
        fields_(other.fields_),
        phi_(other.phi_),
        object_state_(other.object_state_),
        owner_(owner) {}

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
                bool initialized)
      : id_(id),
        status_(kTracked | (initialized ? kInitialized : kInitial)),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {
    fields_.resize(field_number);
    phi_.resize(field_number, false);
  }

  Node* GetField(size_t offset) { return fields_[offset]; }

  // True if the value at |offset| is a phi created by a merge.
  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }

  void SetField(size_t offset, Node* node, bool created_phi = false) {
    fields_[offset] = node;
    phi_[offset] = created_phi;
  }
  bool IsTracked() const { return status_ & kTracked; }
  bool IsInitialized() const { return status_ & kInitialized; }
  bool SetInitialized() { return status_ |= kInitialized; }
  VirtualState* owner() const { return owner_; }

  Node** fields_array() { return &fields_.front(); }
  size_t field_count() { return fields_.size(); }
  // Grows the field vectors; returns true if a resize actually happened.
  bool ResizeFields(size_t field_count) {
    if (field_count > fields_.size()) {
      fields_.resize(field_count);
      phi_.resize(field_count);
      return true;
    }
    return false;
  }
  void ClearAllFields() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      fields_[i] = nullptr;
      phi_[i] = false;
    }
  }
  bool AllFieldsClear() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      if (fields_[i] != nullptr) {
        return false;
      }
    }
    return true;
  }
  // Overwrites this object's contents with |other|; returns true on change.
  bool UpdateFrom(const VirtualObject& other);
  // Merges field values collected in |cache| at merge point |at|.
  bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                 CommonOperatorBuilder* common);
  void SetObjectState(Node* node) { object_state_ = node; }
  Node* GetObjectState() const { return object_state_; }
  bool IsCopyRequired() const { return status_ & kCopyRequired; }
  void SetCopyRequired() { status_ |= kCopyRequired; }
  // Copy-on-write: only initialized objects flagged kCopyRequired need a
  // fresh copy before mutation.
  bool NeedCopyForModification() {
    if (!IsCopyRequired() || !IsInitialized()) {
      return false;
    }
    return true;
  }

  NodeId id() const { return id_; }
  void id(NodeId id) { id_ = id; }

 private:
  bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
                   CommonOperatorBuilder* common);

  NodeId id_;
  StatusFlags status_;
  ZoneVector<Node*> fields_;   // Current value per field slot.
  ZoneVector<bool> phi_;       // Parallel to fields_: slot holds a merge phi.
  Node* object_state_;         // Cached ObjectState node, if materialized.
  VirtualState* owner_;        // State this object currently belongs to.

  DISALLOW_COPY_AND_ASSIGN(VirtualObject);
};

233 234
DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)

235 236 237
bool VirtualObject::UpdateFrom(const VirtualObject& other) {
  bool changed = status_ != other.status_;
  status_ = other.status_;
238
  phi_ = other.phi_;
239 240 241 242 243 244 245 246 247 248 249 250 251
  if (fields_.size() != other.fields_.size()) {
    fields_ = other.fields_;
    return true;
  }
  for (size_t i = 0; i < fields_.size(); ++i) {
    if (fields_[i] != other.fields_[i]) {
      changed = true;
      fields_[i] = other.fields_[i];
    }
  }
  return changed;
}

252 253
// A VirtualState holds the VirtualObject for every alias at one effect-chain
// position. Objects are shared between states until copy-on-write triggers.
class VirtualState : public ZoneObject {
 public:
  VirtualState(Node* owner, Zone* zone, size_t size)
      : info_(size, nullptr, zone), owner_(owner) {}

  // Shallow copy: shares the VirtualObject pointers of |state|.
  VirtualState(Node* owner, const VirtualState& state)
      : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
        owner_(owner) {
    for (size_t i = 0; i < info_.size(); ++i) {
      if (state.info_[i]) {
        info_[i] = state.info_[i];
      }
    }
  }

  VirtualObject* VirtualObjectFromAlias(size_t alias);
  void SetVirtualObject(Alias alias, VirtualObject* state);
  bool UpdateFrom(VirtualState* state, Zone* zone);
  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                 CommonOperatorBuilder* common, Node* at);
  size_t size() const { return info_.size(); }
  Node* owner() const { return owner_; }
  // Ensures |obj| is owned by this state, cloning it if necessary.
  VirtualObject* Copy(VirtualObject* obj, Alias alias);
  // Marks all contained objects copy-on-write.
  void SetCopyRequired() {
    for (VirtualObject* obj : info_) {
      if (obj) obj->SetCopyRequired();
    }
  }

 private:
  ZoneVector<VirtualObject*> info_;  // Indexed by Alias; nullptr = untracked.
  Node* owner_;                      // Effect node this state belongs to.

  DISALLOW_COPY_AND_ASSIGN(VirtualState);
};

288 289 290
// Scratch storage reused across merge operations to avoid reallocation:
// the incoming states, the objects found for one alias, and the candidate
// field values (which double as phi inputs).
class MergeCache : public ZoneObject {
 public:
  explicit MergeCache(Zone* zone)
      : states_(zone), objects_(zone), fields_(zone) {
    // Typical merge arity is small; reserve to avoid growth in the hot path.
    states_.reserve(5);
    objects_.reserve(5);
    fields_.reserve(5);
  }
  ZoneVector<VirtualState*>& states() { return states_; }
  ZoneVector<VirtualObject*>& objects() { return objects_; }
  ZoneVector<Node*>& fields() { return fields_; }
  void Clear() {
    states_.clear();
    objects_.clear();
    fields_.clear();
  }
  // Fills objects_ for |alias| from states_; returns the minimum field count.
  size_t LoadVirtualObjectsFromStatesFor(Alias alias);
  // Fills objects_ with the objects referenced by the nodes in fields_.
  void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
                                       const ZoneVector<Alias>& aliases);
  // Collects field values at |pos| into fields_; returns the unique
  // representative or nullptr if the objects disagree.
  Node* GetFields(size_t pos);

 private:
  ZoneVector<VirtualState*> states_;
  ZoneVector<VirtualObject*> objects_;
  ZoneVector<Node*> fields_;

  DISALLOW_COPY_AND_ASSIGN(MergeCache);
};

317
// Collects into objects_ the VirtualObject each cached state holds for
// |alias|. Returns the smallest field count among them (size_t max when no
// state tracks the alias).
size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
  objects_.clear();
  DCHECK_GT(states_.size(), 0u);
  size_t min_fields = std::numeric_limits<size_t>::max();
  for (VirtualState* state : states_) {
    VirtualObject* obj = state->VirtualObjectFromAlias(alias);
    if (obj == nullptr) continue;
    objects_.push_back(obj);
    if (obj->field_count() < min_fields) {
      min_fields = obj->field_count();
    }
  }
  return min_fields;
}

330
void MergeCache::LoadVirtualObjectsForFieldsFrom(
331
    VirtualState* state, const ZoneVector<Alias>& aliases) {
332
  objects_.clear();
333
  size_t max_alias = state->size();
334
  for (Node* field : fields_) {
335
    Alias alias = aliases[field->id()];
336 337
    if (alias >= max_alias) continue;
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
338 339 340 341 342 343 344
      objects_.push_back(obj);
    }
  }
}

// Returns the representative node for field |pos| if all cached objects
// agree on it, otherwise nullptr. Side effect: fields_ is refilled with
// every non-null value found at |pos| (later used as phi inputs).
Node* MergeCache::GetFields(size_t pos) {
  fields_.clear();
  // Seed the representative with the first object's value (nullptr when the
  // first object is too small to have this field).
  Node* rep = pos >= objects_.front()->field_count()
                  ? nullptr
                  : objects_.front()->GetField(pos);
  for (VirtualObject* obj : objects_) {
    if (pos >= obj->field_count()) continue;
    Node* field = obj->GetField(pos);
    if (field) {
      fields_.push_back(field);
    }
    // Any disagreement invalidates the representative.
    if (field != rep) {
      rep = nullptr;
    }
  }
  return rep;
}

361 362 363 364 365 366 367 368 369 370 371
// Copy-on-write helper: returns |obj| unchanged if this state already owns
// it; otherwise clones it into this state's slot for |alias|.
VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
  if (obj->owner() == this) return obj;
  VirtualObject* new_obj =
      new (info_.get_allocator().zone()) VirtualObject(this, *obj);
  TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
        static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
        static_cast<void*>(new_obj));
  info_[alias] = new_obj;
  return new_obj;
}

372 373
// Returns the object tracked for |alias|, or nullptr when untracked.
VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
  return info_[alias];
}

// Installs (or clears, with nullptr) the object tracked for |alias|.
void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
  info_[alias] = obj;
}

// Folds the objects of |from| into this state: objects missing here are
// cloned in, existing ones are updated field-by-field. Returns true if this
// state changed.
bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
  if (from == this) return false;
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    VirtualObject* ls = VirtualObjectFromAlias(alias);
    VirtualObject* rs = from->VirtualObjectFromAlias(alias);

    // Nothing to do when both sides agree or the source has no object.
    if (ls == rs || rs == nullptr) continue;

    if (ls == nullptr) {
      ls = new (zone) VirtualObject(this, *rs);
      SetVirtualObject(alias, ls);
      changed = true;
      continue;
    }

    TRACE("  Updating fields of @%d\n", alias);

    changed = ls->UpdateFrom(*rs) || changed;
  }
  // Bug fix: previously this returned false unconditionally, discarding the
  // accumulated change flag and hiding updates from callers that drive the
  // fixpoint iteration on this result.
  return changed;
}

403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422
namespace {

// Structural equality on phi nodes: two phis are equivalent if they have the
// same arity and pairwise-equivalent value inputs (checked recursively);
// non-phi nodes are equivalent only when identical.
bool IsEquivalentPhi(Node* node1, Node* node2) {
  if (node1 == node2) return true;
  if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
      node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
    return false;
  }
  for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
    Node* input1 = NodeProperties::GetValueInput(node1, i);
    Node* input2 = NodeProperties::GetValueInput(node2, i);
    if (!IsEquivalentPhi(input1, input2)) {
      return false;
    }
  }
  return true;
}

// Checks whether |phi|'s value inputs are pairwise equivalent to |inputs|.
bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
  if (phi->opcode() != IrOpcode::kPhi) return false;
  if (static_cast<size_t>(phi->op()->ValueInputCount()) != inputs.size()) {
    return false;
  }
  for (size_t i = 0; i < inputs.size(); ++i) {
    Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
    if (!IsEquivalentPhi(input, inputs[i])) {
      return false;
    }
  }
  return true;
}

}  // namespace

437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467
// Merges field |i| at merge point |at| when the incoming values disagree:
// either creates a new Phi over cache->fields() (plus the control input) or
// updates the inputs of a previously created merge phi in place.
// Returns true if anything changed.
bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
                                Graph* graph, CommonOperatorBuilder* common) {
  bool changed = false;
  // Value inputs are the field values gathered by GetFields(); captured
  // before the control input is appended below.
  int value_input_count = static_cast<int>(cache->fields().size());
  Node* rep = GetField(i);
  if (!rep || !IsCreatedPhi(i)) {
    Node* control = NodeProperties::GetControlInput(at);
    cache->fields().push_back(control);
    Node* phi = graph->NewNode(
        common->Phi(MachineRepresentation::kTagged, value_input_count),
        value_input_count + 1, &cache->fields().front());
    SetField(i, phi, true);
#ifdef DEBUG
    if (FLAG_trace_turbo_escape) {
      PrintF("    Creating Phi #%d as merge of", phi->id());
      for (int i = 0; i < value_input_count; i++) {
        PrintF(" #%d (%s)", cache->fields()[i]->id(),
               cache->fields()[i]->op()->mnemonic());
      }
      PrintF("\n");
    }
#endif
    changed = true;
  } else {
    // Reuse the existing merge phi; just refresh inputs that differ.
    DCHECK(rep->opcode() == IrOpcode::kPhi);
    for (int n = 0; n < value_input_count; ++n) {
      Node* old = NodeProperties::GetValueInput(rep, n);
      if (old != cache->fields()[n]) {
        changed = true;
        NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
      }
    }
  }
  return changed;
}

// Merges this object's fields at merge point |at| from the objects cached
// for its alias. Fields where all inputs agree take the common value; fields
// where every input contributed a value get a phi; otherwise the field is
// cleared (value unknown). Returns true on any change.
bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                              CommonOperatorBuilder* common) {
  DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
         at->opcode() == IrOpcode::kPhi);
  bool changed = false;
  for (size_t i = 0; i < field_count(); ++i) {
    if (Node* field = cache->GetFields(i)) {
      changed = changed || GetField(i) != field;
      SetField(i, field);
      TRACE("    Field %zu agree on rep #%d\n", i, field->id());
    } else {
      // A phi is only sound when every incoming branch supplied a value,
      // i.e. the collected field count matches the merge arity.
      size_t arity = at->opcode() == IrOpcode::kEffectPhi
                         ? at->op()->EffectInputCount()
                         : at->op()->ValueInputCount();
      if (cache->fields().size() == arity) {
        changed = MergeFields(i, at, cache, graph, common) || changed;
      } else {
        if (GetField(i) != nullptr) {
          TRACE("    Field %zu cleared\n", i);
          changed = true;
        }
        SetField(i, nullptr);
      }
    }
  }
  return changed;
}

501
// Merges the cached incoming states into this state at merge point |at|.
// An alias survives the merge only if every incoming state tracks it;
// otherwise its object is dropped. Returns true on any change.
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                             CommonOperatorBuilder* common, Node* at) {
  DCHECK_GT(cache->states().size(), 0u);
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    cache->objects().clear();
    VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
    bool copy_merge_object = false;
    size_t fields = std::numeric_limits<size_t>::max();
    for (VirtualState* state : cache->states()) {
      if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
        cache->objects().push_back(obj);
        // If the merge target aliases an incoming object we must clone it
        // before mutating, so the incoming state stays intact.
        if (mergeObject == obj) {
          copy_merge_object = true;
        }
        fields = std::min(obj->field_count(), fields);
      }
    }
    if (cache->objects().size() == cache->states().size()) {
      if (!mergeObject) {
        VirtualObject* obj = new (zone)
            VirtualObject(cache->objects().front()->id(), this, zone, fields,
                          cache->objects().front()->IsInitialized());
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else if (copy_merge_object) {
        VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else {
        changed = mergeObject->ResizeFields(fields) || changed;
      }
#ifdef DEBUG
      if (FLAG_trace_turbo_escape) {
        PrintF("  Alias @%d, merging into %p virtual objects", alias,
               static_cast<void*>(mergeObject));
        for (size_t i = 0; i < cache->objects().size(); i++) {
          PrintF(" %p", static_cast<void*>(cache->objects()[i]));
        }
        PrintF("\n");
      }
#endif  // DEBUG
      changed = mergeObject->MergeFrom(cache, at, graph, common) || changed;
    } else {
      // Not all incoming paths track this alias: the object escapes the
      // merge and is removed from this state.
      if (mergeObject) {
        TRACE("  Alias %d, virtual object removed\n", alias);
        changed = true;
      }
      SetVirtualObject(alias, nullptr);
    }
  }
  return changed;
}

557 558
// All containers are zone-allocated; sizing happens later in AssignAliases()
// and ResizeStatusVector().
EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                           Graph* graph, Zone* zone)
    : stack_(zone),
      object_analysis_(object_analysis),
      graph_(graph),
      status_(zone),
      next_free_alias_(0),
      status_stack_(zone),
      aliases_(zone) {}
566 567

// True once the node has been given any status (tracked or escaped).
bool EscapeStatusAnalysis::HasEntry(Node* node) {
  return status_[node->id()] & (kTracked | kEscaped);
}

bool EscapeStatusAnalysis::IsVirtual(Node* node) {
  return IsVirtual(node->id());
}

// Virtual = tracked allocation that has not been marked as escaping.
bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
  return (status_[id] & kTracked) && !(status_[id] & kEscaped);
}

bool EscapeStatusAnalysis::IsEscaped(Node* node) {
  return status_[node->id()] & kEscaped;
}

// Allocations are Allocate nodes and the FinishRegion nodes wrapping them.
bool EscapeStatusAnalysis::IsAllocation(Node* node) {
  return node->opcode() == IrOpcode::kAllocate ||
         node->opcode() == IrOpcode::kFinishRegion;
}

588
// Marks |node| escaped (and tracked). Returns true only on the transition,
// so callers can trigger revisits exactly once.
bool EscapeStatusAnalysis::SetEscaped(Node* node) {
  bool changed = !(status_[node->id()] & kEscaped);
  status_[node->id()] |= kEscaped | kTracked;
  return changed;
}

bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
  return status_[id] & kInQueue;
}

void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
  if (on_stack) {
    status_[id] |= kInQueue;
  } else {
    status_[id] &= ~kInQueue;
  }
}
605

606 607 608 609
// Grows status_ to cover all current graph nodes. The 1.1 factor leaves
// headroom so nodes created during analysis rarely force another resize.
void EscapeStatusAnalysis::ResizeStatusVector() {
  if (status_.size() <= graph()->NodeCount()) {
    status_.resize(graph()->NodeCount() * 1.1, kUnknown);
  }
}

size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
613

614 615 616 617 618
// Drains the worklist to a fixpoint; kOnStack guards against duplicate
// queue entries for the same node.
void EscapeStatusAnalysis::RunStatusAnalysis() {
  ResizeStatusVector();
  while (!status_stack_.empty()) {
    Node* node = status_stack_.back();
    status_stack_.pop_back();
    status_[node->id()] &= ~kOnStack;
    Process(node);
    status_[node->id()] |= kVisited;
  }
}

// Pushes |node| on the worklist unless it is already queued.
void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
  DCHECK_NOT_NULL(node);
  if (!(status_[node->id()] & kOnStack)) {
    status_stack_.push_back(node);
    status_[node->id()] |= kOnStack;
  }
}

// Re-queues all inputs of |node| for reprocessing.
void EscapeStatusAnalysis::RevisitInputs(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (!(status_[input->id()] & kOnStack)) {
      status_stack_.push_back(input);
      status_[input->id()] |= kOnStack;
    }
  }
}

// Re-queues all reachable uses of |node| for reprocessing.
void EscapeStatusAnalysis::RevisitUses(Node* node) {
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
      status_stack_.push_back(use);
      status_[use->id()] |= kOnStack;
    }
  }
}

// Dispatches status propagation for one node from the worklist.
void EscapeStatusAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocate(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadField:
    case IrOpcode::kLoadElement: {
      // A load may stand for a tracked allocation (its replacement); if that
      // replacement escapes through the load's uses, propagate the status.
      if (Node* rep = object_analysis_->GetReplacement(node)) {
        if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
          RevisitInputs(rep);
          RevisitUses(rep);
        }
      }
      RevisitUses(node);
      break;
    }
    case IrOpcode::kPhi:
      if (!HasEntry(node)) {
        status_[node->id()] |= kTracked;
        RevisitUses(node);
      }
      // A phi stays virtual only while every input is an allocation or a
      // non-escaped phi.
      if (!IsAllocationPhi(node) && SetEscaped(node)) {
        RevisitInputs(node);
        RevisitUses(node);
      }
      CheckUsesForEscape(node);
      // Intentional fall through to default.
    default:
      break;
  }
}

693 694 695 696 697 698 699 700 701 702
// A phi is an "allocation phi" if every input is either an allocation or
// another not-yet-escaped phi; only such phis can remain virtual.
bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
    if (IsAllocation(input)) continue;
    return false;
  }
  return true;
}

703 704 705 706
// Storing a value into an object that escapes (or is not a tracked
// allocation) makes the stored value escape too.
void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 1);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
  }
}

715 716 717 718
// Storing a value into an element of an object that escapes (or is not a
// tracked allocation) makes the stored value escape too. Note the value is
// input 2 here (input 1 is the element index).
void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 2);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    // Fixed copy-paste from ProcessStoreField: this is an element store.
    TRACE("Setting #%d (%s) to escaped because of store to element of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
  }
}

727 728 729
// Tracks a newly seen Allocate node. Allocations with a non-constant size
// cannot be scalar-replaced and escape immediately; otherwise escape is
// decided by inspecting the uses.
void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    TRACE("Created status entry for node #%d (%s)\n", node->id(),
          node->op()->mnemonic());
    NumberMatcher size(node->InputAt(0));
    // The size input is expected to be a NumberConstant, never one of the
    // machine-level constant kinds.
    DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
    RevisitUses(node);
    if (!size.HasValue() && SetEscaped(node)) {
      TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
      // This node is already known to escape, uses do not have to be checked
      // for escape.
      return;
    }
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitUses(node);
  }
}

751 752
// Walks the uses of |uses| and decides whether they force |rep| to escape
// (|rep| is usually |uses| itself, or the allocation a load stands for).
// With |phi_escaping| set, any phi use is treated as escaping. Returns true
// exactly when |rep|'s status changed to escaped.
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
  for (Edge edge : uses->use_edges()) {
    Node* use = edge.from();
    if (IsNotReachable(use)) continue;
    // Only value and context inputs can carry the object; skip effect and
    // control edges.
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (use->opcode()) {
      case IrOpcode::kPhi:
        if (phi_escaping && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by phi node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
      // Fallthrough.
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
        // These uses are benign unless the using node itself escapes.
        if (IsEscaped(use) && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by escaping node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kObjectIsSmi:
        // ObjectIsSmi only forces an escape when |rep| is not an allocation
        // (the reducer can answer it for tracked allocations).
        if (!IsAllocation(rep) && SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kSelect:
      // TODO(mstarzinger): The following list of operators will eventually be
      // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
      case IrOpcode::kStringEqual:
      case IrOpcode::kStringLessThan:
      case IrOpcode::kStringLessThanOrEqual:
      case IrOpcode::kTypeGuard:
      case IrOpcode::kPlainPrimitiveToNumber:
      case IrOpcode::kPlainPrimitiveToWord32:
      case IrOpcode::kPlainPrimitiveToFloat64:
      case IrOpcode::kStringCharCodeAt:
      case IrOpcode::kObjectIsCallable:
      case IrOpcode::kObjectIsNumber:
      case IrOpcode::kObjectIsReceiver:
      case IrOpcode::kObjectIsString:
      case IrOpcode::kObjectIsUndetectable:
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      default:
        // Debug guard: a pure (no effect input) non-JS use of an effectful
        // node is unaccounted for in this analysis.
        if (use->op()->EffectInputCount() == 0 &&
            uses->op()->EffectInputCount() > 0 &&
            !IrOpcode::IsJsOpcode(use->opcode())) {
          TRACE("Encountered unaccounted use by #%d (%s)\n", use->id(),
                use->op()->mnemonic());
          UNREACHABLE();
        }
        // Unknown uses are conservatively treated as escaping.
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
    }
  }
  return false;
}

// FinishRegion aliases the allocation it wraps; track it and check whether
// its uses make the region's allocation escape.
void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
  }
}

// Dumps the escaped/virtual verdict for every tracked node.
void EscapeStatusAnalysis::DebugPrint() {
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
      PrintF("Node #%d is %s\n", id,
             (status_[id] & kEscaped) ? "escaping" : "virtual");
    }
  }
}

857 858
EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
    : zone_(zone),
      // Sentinel value returned for slots the analysis did not track
      // (recognizable 0x1c0debad marker in generated code).
      slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
      common_(common),
      status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
      virtual_states_(zone),
      replacements_(zone),
      cycle_detection_(zone),
      cache_(nullptr) {}
867

868 869 870
EscapeAnalysis::~EscapeAnalysis() {}

// Entry point: assigns aliases, then runs the object analysis followed by
// the escape-status analysis — but only when there is at least one tracked
// allocation.
void EscapeAnalysis::Run() {
  replacements_.resize(graph()->NodeCount());
  status_analysis_->AssignAliases();
  if (status_analysis_->AliasCount() > 0) {
    cache_ = new (zone()) MergeCache(zone());
    // Resize again in case alias assignment observed more nodes.
    replacements_.resize(graph()->NodeCount());
    status_analysis_->ResizeStatusVector();
    RunObjectAnalysis();
    status_analysis_->RunStatusAnalysis();
  }
}
881

882
// Reverse DFS from graph end: marks reachable nodes, assigns a fresh Alias
// to every reachable Allocate (and the FinishRegion wrapping it), and
// enqueues them for status analysis. Unreached nodes keep kNotReachable.
void EscapeStatusAnalysis::AssignAliases() {
  // Heuristic stack pre-allocation: ~NodeCount/5, clamped to [32, 1024].
  size_t max_size = 1024;
  size_t min_size = 32;
  size_t stack_size =
      std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
  stack_.reserve(stack_size);
  ResizeStatusVector();
  stack_.push_back(graph()->end());
  CHECK_LT(graph()->NodeCount(), kUntrackable);
  aliases_.resize(graph()->NodeCount(), kNotReachable);
  aliases_[graph()->end()->id()] = kUntrackable;
  status_stack_.reserve(8);
  TRACE("Discovering trackable nodes");
  while (!stack_.empty()) {
    Node* node = stack_.back();
    stack_.pop_back();
    switch (node->opcode()) {
      case IrOpcode::kAllocate:
        if (aliases_[node->id()] >= kUntrackable) {
          aliases_[node->id()] = NextAlias();
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
          EnqueueForStatusAnalysis(node);
        }
        break;
      case IrOpcode::kFinishRegion: {
        Node* allocate = NodeProperties::GetValueInput(node, 0);
        DCHECK_NOT_NULL(allocate);
        if (allocate->opcode() == IrOpcode::kAllocate) {
          if (aliases_[allocate->id()] >= kUntrackable) {
            // The wrapped allocation may not have been visited yet; make
            // sure its inputs are still explored.
            if (aliases_[allocate->id()] == kNotReachable) {
              stack_.push_back(allocate);
            }
            aliases_[allocate->id()] = NextAlias();
            TRACE(" @%d:%s#%u", aliases_[allocate->id()],
                  allocate->op()->mnemonic(), allocate->id());
            EnqueueForStatusAnalysis(allocate);
          }
          // FinishRegion shares the alias of its allocation.
          aliases_[node->id()] = aliases_[allocate->id()];
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
        }
        break;
      }
      default:
        DCHECK_EQ(aliases_[node->id()], kUntrackable);
        break;
    }
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (aliases_[input->id()] == kNotReachable) {
        stack_.push_back(input);
        aliases_[input->id()] = kUntrackable;
      }
    }
  }
  TRACE("\n");
}

941 942 943 944 945 946 947
// Returns true if {node} was classified as unreachable during alias
// discovery. Nodes outside the alias table are conservatively treated as
// reachable.
bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
  const NodeId id = node->id();
  return id < aliases_.size() && aliases_[id] == kNotReachable;
}

948
// Fixpoint worklist pass: walks the effect chains starting at the graph's
// start node and lets Process() build/merge virtual states per node.
void EscapeAnalysis::RunObjectAnalysis() {
  virtual_states_.resize(graph()->NodeCount());
  ZoneDeque<Node*> queue(zone());
  queue.push_back(graph()->start());
  // Dangling loads collected for immediate processing (see below).
  ZoneVector<Node*> danglers(zone());
  while (!queue.empty()) {
    // The queue is used as a stack (back) for DFS, with phis pushed to the
    // front so they are visited last.
    Node* node = queue.back();
    queue.pop_back();
    status_analysis_->SetInQueue(node->id(), false);
    // Process() returns true when uses should be (re)visited.
    if (Process(node)) {
      for (Edge edge : node->use_edges()) {
        Node* use = edge.from();
        if (status_analysis_->IsNotReachable(use)) {
          continue;
        }
        if (NodeProperties::IsEffectEdge(edge)) {
          // Iteration order: depth first, but delay phis.
          // We need DFS to avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
          if (use->opcode() == IrOpcode::kEffectPhi) {
            if (!status_analysis_->IsInQueue(use->id())) {
              queue.push_front(use);
            }
          } else if ((use->opcode() != IrOpcode::kLoadField &&
                      use->opcode() != IrOpcode::kLoadElement) ||
                     !status_analysis_->IsDanglingEffectNode(use)) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_back(use);
            }
          } else {
            danglers.push_back(use);
          }
        }
      }
      // Danglers need to be processed immediately, even if they are
      // on the stack. Since they do not have effect outputs,
      // we don't have to track whether they are on the stack.
      queue.insert(queue.end(), danglers.begin(), danglers.end());
      danglers.clear();
    }
  }
#ifdef DEBUG
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
#endif
}

997 998 999 1000
// Determines (and caches in status_) whether {node} is "dangling": an
// effectful node whose effect output has no reachable effect use.
bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
  // Fast path: the answer was computed on an earlier query.
  if (status_[node->id()] & kDanglingComputed) {
    return status_[node->id()] & kDangling;
  }
  if (node->op()->EffectInputCount() == 0 ||
      node->op()->EffectOutputCount() == 0 ||
      (node->op()->EffectInputCount() == 1 &&
       NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
    // The start node is used as sentinel for nodes that are in general
    // effectful, but of which an analysis has determined that they do not
    // produce effects in this instance. We don't consider these nodes dangling.
    status_[node->id()] |= kDanglingComputed;
    return false;
  }
  // Any reachable effect use means the node is not dangling.
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      status_[node->id()] |= kDanglingComputed;
      return false;
    }
  }
  status_[node->id()] |= kDanglingComputed | kDangling;
  return true;
}

1023 1024 1025 1026 1027 1028 1029 1030 1031
// Determines (and caches in status_) whether the effect chain branches at
// {node}, i.e. whether it has more than one reachable, non-dangling effect
// use.
bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
  // Fast path: the answer was computed on an earlier query.
  if (status_[node->id()] & kBranchPointComputed) {
    return status_[node->id()] & kBranchPoint;
  }
  int count = 0;
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      // Dangling loads do not continue the effect chain, so they do not
      // count as branching uses.
      if ((use->opcode() == IrOpcode::kLoadField ||
           use->opcode() == IrOpcode::kLoadElement ||
           use->opcode() == IrOpcode::kLoad) &&
          IsDanglingEffectNode(use))
        continue;
      if (++count > 1) {
        status_[node->id()] |= kBranchPointComputed | kBranchPoint;
        return true;
      }
    }
  }
  status_[node->id()] |= kBranchPointComputed;
  return false;
}

1047
// Dispatches {node} to the opcode-specific handler that updates the virtual
// state. Returns true when the node's uses should be (re)enqueued; only
// effect phis can report "no change" (false) to stop re-propagation.
bool EscapeAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocation(node);
      break;
    case IrOpcode::kBeginRegion:
      ForwardVirtualState(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kLoadField:
      ProcessLoadField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadElement:
      ProcessLoadElement(node);
      break;
    case IrOpcode::kStart:
      ProcessStart(node);
      break;
    case IrOpcode::kEffectPhi:
      // Note: no break needed here; the previous trailing break after this
      // return was unreachable and has been removed.
      return ProcessEffectPhi(node);
    default:
      // Generic effectful node: forward the state, then conservatively
      // invalidate fields of any virtual object it consumes.
      if (node->op()->EffectInputCount() > 0) {
        ForwardVirtualState(node);
      }
      ProcessAllocationUsers(node);
      break;
  }
  return true;
}

1086 1087 1088
// For a generic node that consumes allocations, conservatively clears all
// fields of any virtual object flowing into a value/context input, since an
// unknown operation may mutate the object.
void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    Node* use = edge.from();
    // Skip non-value/context inputs (effect, control, frame state).
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (node->opcode()) {
      // These opcodes have dedicated handling elsewhere and do not
      // invalidate field information.
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kObjectIsSmi:
        break;
      default:
        VirtualState* state = virtual_states_[node->id()];
        if (VirtualObject* obj =
                GetVirtualObject(state, ResolveReplacement(input))) {
          if (!obj->AllFieldsClear()) {
            // Copy-on-write before wiping the field information.
            obj = CopyForModificationAt(obj, state, node);
            obj->ClearAllFields();
            TRACE("Cleared all fields of @%d:#%d\n",
                  status_analysis_->GetAlias(obj->id()), obj->id());
          }
        }
        break;
    }
  }
}

1120 1121
// Returns a virtual state that is safe to mutate at {node}: if {state} is
// already owned by {node} it is returned as-is, otherwise a private copy is
// created and installed for {node}.
VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
  if (state->owner() == node) return state;
  VirtualState* copy = new (zone()) VirtualState(node, *state);
  virtual_states_[node->id()] = copy;
  TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
        static_cast<void*>(state), static_cast<void*>(copy),
        node->op()->mnemonic(), node->id());
  return copy;
}

// Returns a virtual object that is safe to mutate at {node}. If {obj} needs
// a copy, the owning state is copied first, and all other objects that are
// marked copy-on-write are copied along with it.
VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
  if (obj->NeedCopyForModification()) {
    state = CopyForModificationAt(state, node);
    // TODO(tebbi): this copies the complete virtual state. Replace with a more
    // precise analysis of which objects are actually affected by the change.
    Alias changed_alias = status_analysis_->GetAlias(obj->id());
    for (Alias alias = 0; alias < state->size(); ++alias) {
      if (VirtualObject* next_obj = state->VirtualObjectFromAlias(alias)) {
        if (alias != changed_alias && next_obj->NeedCopyForModification()) {
          state->Copy(next_obj, alias);
        }
      }
    }
    // Copy the object being modified last and return the fresh copy.
    return state->Copy(obj, changed_alias);
  }
  return obj;
}

1153
// Propagates the virtual state along {node}'s single effect input. If {node}
// already has a state, it is updated in place; otherwise the predecessor's
// state is shared, and marked copy-on-write when the effect chain branches
// at the predecessor or a frame state may observe the state.
void EscapeAnalysis::ForwardVirtualState(Node* node) {
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
#ifdef DEBUG
  // Only (dangling) loads may legitimately lack an effect successor here.
  if (node->opcode() != IrOpcode::kLoadField &&
      node->opcode() != IrOpcode::kLoadElement &&
      node->opcode() != IrOpcode::kLoad &&
      status_analysis_->IsDanglingEffectNode(node)) {
    // Fixed typo in the debug message ("Dangeling" -> "Dangling").
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
    UNREACHABLE();
  }
#endif  // DEBUG
  Node* effect = NodeProperties::GetEffectInput(node);
  DCHECK_NOT_NULL(virtual_states_[effect->id()]);
  if (virtual_states_[node->id()]) {
    virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
                                            zone());
  } else {
    virtual_states_[node->id()] = virtual_states_[effect->id()];
    TRACE("Forwarding object state %p from %s#%d to %s#%d",
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
          node->id());
    if (status_analysis_->IsEffectBranchPoint(effect) ||
        OperatorProperties::HasFrameStateInput(node->op())) {
      // The state is now shared by several effect paths; force a copy
      // before any of them mutates it.
      virtual_states_[node->id()]->SetCopyRequired();
      TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
            effect->id());
    }
    TRACE("\n");
  }
}

1186
// The start node seeds the analysis with an initial, empty virtual state.
void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  VirtualState* initial_state =
      new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
  virtual_states_[node->id()] = initial_state;
}

1192
// Merges the virtual states of all effect inputs of an effect phi into the
// phi's own state. Returns true if the merged state changed, so the caller
// can re-propagate along the uses.
bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    // First visit: allocate a fresh state for the phi.
    mergeState =
        new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
          static_cast<void*>(mergeState));
  }

  cache_->Clear();

  TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
        static_cast<void*>(mergeState));

  // Collect the input states to be merged.
  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
      if (state == mergeState) {
        // An input shares the phi's own state (e.g. a loop back edge);
        // merging into it in place would be wrong, so start a new state.
        mergeState = new (zone())
            VirtualState(node, zone(), status_analysis_->AliasCount());
        virtual_states_[node->id()] = mergeState;
        changed = true;
      }
    }
    TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
          input->op()->mnemonic());
  }
  TRACE("\n");

  // Nothing to merge yet (no input has a state).
  if (cache_->states().size() == 0) {
    return changed;
  }

  changed =
      mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;

  TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");

  if (changed) {
    // Merging may have created new phi nodes; grow the status vector.
    status_analysis_->ResizeStatusVector();
  }
  return changed;
}

1243
// Registers a virtual object for an Allocate node in the current virtual
// state. If the allocation size is a known number constant, the object gets
// a fixed field count; otherwise it is created untracked.
void EscapeAnalysis::ProcessAllocation(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  ForwardVirtualState(node);
  VirtualState* state = virtual_states_[node->id()];
  Alias alias = status_analysis_->GetAlias(node->id());

  // Check if we have already processed this node.
  if (state->VirtualObjectFromAlias(alias)) {
    return;
  }

  // Do not mutate a state owned by an effect phi; take a private copy.
  if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
    state = CopyForModificationAt(state, node);
  }

  NumberMatcher size(node->InputAt(0));
  // The size input is expected to be a NumberConstant (or non-constant),
  // never a raw machine-level constant.
  DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
  if (size.HasValue()) {
    // Known size: track one field per pointer-sized slot.
    VirtualObject* obj = new (zone()) VirtualObject(
        node->id(), state, zone(), size.Value() / kPointerSize, false);
    state->SetVirtualObject(alias, obj);
  } else {
    // Unknown size: create an untracked placeholder object.
    state->SetVirtualObject(
        alias, new (zone()) VirtualObject(node->id(), state, zone()));
  }
}

1273
// FinishRegion aliases the Allocate inside the region; once reached, the
// allocation's virtual object is known to be fully initialized.
void EscapeAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  ForwardVirtualState(node);
  Node* allocation = NodeProperties::GetValueInput(node, 0);
  if (allocation->opcode() != IrOpcode::kAllocate) return;
  VirtualState* state = virtual_states_[node->id()];
  VirtualObject* obj =
      state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
  DCHECK_NOT_NULL(obj);
  obj->SetInitialized();
}

1286
// Returns the recorded replacement for {node}, or nullptr if none exists.
Node* EscapeAnalysis::replacement(Node* node) {
  const NodeId id = node->id();
  return id < replacements_.size() ? replacements_[id] : nullptr;
}

// Records {rep} as the replacement for {node}; returns true if this differs
// from the previously recorded replacement.
bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
  Node*& slot = replacements_[node->id()];
  if (slot == rep) return false;
  slot = rep;
  return true;
}

// Updates {node}'s replacement to {rep}, tracing the change. Returns true
// if the replacement actually changed.
bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
                                       Node* rep) {
  if (!SetReplacement(node, rep)) return false;
  if (rep) {
    TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
          rep->op()->mnemonic());
  } else {
    TRACE("Replacement of #%d cleared\n", node->id());
  }
  return true;
}

// Follows the replacement chain to its end and returns the final node
// ({node} itself if it has no replacement).
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
  for (Node* next = replacement(node); next != nullptr;
       next = replacement(node)) {
    node = next;
  }
  return node;
}

// Like ResolveReplacement, but returns nullptr when {node} has no
// replacement at all.
Node* EscapeAnalysis::GetReplacement(Node* node) {
  Node* last = nullptr;
  for (Node* next = replacement(node); next != nullptr;
       next = replacement(node)) {
    last = next;
    node = next;
  }
  return last;
}

1326
// Returns whether {node} is known to be virtual; nodes outside the status
// vector are conservatively treated as non-virtual.
bool EscapeAnalysis::IsVirtual(Node* node) {
  return node->id() < status_analysis_->GetStatusVectorSize() &&
         status_analysis_->IsVirtual(node);
}

// Returns whether {node} is known to escape; nodes outside the status
// vector are conservatively treated as non-escaping.
bool EscapeAnalysis::IsEscaped(Node* node) {
  return node->id() < status_analysis_->GetStatusVectorSize() &&
         status_analysis_->IsEscaped(node);
}

1340 1341 1342 1343 1344 1345 1346 1347 1348 1349
// Compares two virtual objects for equality after resolving replacements;
// equality holds when the resolved nodes are equivalent phis (or identical).
bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
  DCHECK(IsVirtual(left) && IsVirtual(right));
  left = ResolveReplacement(left);
  right = ResolveReplacement(right);
  // Simplified the redundant `if (cond) return true; return false;` form.
  return IsEquivalentPhi(left, right);
}

1350 1351
namespace {

// Checks that the field access addresses a whole pointer-sized slot. On
// big-endian targets the value sits at the end of the slot, so the access
// end (offset + element size) must be slot-aligned instead of the offset.
bool IsOffsetForFieldAccessCorrect(const FieldAccess& access) {
#if V8_TARGET_LITTLE_ENDIAN
  return (access.offset % kPointerSize) == 0;
#else
  return ((access.offset +
           (1 << ElementSizeLog2Of(access.machine_type.representation()))) %
          kPointerSize) == 0;
#endif
}

// Converts a field access byte offset into a pointer-sized field index.
int OffsetForFieldAccess(Node* node) {
  FieldAccess access = FieldAccessOf(node->op());
  DCHECK(IsOffsetForFieldAccessCorrect(access));
  return access.offset / kPointerSize;
}

// Converts an element access at {index} into a field index, accounting for
// the pointer-aligned header.
int OffsetForElementAccess(Node* node, int index) {
  ElementAccess access = ElementAccessOf(node->op());
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  DCHECK_EQ(access.header_size % kPointerSize, 0);
  return access.header_size / kPointerSize + index;
}

}  // namespace

1378
// Handles a load whose object input is a phi of (potentially) virtual
// objects: if the field is known in every incoming object, a value phi over
// the field values replaces the load.
void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
                                        VirtualState* state) {
  TRACE("Load #%d from phi #%d", load->id(), from->id());

  // Collect the phi's value inputs as lookup keys.
  cache_->fields().clear();
  for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
    Node* input = NodeProperties::GetValueInput(load, i);
    cache_->fields().push_back(input);
  }

  cache_->LoadVirtualObjectsForFieldsFrom(state,
                                          status_analysis_->GetAliasMap());
  // Proceed only if every phi input resolved to a virtual object...
  if (cache_->objects().size() == cache_->fields().size()) {
    cache_->GetFields(offset);
    // ...and every object has a value for the requested field.
    if (cache_->fields().size() == cache_->objects().size()) {
      Node* rep = replacement(load);
      if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
        // Build a new value phi over the field values, reusing the
        // control input of the original object phi.
        int value_input_count = static_cast<int>(cache_->fields().size());
        cache_->fields().push_back(NodeProperties::GetControlInput(from));
        Node* phi = graph()->NewNode(
            common()->Phi(MachineRepresentation::kTagged, value_input_count),
            value_input_count + 1, &cache_->fields().front());
        // A node was added; keep the status vector in sync.
        status_analysis_->ResizeStatusVector();
        SetReplacement(load, phi);
        TRACE(" got phi created.\n");
      } else {
        TRACE(" has already phi #%d.\n", rep->id());
      }
    } else {
      TRACE(" has incomplete field info.\n");
    }
  } else {
    TRACE(" has incomplete virtual object info.\n");
  }
}

1414
// Tries to replace a LoadField from a virtual object with the value last
// stored to that field; falls back to phi handling or clears the
// replacement when the value is unknown.
void EscapeAnalysis::ProcessLoadField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, from)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) {
      // We have a load from a field that is not inside the {object}. This
      // can only happen with conflicting type feedback and for dead {node}s.
      // For now, we just mark the {object} as escaping.
      // TODO(turbofan): Consider introducing an Undefined or None operator
      // that we can replace this load with, since we know it's dead code.
      if (status_analysis_->SetEscaped(from)) {
        TRACE(
            "Setting #%d (%s) to escaped because load field #%d from "
            "offset %d outside of object\n",
            from->id(), from->op()->mnemonic(), node->id(), offset);
      }
      return;
    }
    Node* value = object->GetField(offset);
    if (value) {
      value = ResolveReplacement(value);
    }
    // Record that the load has this alias.
    UpdateReplacement(state, node, value);
  } else if (from->opcode() == IrOpcode::kPhi &&
             IsOffsetForFieldAccessCorrect(FieldAccessOf(node->op()))) {
    int offset = OffsetForFieldAccess(node);
    // Only binary phis are supported for now.
    ProcessLoadFromPhi(offset, from, node, state);
  } else {
    // Unknown object: any stale replacement must be dropped.
    UpdateReplacement(state, node, nullptr);
  }
}

// Tries to replace a LoadElement from a virtual object with the stored
// value, when the index is a compile-time number constant; a non-constant
// index forces the object to escape.
void EscapeAnalysis::ProcessLoadElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  // Indices are expected as NumberConstants, never raw machine constants.
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, from)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      // Out-of-bounds constant index: leave the load alone.
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* value = object->GetField(offset);
      if (value) {
        value = ResolveReplacement(value);
      }
      // Record that the load has this alias.
      UpdateReplacement(state, node, value);
    } else if (from->opcode() == IrOpcode::kPhi) {
      int offset = OffsetForElementAccess(node, index.Value());
      ProcessLoadFromPhi(offset, from, node, state);
    } else {
      // Unknown object: any stale replacement must be dropped.
      UpdateReplacement(state, node, nullptr);
    }
  } else {
    // We have a load from a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(from)) {
      TRACE(
          "Setting #%d (%s) to escaped because load element #%d from non-const "
          "index #%d (%s)\n",
          from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
  }
}

1492
// Records a StoreField into a virtual object's field table, copying the
// object first when modification requires it. Stores outside the object's
// bounds force it to escape.
void EscapeAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, to)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) {
      // We have a store to a field that is not inside the {object}. This
      // can only happen with conflicting type feedback and for dead {node}s.
      // For now, we just mark the {object} as escaping.
      // TODO(turbofan): Consider just eliminating the store in the reducer
      // pass, as it's dead code anyways.
      if (status_analysis_->SetEscaped(to)) {
        TRACE(
            "Setting #%d (%s) to escaped because store field #%d to "
            "offset %d outside of object\n",
            to->id(), to->op()->mnemonic(), node->id(), offset);
      }
      return;
    }
    Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
    // TODO(mstarzinger): The following is a workaround to not track some well
    // known raw fields. We only ever store default initial values into these
    // fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
    if (val->opcode() == IrOpcode::kInt32Constant ||
        val->opcode() == IrOpcode::kInt64Constant) {
      DCHECK(FieldAccessOf(node->op()).offset == JSFunction::kCodeEntryOffset ||
             FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
      val = slot_not_analyzed_;
    }
    if (object->GetField(offset) != val) {
      // Copy-on-write before recording the new field value.
      object = CopyForModificationAt(object, state, node);
      object->SetField(offset, val);
    }
  }
}

1531 1532 1533
// Records a StoreElement into a virtual object's field table when the index
// is a compile-time number constant; a non-constant index forces the object
// to escape and wipes its field information.
void EscapeAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  // Indices are expected as NumberConstants, never raw machine constants.
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  VirtualState* state = virtual_states_[node->id()];
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      // Out-of-bounds constant index: leave the store alone.
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
      if (object->GetField(offset) != val) {
        // Copy-on-write before recording the new field value.
        object = CopyForModificationAt(object, state, node);
        object->SetField(offset, val);
      }
    }
  } else {
    // We have a store to a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(to)) {
      TRACE(
          "Setting #%d (%s) to escaped because store element #%d to non-const "
          "index #%d (%s)\n",
          to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
    // Field values are no longer reliable after an unknown-index store.
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      if (!object->AllFieldsClear()) {
        object = CopyForModificationAt(object, state, node);
        object->ClearAllFields();
        TRACE("Cleared all fields of @%d:#%d\n",
              status_analysis_->GetAlias(object->id()), object->id());
      }
    }
  }
}

1574 1575 1576 1577
// Returns (creating on demand) the ObjectState node that materializes the
// virtual object for {node} at the given {effect} position, recursively
// replacing nested virtual-object fields with their own ObjectStates.
// Returns nullptr when {node} does not denote a virtual object here.
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
                                               ResolveReplacement(node))) {
      if (Node* object_state = vobj->GetObjectState()) {
        // Cached from an earlier request.
        return object_state;
      } else {
        // Gather the currently known field values as ObjectState inputs.
        cache_->fields().clear();
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            cache_->fields().push_back(field);
          }
        }
        int input_count = static_cast<int>(cache_->fields().size());
        Node* new_object_state =
            graph()->NewNode(common()->ObjectState(input_count), input_count,
                             &cache_->fields().front());
        // Cache before recursing, so self-references terminate.
        vobj->SetObjectState(new_object_state);
        TRACE(
            "Creating object state #%d for vobj %p (from node #%d) at effect "
            "#%d\n",
            new_object_state->id(), static_cast<void*>(vobj), node->id(),
            effect->id());
        // Now fix uses of other objects.
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            if (Node* field_object_state =
                    GetOrCreateObjectState(effect, field)) {
              NodeProperties::ReplaceValueInput(
                  new_object_state, field_object_state, static_cast<int>(i));
            }
          }
        }
        return new_object_state;
      }
    }
  }
  return nullptr;
}

1616 1617 1618 1619 1620 1621 1622 1623 1624 1625 1626 1627 1628 1629 1630 1631 1632 1633 1634 1635 1636
// Detects whether the virtual object reachable from {node} at {effect}
// (transitively through its fields) contains a reference cycle, using
// cycle_detection_ as the DFS visiting set.
bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
                                               ResolveReplacement(node))) {
      // Revisiting an object on the current DFS path means a cycle.
      if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
      cycle_detection_.insert(vobj);
      bool cycle_detected = false;
      for (size_t i = 0; i < vobj->field_count(); ++i) {
        if (Node* field = vobj->GetField(i)) {
          if (IsCyclicObjectState(effect, field)) cycle_detected = true;
        }
      }
      cycle_detection_.erase(vobj);
      return cycle_detected;
    }
  }
  return false;
}

1637
// Dumps one virtual state: every tracked alias, its object, and all known
// field values. Debug/tracing only.
void EscapeAnalysis::DebugPrintState(VirtualState* state) {
  PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
  for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
    VirtualObject* object = state->VirtualObjectFromAlias(alias);
    if (object == nullptr) continue;
    PrintF("  Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
           object->field_count());
    for (size_t i = 0; i < object->field_count(); ++i) {
      Node* f = object->GetField(i);
      if (f == nullptr) continue;
      PrintF("    Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
    }
  }
}

1652
void EscapeAnalysis::DebugPrint() {
1653 1654 1655 1656 1657 1658 1659 1660 1661 1662
  ZoneVector<VirtualState*> object_states(zone());
  for (NodeId id = 0; id < virtual_states_.size(); id++) {
    if (VirtualState* states = virtual_states_[id]) {
      if (std::find(object_states.begin(), object_states.end(), states) ==
          object_states.end()) {
        object_states.push_back(states);
      }
    }
  }
  for (size_t n = 0; n < object_states.size(); n++) {
1663
    DebugPrintState(object_states[n]);
1664 1665 1666
  }
}

1667 1668
// Looks up the virtual object for {node} in {state} via its alias, or
// nullptr when the node has no (in-range) alias.
VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
                                                Node* node) {
  if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
  const Alias alias = status_analysis_->GetAlias(node->id());
  return alias < state->size() ? state->VirtualObjectFromAlias(alias)
                               : nullptr;
}

1675
bool EscapeAnalysis::ExistsVirtualAllocate() {
1676
  for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
1677
    Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
1678
    if (alias < EscapeStatusAnalysis::kUntrackable) {
1679
      if (status_analysis_->IsVirtual(static_cast<int>(id))) {
1680 1681 1682 1683 1684 1685 1686
        return true;
      }
    }
  }
  return false;
}

1687 1688
// The graph is owned by the status analysis; exposed here for convenience.
Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }

1689 1690 1691
}  // namespace compiler
}  // namespace internal
}  // namespace v8