Commit 46473f82 authored by Tobias Tebbi, committed by Commit Bot

[turbofan] delete old implementation of escape analysis

Bug: 
Change-Id: Ib9e0d0844ad5e7bc6cd038f736546cad77669321
Reviewed-on: https://chromium-review.googlesource.com/641530
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47699}
parent 8efc5f04
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -1419,10 +1419,6 @@ v8_source_set("v8_base") {
     "src/compiler/memory-optimizer.h",
     "src/compiler/move-optimizer.cc",
     "src/compiler/move-optimizer.h",
-    "src/compiler/new-escape-analysis-reducer.cc",
-    "src/compiler/new-escape-analysis-reducer.h",
-    "src/compiler/new-escape-analysis.cc",
-    "src/compiler/new-escape-analysis.h",
     "src/compiler/node-aux-data.h",
     "src/compiler/node-cache.cc",
     "src/compiler/node-cache.h",
@@ -2079,8 +2075,6 @@ v8_source_set("v8_base") {
   jumbo_excluded_sources = [
     # TODO(mostynb@opera.com): don't exclude these http://crbug.com/752428
     "src/profiler/heap-snapshot-generator.cc",  # Macro clash in mman-linux.h
-    "src/compiler/escape-analysis.cc",  # Symbol clashes with new-escape-analysis.cc
-    "src/compiler/escape-analysis-reducer.cc",  # Symbol clashes with new-escape-analysis-reducer.cc
   ]
 }
--- a/src/compiler/escape-analysis-reducer.cc
+++ b/src/compiler/escape-analysis-reducer.cc
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2017 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/escape-analysis-reducer.h"

 #include "src/compiler/all-nodes.h"
-#include "src/compiler/js-graph.h"
 #include "src/compiler/simplified-operator.h"
 #include "src/compiler/type-cache.h"
-#include "src/counters.h"
 #include "src/frame-constants.h"

 namespace v8 {
@@ -24,111 +22,33 @@ namespace compiler {
 #define TRACE(...)
 #endif  // DEBUG
-EscapeAnalysisReducer::EscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
-                                             EscapeAnalysis* escape_analysis,
-                                             Zone* zone)
+EscapeAnalysisReducer::EscapeAnalysisReducer(
+    Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
+    Zone* zone)
     : AdvancedReducer(editor),
       jsgraph_(jsgraph),
-      escape_analysis_(escape_analysis),
-      zone_(zone),
-      fully_reduced_(static_cast<int>(jsgraph->graph()->NodeCount() * 2), zone),
-      exists_virtual_allocate_(escape_analysis->ExistsVirtualAllocate()) {}
-
-Reduction EscapeAnalysisReducer::ReduceNode(Node* node) {
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
-      fully_reduced_.Contains(node->id())) {
-    return NoChange();
-  }
-  switch (node->opcode()) {
-    case IrOpcode::kLoadField:
-    case IrOpcode::kLoadElement:
-      return ReduceLoad(node);
-    case IrOpcode::kStoreField:
-    case IrOpcode::kStoreElement:
-      return ReduceStore(node);
-    case IrOpcode::kCheckMaps:
-      return ReduceCheckMaps(node);
-    case IrOpcode::kAllocate:
-      return ReduceAllocate(node);
-    case IrOpcode::kFinishRegion:
-      return ReduceFinishRegion(node);
-    case IrOpcode::kReferenceEqual:
-      return ReduceReferenceEqual(node);
-    case IrOpcode::kObjectIsSmi:
-      return ReduceObjectIsSmi(node);
-    // FrameStates and Value nodes are preprocessed here,
-    // and visited via ReduceFrameStateUses from their user nodes.
-    case IrOpcode::kFrameState:
-    case IrOpcode::kStateValues: {
-      if (node->id() >= static_cast<NodeId>(fully_reduced_.length()) ||
-          fully_reduced_.Contains(node->id())) {
-        break;
-      }
-      bool depends_on_object_state = false;
-      for (Node* input : node->inputs()) {
-        switch (input->opcode()) {
-          case IrOpcode::kAllocate:
-          case IrOpcode::kFinishRegion:
-            depends_on_object_state =
-                depends_on_object_state || escape_analysis()->IsVirtual(input);
-            break;
-          case IrOpcode::kFrameState:
-          case IrOpcode::kStateValues:
-            depends_on_object_state =
-                depends_on_object_state ||
-                input->id() >= static_cast<NodeId>(fully_reduced_.length()) ||
-                !fully_reduced_.Contains(input->id());
-            break;
-          default:
-            break;
-        }
-      }
-      if (!depends_on_object_state) {
-        fully_reduced_.Add(node->id());
-      }
-      return NoChange();
-    }
-    case IrOpcode::kNewUnmappedArgumentsElements:
-      arguments_elements_.insert(node);
-      break;
-    default:
-      // TODO(sigurds): Change this to GetFrameStateInputCount once
-      // it is working. For now we use EffectInputCount > 0 to determine
-      // whether a node might have a frame state input.
-      if (exists_virtual_allocate_ && node->op()->EffectInputCount() > 0) {
-        return ReduceFrameStateUses(node);
-      }
-      break;
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::Reduce(Node* node) {
-  Reduction reduction = ReduceNode(node);
-  if (reduction.Changed() && node != reduction.replacement()) {
-    escape_analysis()->SetReplacement(node, reduction.replacement());
-  }
-  return reduction;
-}
-
-namespace {
-
-Node* MaybeGuard(JSGraph* jsgraph, Zone* zone, Node* original,
-                 Node* replacement) {
+      analysis_result_(analysis_result),
+      object_id_cache_(zone),
+      node_cache_(jsgraph->graph(), zone),
+      arguments_elements_(zone),
+      zone_(zone) {}
+
+Node* EscapeAnalysisReducer::MaybeGuard(Node* original, Node* replacement) {
   // We might need to guard the replacement if the type of the {replacement}
   // node is not in a sub-type relation to the type of the {original} node.
   Type* const replacement_type = NodeProperties::GetType(replacement);
   Type* const original_type = NodeProperties::GetType(original);
   if (!replacement_type->Is(original_type)) {
     Node* const control = NodeProperties::GetControlInput(original);
-    replacement = jsgraph->graph()->NewNode(
-        jsgraph->common()->TypeGuard(original_type), replacement, control);
+    replacement = jsgraph()->graph()->NewNode(
+        jsgraph()->common()->TypeGuard(original_type), replacement, control);
     NodeProperties::SetType(replacement, original_type);
   }
   return replacement;
 }
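Aside: the guard logic above only materializes a TypeGuard when the replacement's type is not a subtype of the original's. A minimal, self-contained sketch of that subtype check, using toy stand-ins for TurboFan's Type and Node (all names below are hypothetical, not V8 API):

#include <cassert>
#include <memory>
#include <vector>

struct ToyType {
  int lower, upper;  // model a type as an integer range
  bool Is(const ToyType& other) const {  // subtype = range inclusion
    return lower >= other.lower && upper <= other.upper;
  }
};

struct ToyNode {
  ToyType type;
  std::vector<ToyNode*> inputs;
};

// Return `replacement` unchanged when its type already fits; otherwise wrap
// it in a new guard node that narrows it to the original's type.
ToyNode* MaybeGuard(ToyNode* original, ToyNode* replacement,
                    std::vector<std::unique_ptr<ToyNode>>* arena) {
  if (replacement->type.Is(original->type)) return replacement;
  arena->push_back(std::make_unique<ToyNode>());
  ToyNode* guard = arena->back().get();
  guard->type = original->type;   // the guard asserts the original type
  guard->inputs = {replacement};  // and forwards the replacement value
  return guard;
}

int main() {
  std::vector<std::unique_ptr<ToyNode>> arena;
  ToyNode original{{0, 10}, {}}, replacement{{-5, 20}, {}};
  ToyNode* r = MaybeGuard(&original, &replacement, &arena);
  assert(r != &replacement && r->type.Is(original.type));
}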
+namespace {
+
 Node* SkipTypeGuards(Node* node) {
   while (node->opcode() == IrOpcode::kTypeGuard) {
     node = NodeProperties::GetValueInput(node, 0);
@@ -138,269 +58,156 @@ Node* SkipTypeGuards(Node* node) {
 }  // namespace
-Reduction EscapeAnalysisReducer::ReduceLoad(Node* node) {
-  DCHECK(node->opcode() == IrOpcode::kLoadField ||
-         node->opcode() == IrOpcode::kLoadElement);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(
-          SkipTypeGuards(NodeProperties::GetValueInput(node, 0)))) {
-    if (Node* rep = escape_analysis()->GetReplacement(node)) {
-      TRACE("Replaced #%d (%s) with #%d (%s)\n", node->id(),
-            node->op()->mnemonic(), rep->id(), rep->op()->mnemonic());
-      rep = MaybeGuard(jsgraph(), zone(), node, rep);
-      ReplaceWithValue(node, rep);
-      return Replace(rep);
-    }
-  }
-  return NoChange();
-}
+Node* EscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
+  VirtualObject::Id id = vobject->id();
+  if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
+  if (!object_id_cache_[id]) {
+    Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
+    NodeProperties::SetType(node, Type::Object());
+    object_id_cache_[id] = node;
+  }
+  return object_id_cache_[id];
+}
-
-Reduction EscapeAnalysisReducer::ReduceStore(Node* node) {
-  DCHECK(node->opcode() == IrOpcode::kStoreField ||
-         node->opcode() == IrOpcode::kStoreElement);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(
-          SkipTypeGuards(NodeProperties::GetValueInput(node, 0)))) {
-    TRACE("Removed #%d (%s) from effect chain\n", node->id(),
-          node->op()->mnemonic());
-    RelaxEffectsAndControls(node);
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceCheckMaps(Node* node) {
-  DCHECK(node->opcode() == IrOpcode::kCheckMaps);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(
-          SkipTypeGuards(NodeProperties::GetValueInput(node, 0))) &&
-      !escape_analysis()->IsEscaped(node)) {
-    TRACE("Removed #%d (%s) from effect chain\n", node->id(),
-          node->op()->mnemonic());
-    RelaxEffectsAndControls(node);
-    return Changed(node);
-  }
-  return NoChange();
-}
+
+Reduction EscapeAnalysisReducer::Reduce(Node* node) {
+  if (Node* replacement = analysis_result().GetReplacementOf(node)) {
+    DCHECK(node->opcode() != IrOpcode::kAllocate &&
+           node->opcode() != IrOpcode::kFinishRegion);
+    DCHECK_NE(replacement, node);
+    if (replacement != jsgraph()->Dead()) {
+      replacement = MaybeGuard(node, replacement);
+    }
+    RelaxEffectsAndControls(node);
+    return Replace(replacement);
+  }
+  switch (node->opcode()) {
+    case IrOpcode::kAllocate: {
+      const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
+      if (vobject && !vobject->HasEscaped()) {
+        RelaxEffectsAndControls(node);
+      }
+      return NoChange();
+    }
+    case IrOpcode::kFinishRegion: {
+      Node* effect = NodeProperties::GetEffectInput(node, 0);
+      if (effect->opcode() == IrOpcode::kBeginRegion) {
+        RelaxEffectsAndControls(effect);
+        RelaxEffectsAndControls(node);
+      }
+      return NoChange();
+    }
+    case IrOpcode::kNewUnmappedArgumentsElements:
+      arguments_elements_.insert(node);
+      return NoChange();
+    default: {
+      // TODO(sigurds): Change this to GetFrameStateInputCount once
+      // it is working. For now we use EffectInputCount > 0 to determine
+      // whether a node might have a frame state input.
+      if (node->op()->EffectInputCount() > 0) {
+        ReduceFrameStateInputs(node);
+      }
+      return NoChange();
+    }
+  }
+}
-Reduction EscapeAnalysisReducer::ReduceAllocate(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(node)) {
-    RelaxEffectsAndControls(node);
-    TRACE("Removed allocate #%d from effect chain\n", node->id());
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceFinishRegion(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
-  Node* effect = NodeProperties::GetEffectInput(node, 0);
-  if (effect->opcode() == IrOpcode::kBeginRegion) {
-    // We only add it now to remove empty Begin/Finish region pairs
-    // in the process.
-    if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-      fully_reduced_.Add(node->id());
-    }
-    RelaxEffectsAndControls(effect);
-    RelaxEffectsAndControls(node);
-#ifdef DEBUG
-    if (FLAG_trace_turbo_escape) {
-      PrintF("Removed region #%d / #%d from effect chain,", effect->id(),
-             node->id());
-      PrintF(" %d user(s) of #%d remain(s):", node->UseCount(), node->id());
-      for (Edge edge : node->use_edges()) {
-        PrintF(" #%d", edge.from()->id());
-      }
-      PrintF("\n");
-    }
-#endif  // DEBUG
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceReferenceEqual(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kReferenceEqual);
-  Node* left = SkipTypeGuards(NodeProperties::GetValueInput(node, 0));
-  Node* right = SkipTypeGuards(NodeProperties::GetValueInput(node, 1));
-  if (escape_analysis()->IsVirtual(left)) {
-    if (escape_analysis()->IsVirtual(right) &&
-        escape_analysis()->CompareVirtualObjects(left, right)) {
-      ReplaceWithValue(node, jsgraph()->TrueConstant());
-      TRACE("Replaced ref eq #%d with true\n", node->id());
-      return Replace(jsgraph()->TrueConstant());
-    }
-    // Right-hand side is not a virtual object, or a different one.
-    ReplaceWithValue(node, jsgraph()->FalseConstant());
-    TRACE("Replaced ref eq #%d with false\n", node->id());
-    return Replace(jsgraph()->FalseConstant());
-  } else if (escape_analysis()->IsVirtual(right)) {
-    // Left-hand side is not a virtual object.
-    ReplaceWithValue(node, jsgraph()->FalseConstant());
-    TRACE("Replaced ref eq #%d with false\n", node->id());
-    return Replace(jsgraph()->FalseConstant());
-  }
-  return NoChange();
-}
-Reduction EscapeAnalysisReducer::ReduceObjectIsSmi(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kObjectIsSmi);
-  Node* input = SkipTypeGuards(NodeProperties::GetValueInput(node, 0));
-  if (escape_analysis()->IsVirtual(input)) {
-    ReplaceWithValue(node, jsgraph()->FalseConstant());
-    TRACE("Replaced ObjectIsSmi #%d with false\n", node->id());
-    return Replace(jsgraph()->FalseConstant());
-  }
-  return NoChange();
-}
+// While doing DFS on the FrameState tree, we have to recognize duplicate
+// occurrences of virtual objects.
+class Deduplicator {
+ public:
+  explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
+  bool SeenBefore(const VirtualObject* vobject) {
+    VirtualObject::Id id = vobject->id();
+    if (id >= is_duplicate_.size()) {
+      is_duplicate_.resize(id + 1);
+    }
+    bool is_duplicate = is_duplicate_[id];
+    is_duplicate_[id] = true;
+    return is_duplicate;
+  }
+
+ private:
+  ZoneVector<bool> is_duplicate_;
+};
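Aside: Deduplicator is a grow-on-demand test-and-set bitmap keyed by virtual-object id. The same pattern in standalone form (illustrative only):

#include <cassert>
#include <vector>

// Growable bitmap: the first query for an id returns false and marks it seen.
class SeenSet {
 public:
  bool SeenBefore(size_t id) {
    if (id >= is_duplicate_.size()) is_duplicate_.resize(id + 1);
    bool seen = is_duplicate_[id];
    is_duplicate_[id] = true;
    return seen;
  }

 private:
  std::vector<bool> is_duplicate_;
};

int main() {
  SeenSet seen;
  assert(!seen.SeenBefore(7));  // first occurrence
  assert(seen.SeenBefore(7));   // duplicate
}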
-Reduction EscapeAnalysisReducer::ReduceFrameStateUses(Node* node) {
+void EscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
   DCHECK_GE(node->op()->EffectInputCount(), 1);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  bool changed = false;
   for (int i = 0; i < node->InputCount(); ++i) {
     Node* input = node->InputAt(i);
     if (input->opcode() == IrOpcode::kFrameState) {
-      if (Node* ret = ReduceDeoptState(input, node, false)) {
+      Deduplicator deduplicator(zone());
+      if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
         node->ReplaceInput(i, ret);
-        changed = true;
       }
     }
   }
-  if (changed) {
-    return Changed(node);
-  }
-  return NoChange();
 }
-// Returns the clone if it duplicated the node, and null otherwise.
 Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
-                                              bool multiple_users) {
-  DCHECK(node->opcode() == IrOpcode::kFrameState ||
-         node->opcode() == IrOpcode::kStateValues);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
-      fully_reduced_.Contains(node->id())) {
-    return nullptr;
-  }
-  TRACE("Reducing %s %d\n", node->op()->mnemonic(), node->id());
-  Node* clone = nullptr;
-  bool node_multiused = node->UseCount() > 1;
-  bool multiple_users_rec = multiple_users || node_multiused;
-  for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
-    Node* input = NodeProperties::GetValueInput(node, i);
-    if (input->opcode() == IrOpcode::kStateValues) {
-      if (Node* ret = ReduceDeoptState(input, effect, multiple_users_rec)) {
-        if (node_multiused || (multiple_users && !clone)) {
-          TRACE("  Cloning #%d", node->id());
-          node = clone = jsgraph()->graph()->CloneNode(node);
-          TRACE(" to #%d\n", node->id());
-          node_multiused = false;
-        }
-        NodeProperties::ReplaceValueInput(node, ret, i);
-      }
-    } else {
-      if (Node* ret = ReduceStateValueInput(node, i, effect, node_multiused,
-                                            clone, multiple_users)) {
-        DCHECK_NULL(clone);
-        node_multiused = false;  // Don't clone anymore.
-        node = clone = ret;
-      }
-    }
-  }
-  if (node->opcode() == IrOpcode::kFrameState) {
-    Node* outer_frame_state = NodeProperties::GetFrameStateInput(node);
-    if (outer_frame_state->opcode() == IrOpcode::kFrameState) {
-      if (Node* ret =
-              ReduceDeoptState(outer_frame_state, effect, multiple_users_rec)) {
-        if (node_multiused || (multiple_users && !clone)) {
-          TRACE("  Cloning #%d", node->id());
-          node = clone = jsgraph()->graph()->CloneNode(node);
-          TRACE(" to #%d\n", node->id());
-        }
-        NodeProperties::ReplaceFrameStateInput(node, ret);
-      }
-    }
-  }
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  return clone;
-}
+                                              Deduplicator* deduplicator) {
+  if (node->opcode() == IrOpcode::kFrameState) {
+    NodeHashCache::Constructor new_node(&node_cache_, node);
+    // This input order is important to match the DFS traversal used in the
+    // instruction selector. Otherwise, the instruction selector might find a
+    // duplicate node before the original one.
+    for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
+                         kFrameStateParametersInput, kFrameStateContextInput,
+                         kFrameStateLocalsInput, kFrameStateStackInput}) {
+      Node* input = node->InputAt(input_id);
+      new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
+                            input_id);
+    }
+    return new_node.Get();
+  } else if (node->opcode() == IrOpcode::kStateValues) {
+    NodeHashCache::Constructor new_node(&node_cache_, node);
+    for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
+      Node* input = NodeProperties::GetValueInput(node, i);
+      new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
+                                 i);
+    }
+    return new_node.Get();
+  } else if (const VirtualObject* vobject =
+                 analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
+    if (vobject->HasEscaped()) return node;
+    if (deduplicator->SeenBefore(vobject)) {
+      return ObjectIdNode(vobject);
+    } else {
+      std::vector<Node*> inputs;
+      for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
+        Node* field =
+            analysis_result().GetVirtualObjectField(vobject, offset, effect);
+        CHECK_NOT_NULL(field);
+        if (field != jsgraph()->Dead()) {
+          inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
+        }
+      }
+      int num_inputs = static_cast<int>(inputs.size());
+      NodeHashCache::Constructor new_node(
+          &node_cache_,
+          jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
+          num_inputs, &inputs.front(), NodeProperties::GetType(node));
+      return new_node.Get();
+    }
+  } else {
+    return node;
+  }
+}
-
-// Returns the clone if it duplicated the node, and null otherwise.
-Node* EscapeAnalysisReducer::ReduceStateValueInput(Node* node, int node_index,
-                                                   Node* effect,
-                                                   bool node_multiused,
-                                                   bool already_cloned,
-                                                   bool multiple_users) {
-  Node* input = SkipTypeGuards(NodeProperties::GetValueInput(node, node_index));
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
-      fully_reduced_.Contains(node->id())) {
-    return nullptr;
-  }
-  TRACE("Reducing State Input #%d (%s)\n", input->id(),
-        input->op()->mnemonic());
-  Node* clone = nullptr;
-  if (input->opcode() == IrOpcode::kFinishRegion ||
-      input->opcode() == IrOpcode::kAllocate) {
-    if (escape_analysis()->IsVirtual(input)) {
-      if (escape_analysis()->IsCyclicObjectState(effect, input)) {
-        // TODO(mstarzinger): Represent cyclic object states differently to
-        // ensure the scheduler can properly handle such object states.
-        compilation_failed_ = true;
-        return nullptr;
-      }
-      if (Node* object_state =
-              escape_analysis()->GetOrCreateObjectState(effect, input)) {
-        if (node_multiused || (multiple_users && !already_cloned)) {
-          TRACE("Cloning #%d", node->id());
-          node = clone = jsgraph()->graph()->CloneNode(node);
-          TRACE(" to #%d\n", node->id());
-          node_multiused = false;
-          already_cloned = true;
-        }
-        NodeProperties::ReplaceValueInput(node, object_state, node_index);
-        TRACE("Replaced state #%d input #%d with object state #%d\n",
-              node->id(), input->id(), object_state->id());
-      } else {
-        TRACE("No object state replacement for #%d at effect #%d available.\n",
-              input->id(), effect->id());
-        UNREACHABLE();
-      }
-    }
-  }
-  return clone;
-}
 void EscapeAnalysisReducer::VerifyReplacement() const {
-#ifdef DEBUG
   AllNodes all(zone(), jsgraph()->graph());
   for (Node* node : all.reachable) {
     if (node->opcode() == IrOpcode::kAllocate) {
-      CHECK(!escape_analysis_->IsVirtual(node));
+      if (const VirtualObject* vobject =
+              analysis_result().GetVirtualObject(node)) {
+        if (!vobject->HasEscaped()) {
+          V8_Fatal(__FILE__, __LINE__,
+                   "Escape analysis failed to remove node %s#%d\n",
+                   node->op()->mnemonic(), node->id());
+        }
+      }
     }
   }
-#endif  // DEBUG
 }

 void EscapeAnalysisReducer::Finalize() {
@@ -517,6 +324,88 @@ void EscapeAnalysisReducer::Finalize() {
   }
 }
+Node* NodeHashCache::Query(Node* node) {
+  auto it = cache_.find(node);
+  if (it != cache_.end()) {
+    return *it;
+  } else {
+    return nullptr;
+  }
+}
+
+NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
+                                        const Operator* op, int input_count,
+                                        Node** inputs, Type* type)
+    : node_cache_(cache), from_(nullptr) {
+  if (node_cache_->temp_nodes_.size() > 0) {
+    tmp_ = node_cache_->temp_nodes_.back();
+    node_cache_->temp_nodes_.pop_back();
+    int tmp_input_count = tmp_->InputCount();
+    if (input_count <= tmp_input_count) {
+      tmp_->TrimInputCount(input_count);
+    }
+    for (int i = 0; i < input_count; ++i) {
+      if (i < tmp_input_count) {
+        tmp_->ReplaceInput(i, inputs[i]);
+      } else {
+        tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
+      }
+    }
+    NodeProperties::ChangeOp(tmp_, op);
+  } else {
+    tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
+  }
+  NodeProperties::SetType(tmp_, type);
+}
+
+Node* NodeHashCache::Constructor::Get() {
+  DCHECK(tmp_ || from_);
+  Node* node;
+  if (!tmp_) {
+    node = node_cache_->Query(from_);
+    if (!node) node = from_;
+  } else {
+    node = node_cache_->Query(tmp_);
+    if (node) {
+      node_cache_->temp_nodes_.push_back(tmp_);
+    } else {
+      node = tmp_;
+      node_cache_->Insert(node);
+    }
+  }
+  tmp_ = from_ = nullptr;
+  return node;
+}
+
+Node* NodeHashCache::Constructor::MutableNode() {
+  DCHECK(tmp_ || from_);
+  if (!tmp_) {
+    if (node_cache_->temp_nodes_.empty()) {
+      tmp_ = node_cache_->graph_->CloneNode(from_);
+    } else {
+      tmp_ = node_cache_->temp_nodes_.back();
+      node_cache_->temp_nodes_.pop_back();
+      int from_input_count = from_->InputCount();
+      int tmp_input_count = tmp_->InputCount();
+      if (from_input_count <= tmp_input_count) {
+        tmp_->TrimInputCount(from_input_count);
+      }
+      for (int i = 0; i < from_input_count; ++i) {
+        if (i < tmp_input_count) {
+          tmp_->ReplaceInput(i, from_->InputAt(i));
+        } else {
+          tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
+        }
+      }
+      NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
+      NodeProperties::ChangeOp(tmp_, from_->op());
+    }
+  }
+  return tmp_;
+}
+
+#undef TRACE
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
--- a/src/compiler/escape-analysis-reducer.h
+++ b/src/compiler/escape-analysis-reducer.h
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2017 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
@@ -6,7 +6,6 @@
 #define V8_COMPILER_ESCAPE_ANALYSIS_REDUCER_H_

 #include "src/base/compiler-specific.h"
-#include "src/bit-vector.h"
 #include "src/compiler/escape-analysis.h"
 #include "src/compiler/graph-reducer.h"
 #include "src/globals.h"
@@ -15,55 +14,101 @@ namespace v8 {
 namespace internal {
 namespace compiler {

-// Forward declarations.
+class Deduplicator;
 class JSGraph;
+// Perform hash-consing when creating or mutating nodes. Used to avoid
+// duplicate nodes when creating ObjectState, StateValues and FrameState nodes.
+class NodeHashCache {
+ public:
+  NodeHashCache(Graph* graph, Zone* zone)
+      : graph_(graph), cache_(zone), temp_nodes_(zone) {}
+
+  // Handle to a conceptually new mutable node. Tries to re-use existing nodes
+  // and to recycle memory if possible.
+  class Constructor {
+   public:
+    // Construct a new node as a clone of [from].
+    Constructor(NodeHashCache* cache, Node* from)
+        : node_cache_(cache), from_(from), tmp_(nullptr) {}
+    // Construct a new node from scratch.
+    Constructor(NodeHashCache* cache, const Operator* op, int input_count,
+                Node** inputs, Type* type);
+
+    // Modify the new node.
+    void ReplaceValueInput(Node* input, int i) {
+      if (!tmp_ && input == NodeProperties::GetValueInput(from_, i)) return;
+      Node* node = MutableNode();
+      NodeProperties::ReplaceValueInput(node, input, i);
+    }
+    void ReplaceInput(Node* input, int i) {
+      if (!tmp_ && input == from_->InputAt(i)) return;
+      Node* node = MutableNode();
+      node->ReplaceInput(i, input);
+    }
+
+    // Obtain the mutated node or a cached copy. Invalidates the [Constructor].
+    Node* Get();
+
+   private:
+    Node* MutableNode();
+
+    NodeHashCache* node_cache_;
+    // Original node, copied on write.
+    Node* from_;
+    // Temporary node used for mutations, can be recycled if cache is hit.
+    Node* tmp_;
+  };
+
+ private:
+  Node* Query(Node* node);
+  void Insert(Node* node) { cache_.insert(node); }
+
+  Graph* graph_;
+  struct NodeEquals {
+    bool operator()(Node* a, Node* b) const {
+      return NodeProperties::Equals(a, b);
+    }
+  };
+  struct NodeHashCode {
+    size_t operator()(Node* n) const { return NodeProperties::HashCode(n); }
+  };
+  ZoneUnorderedSet<Node*, NodeHashCode, NodeEquals> cache_;
+  // Unused nodes whose memory can be recycled.
+  ZoneVector<Node*> temp_nodes_;
+};
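Aside: NodeHashCache implements hash-consing: structurally identical nodes map to one canonical instance via a hash set with structural equality. A minimal standalone sketch of the idea over a toy node type (hypothetical names, not V8's classes):

#include <cassert>
#include <memory>
#include <unordered_set>
#include <vector>

struct ToyNode {
  int opcode;
  std::vector<ToyNode*> inputs;
};

struct ToyNodeHash {
  size_t operator()(const ToyNode* n) const {
    size_t h = std::hash<int>()(n->opcode);
    for (ToyNode* in : n->inputs) h = h * 31 + std::hash<void*>()(in);
    return h;
  }
};
struct ToyNodeEq {
  bool operator()(const ToyNode* a, const ToyNode* b) const {
    return a->opcode == b->opcode && a->inputs == b->inputs;
  }
};

// Returns the canonical node for (opcode, inputs), creating it on a miss.
class HashConsTable {
 public:
  ToyNode* GetOrCreate(int opcode, std::vector<ToyNode*> inputs) {
    ToyNode probe{opcode, std::move(inputs)};
    auto it = cache_.find(&probe);
    if (it != cache_.end()) return *it;  // reuse the existing node
    owned_.push_back(std::make_unique<ToyNode>(std::move(probe)));
    cache_.insert(owned_.back().get());
    return owned_.back().get();
  }

 private:
  std::unordered_set<ToyNode*, ToyNodeHash, ToyNodeEq> cache_;
  std::vector<std::unique_ptr<ToyNode>> owned_;
};

int main() {
  HashConsTable table;
  ToyNode* a = table.GetOrCreate(1, {});
  ToyNode* b = table.GetOrCreate(1, {});
  assert(a == b);  // structurally equal nodes are shared
}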
+// Modify the graph according to the information computed in the previous
+// phase.
 class V8_EXPORT_PRIVATE EscapeAnalysisReducer final
     : public NON_EXPORTED_BASE(AdvancedReducer) {
  public:
   EscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
-                        EscapeAnalysis* escape_analysis, Zone* zone);
-  Reduction Reduce(Node* node) override;
+                        EscapeAnalysisResult analysis_result, Zone* zone);

   const char* reducer_name() const override { return "EscapeAnalysisReducer"; }
+  Reduction Reduce(Node* node) final;
   void Finalize() override;

   // Verifies that all virtual allocation nodes have been dealt with. Run it
-  // after this reducer has been applied. Has no effect in release mode.
+  // after this reducer has been applied.
   void VerifyReplacement() const;
-  bool compilation_failed() const { return compilation_failed_; }

  private:
-  Reduction ReduceNode(Node* node);
-  Reduction ReduceLoad(Node* node);
-  Reduction ReduceStore(Node* node);
-  Reduction ReduceCheckMaps(Node* node);
-  Reduction ReduceAllocate(Node* node);
-  Reduction ReduceFinishRegion(Node* node);
-  Reduction ReduceReferenceEqual(Node* node);
-  Reduction ReduceObjectIsSmi(Node* node);
-  Reduction ReduceFrameStateUses(Node* node);
-  Node* ReduceDeoptState(Node* node, Node* effect, bool multiple_users);
-  Node* ReduceStateValueInput(Node* node, int node_index, Node* effect,
-                              bool node_multiused, bool already_cloned,
-                              bool multiple_users);
+  void ReduceFrameStateInputs(Node* node);
+  Node* ReduceDeoptState(Node* node, Node* effect, Deduplicator* deduplicator);
+  Node* ObjectIdNode(const VirtualObject* vobject);
+  Node* MaybeGuard(Node* original, Node* replacement);

   JSGraph* jsgraph() const { return jsgraph_; }
-  EscapeAnalysis* escape_analysis() const { return escape_analysis_; }
+  EscapeAnalysisResult analysis_result() const { return analysis_result_; }
   Zone* zone() const { return zone_; }

   JSGraph* const jsgraph_;
-  EscapeAnalysis* escape_analysis_;
+  EscapeAnalysisResult analysis_result_;
+  ZoneVector<Node*> object_id_cache_;
+  NodeHashCache node_cache_;
+  ZoneSet<Node*> arguments_elements_;
   Zone* const zone_;
-  // This bit vector marks nodes we already processed (allocs, loads, stores)
-  // and nodes that do not need a visit from ReduceDeoptState etc.
-  BitVector fully_reduced_;
-  bool exists_virtual_allocate_;
-  std::set<Node*> arguments_elements_;
-  bool compilation_failed_ = false;

   DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisReducer);
 };
--- a/src/compiler/escape-analysis.cc
+++ b/src/compiler/escape-analysis.cc
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2017 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/escape-analysis.h"

-#include <limits>
-
-#include "src/base/flags.h"
 #include "src/bootstrapper.h"
-#include "src/compilation-dependencies.h"
-#include "src/compiler/common-operator.h"
-#include "src/compiler/graph-reducer.h"
-#include "src/compiler/js-operator.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/node-matchers.h"
-#include "src/compiler/node-properties.h"
-#include "src/compiler/node.h"
 #include "src/compiler/operator-properties.h"
 #include "src/compiler/simplified-operator.h"
-#include "src/compiler/type-cache.h"
 #include "src/objects-inl.h"

-namespace v8 {
-namespace internal {
-namespace compiler {
-
-typedef NodeId Alias;
-
 #ifdef DEBUG
 #define TRACE(...)                  \
   do {                              \
@@ -36,1760 +20,718 @@ typedef NodeId Alias;
 #define TRACE(...)
 #endif
+namespace v8 {
+namespace internal {
+namespace compiler {
+
-// EscapeStatusAnalysis determines for each allocation whether it escapes.
-class EscapeStatusAnalysis : public ZoneObject {
- public:
-  enum Status {
-    kUnknown = 0u,
-    kTracked = 1u << 0,
-    kEscaped = 1u << 1,
-    kOnStack = 1u << 2,
-    kVisited = 1u << 3,
-    // A node is dangling, if it is a load of some kind, and does not have
-    // an effect successor.
-    kDanglingComputed = 1u << 4,
-    kDangling = 1u << 5,
-    // A node is an effect branch point, if it has more than 2 non-dangling
-    // effect successors.
-    kBranchPointComputed = 1u << 6,
-    kBranchPoint = 1u << 7,
-    kInQueue = 1u << 8
-  };
-  typedef base::Flags<Status, uint16_t> StatusFlags;
-
-  void RunStatusAnalysis();
-
-  bool IsVirtual(Node* node);
-  bool IsEscaped(Node* node);
-  bool IsAllocation(Node* node);
-
-  bool IsInQueue(NodeId id);
-  void SetInQueue(NodeId id, bool on_stack);
-
-  void DebugPrint();
-
-  EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
-                       Zone* zone);
-  void EnqueueForStatusAnalysis(Node* node);
-  bool SetEscaped(Node* node);
-  bool IsEffectBranchPoint(Node* node);
-  bool IsDanglingEffectNode(Node* node);
-  void ResizeStatusVector();
-  size_t GetStatusVectorSize();
-  bool IsVirtual(NodeId id);
-
-  Graph* graph() const { return graph_; }
-  void AssignAliases();
-  Alias GetAlias(NodeId id) const { return aliases_[id]; }
-  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
-  Alias AliasCount() const { return next_free_alias_; }
-  static const Alias kNotReachable;
-  static const Alias kUntrackable;
-
-  bool IsNotReachable(Node* node);
-
- private:
-  void Process(Node* node);
-  void ProcessAllocate(Node* node);
-  void ProcessFinishRegion(Node* node);
-  void ProcessStoreField(Node* node);
-  void ProcessStoreElement(Node* node);
-  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
-    return CheckUsesForEscape(node, node, phi_escaping);
-  }
-  bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
-  void RevisitUses(Node* node);
-  void RevisitInputs(Node* node);
-
-  Alias NextAlias() { return next_free_alias_++; }
-
-  bool HasEntry(Node* node);
-
-  bool IsAllocationPhi(Node* node);
-
-  ZoneVector<Node*> stack_;
-  EscapeAnalysis* object_analysis_;
-  Graph* const graph_;
-  ZoneVector<StatusFlags> status_;
-  Alias next_free_alias_;
-  ZoneVector<Node*> status_stack_;
-  ZoneVector<Alias> aliases_;
-
-  DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
-};
-
-DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)
-
-const Alias EscapeStatusAnalysis::kNotReachable =
-    std::numeric_limits<Alias>::max();
-const Alias EscapeStatusAnalysis::kUntrackable =
-    std::numeric_limits<Alias>::max() - 1;
-
-namespace impl {
-class VirtualObject : public ZoneObject {
- public:
-  enum Status {
-    kInitial = 0,
-    kTracked = 1u << 0,
-    kInitialized = 1u << 1,
-    kCopyRequired = 1u << 2,
-  };
-  typedef base::Flags<Status, unsigned char> StatusFlags;
-
-  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
-      : id_(id),
-        status_(kInitial),
-        fields_(zone),
-        phi_(zone),
-        object_state_(nullptr),
-        owner_(owner) {}
-
-  VirtualObject(VirtualState* owner, const VirtualObject& other)
-      : id_(other.id_),
-        status_(other.status_ & ~kCopyRequired),
-        fields_(other.fields_),
-        phi_(other.phi_),
-        object_state_(other.object_state_),
-        owner_(owner) {}
-
-  VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
-                bool initialized)
-      : id_(id),
-        status_(kTracked | (initialized ? kInitialized : kInitial)),
-        fields_(zone),
-        phi_(zone),
-        object_state_(nullptr),
-        owner_(owner) {
-    fields_.resize(field_number);
-    phi_.resize(field_number, false);
-  }
-
-  Node* GetField(size_t offset) { return fields_[offset]; }
-
-  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }
-
-  void SetField(size_t offset, Node* node, bool created_phi = false) {
-    TRACE("  VirtualObject(%p)[%zu] changes from #%i to #%i\n",
-          static_cast<void*>(this), offset,
-          fields_[offset] ? fields_[offset]->id() : -1, node ? node->id() : -1);
-    fields_[offset] = node;
-    phi_[offset] = created_phi;
-  }
-  bool IsTracked() const { return status_ & kTracked; }
-  bool IsInitialized() const { return status_ & kInitialized; }
-  bool SetInitialized() { return status_ |= kInitialized; }
-  VirtualState* owner() const { return owner_; }
-
-  Node** fields_array() { return &fields_.front(); }
-  size_t field_count() { return fields_.size(); }
-  bool ResizeFields(size_t field_count) {
-    if (field_count > fields_.size()) {
-      fields_.resize(field_count);
-      phi_.resize(field_count);
-      return true;
-    }
-    return false;
-  }
-  void ClearAllFields() {
-    for (size_t i = 0; i < fields_.size(); ++i) {
-      fields_[i] = nullptr;
-      phi_[i] = false;
-    }
-  }
-  bool AllFieldsClear() {
-    for (size_t i = 0; i < fields_.size(); ++i) {
-      if (fields_[i] != nullptr) {
-        return false;
-      }
-    }
-    return true;
-  }
-  bool UpdateFrom(const VirtualObject& other);
-  bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
-                 CommonOperatorBuilder* common, bool initialMerge);
-  void SetObjectState(Node* node) { object_state_ = node; }
-  Node* GetObjectState() const { return object_state_; }
-  bool IsCopyRequired() const { return status_ & kCopyRequired; }
-  void SetCopyRequired() { status_ |= kCopyRequired; }
-  bool NeedCopyForModification() {
-    if (!IsCopyRequired() || !IsInitialized()) {
-      return false;
-    }
-    return true;
-  }
-
-  NodeId id() const { return id_; }
-  void id(NodeId id) { id_ = id; }
-
- private:
-  bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
-                   CommonOperatorBuilder* common);
-
-  NodeId id_;
-  StatusFlags status_;
-  ZoneVector<Node*> fields_;
-  ZoneVector<bool> phi_;
-  Node* object_state_;
-  VirtualState* owner_;
-
-  DISALLOW_COPY_AND_ASSIGN(VirtualObject);
-};
-
-DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)
-
-bool VirtualObject::UpdateFrom(const VirtualObject& other) {
-  TRACE("%p.UpdateFrom(%p)\n", static_cast<void*>(this),
-        static_cast<const void*>(&other));
-  bool changed = status_ != other.status_;
-  status_ = other.status_;
-  phi_ = other.phi_;
-  if (fields_.size() != other.fields_.size()) {
-    fields_ = other.fields_;
-    return true;
-  }
-  for (size_t i = 0; i < fields_.size(); ++i) {
-    if (fields_[i] != other.fields_[i]) {
-      changed = true;
-      fields_[i] = other.fields_[i];
-    }
-  }
-  return changed;
-}
-
-class VirtualState : public ZoneObject {
- public:
-  VirtualState(Node* owner, Zone* zone, size_t size)
-      : info_(size, nullptr, zone),
-        initialized_(static_cast<int>(size), zone),
-        owner_(owner) {}
-
-  VirtualState(Node* owner, const VirtualState& state)
-      : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
-        initialized_(state.initialized_.length(),
-                     state.info_.get_allocator().zone()),
-        owner_(owner) {
-    for (size_t i = 0; i < info_.size(); ++i) {
-      if (state.info_[i]) {
-        info_[i] = state.info_[i];
-      }
-    }
-  }
-
-  VirtualObject* VirtualObjectFromAlias(size_t alias);
-  void SetVirtualObject(Alias alias, VirtualObject* state);
-
-  bool UpdateFrom(VirtualState* state, Zone* zone);
-  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
-                 CommonOperatorBuilder* common, Node* at);
-  size_t size() const { return info_.size(); }
-  Node* owner() const { return owner_; }
-  VirtualObject* Copy(VirtualObject* obj, Alias alias);
-  void SetCopyRequired() {
-    for (VirtualObject* obj : info_) {
-      if (obj) obj->SetCopyRequired();
-    }
-  }
-
- private:
-  ZoneVector<VirtualObject*> info_;
-  BitVector initialized_;
-  Node* owner_;
-
-  DISALLOW_COPY_AND_ASSIGN(VirtualState);
-};
+template <class T>
+class Sidetable {
+ public:
+  explicit Sidetable(Zone* zone) : map_(zone) {}
+  T& operator[](const Node* node) {
+    NodeId id = node->id();
+    if (id >= map_.size()) {
+      map_.resize(id + 1);
+    }
+    return map_[id];
+  }
+
+ private:
+  ZoneVector<T> map_;
+};
+
+template <class T>
+class SparseSidetable {
+ public:
+  explicit SparseSidetable(Zone* zone, T def_value = T())
+      : def_value_(std::move(def_value)), map_(zone) {}
+  void Set(const Node* node, T value) {
+    auto iter = map_.find(node->id());
+    if (iter != map_.end()) {
+      iter->second = std::move(value);
+    } else if (value != def_value_) {
+      map_.insert(iter, std::make_pair(node->id(), std::move(value)));
+    }
+  }
+  const T& Get(const Node* node) const {
+    auto iter = map_.find(node->id());
+    return iter != map_.end() ? iter->second : def_value_;
+  }
+
+ private:
+  T def_value_;
+  ZoneUnorderedMap<NodeId, T> map_;
+};
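Aside: Sidetable is a dense NodeId-indexed vector, while SparseSidetable keeps only entries that differ from a default value. A standalone sketch of the sparse variant (illustrative; unlike this sketch, the real Set keeps an existing entry even when it is reset to the default):

#include <cassert>
#include <unordered_map>

// Sparse side-table: only values that differ from the default are stored,
// so untouched ids cost no memory.
template <class T>
class ToySparseSidetable {
 public:
  explicit ToySparseSidetable(T def = T()) : def_(std::move(def)) {}
  void Set(int id, T value) {
    if (value == def_) {
      map_.erase(id);  // storing the default is treated as erasing
    } else {
      map_[id] = std::move(value);
    }
  }
  const T& Get(int id) const {
    auto it = map_.find(id);
    return it != map_.end() ? it->second : def_;
  }

 private:
  T def_;
  std::unordered_map<int, T> map_;
};

int main() {
  ToySparseSidetable<int> table(-1);
  assert(table.Get(100) == -1);  // default without any allocation
  table.Set(100, 7);
  assert(table.Get(100) == 7);
}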
-class MergeCache : public ZoneObject {
- public:
-  explicit MergeCache(Zone* zone)
-      : states_(zone), objects_(zone), fields_(zone) {
-    states_.reserve(5);
-    objects_.reserve(5);
-    fields_.reserve(5);
-  }
-  ZoneVector<VirtualState*>& states() { return states_; }
-  ZoneVector<VirtualObject*>& objects() { return objects_; }
-  ZoneVector<Node*>& fields() { return fields_; }
-  void Clear() {
-    states_.clear();
-    objects_.clear();
-    fields_.clear();
-  }
-  size_t LoadVirtualObjectsFromStatesFor(Alias alias);
-  void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
-                                       const ZoneVector<Alias>& aliases);
-  Node* GetFields(size_t pos);
-
- private:
-  ZoneVector<VirtualState*> states_;
-  ZoneVector<VirtualObject*> objects_;
-  ZoneVector<Node*> fields_;
-
-  DISALLOW_COPY_AND_ASSIGN(MergeCache);
-};
+// Keeps track of the changes to the current node during reduction.
+// Encapsulates the current state of the IR graph and the reducer state like
+// side-tables. All access to the IR and the reducer state should happen
+// through a ReduceScope to ensure that changes and dependencies are tracked
+// and all necessary node revisitations happen.
+class ReduceScope {
+ public:
+  typedef EffectGraphReducer::Reduction Reduction;
+  explicit ReduceScope(Node* node, Reduction* reduction)
+      : current_node_(node), reduction_(reduction) {}
+
+ protected:
+  Node* current_node() const { return current_node_; }
+  Reduction* reduction() { return reduction_; }
+
+ private:
+  Node* current_node_;
+  Reduction* reduction_;
+};
-size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
-  objects_.clear();
-  DCHECK_GT(states_.size(), 0u);
-  size_t min = std::numeric_limits<size_t>::max();
-  for (VirtualState* state : states_) {
-    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
-      objects_.push_back(obj);
-      min = std::min(obj->field_count(), min);
-    }
-  }
-  return min;
-}
-
-void MergeCache::LoadVirtualObjectsForFieldsFrom(
-    VirtualState* state, const ZoneVector<Alias>& aliases) {
-  objects_.clear();
-  size_t max_alias = state->size();
-  for (Node* field : fields_) {
-    Alias alias = aliases[field->id()];
-    if (alias >= max_alias) continue;
-    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
-      objects_.push_back(obj);
-    }
-  }
-}
-
-Node* MergeCache::GetFields(size_t pos) {
-  fields_.clear();
-  Node* rep = pos >= objects_.front()->field_count()
-                  ? nullptr
-                  : objects_.front()->GetField(pos);
-  for (VirtualObject* obj : objects_) {
-    if (pos >= obj->field_count()) continue;
-    Node* field = obj->GetField(pos);
-    if (field) {
-      fields_.push_back(field);
-    }
-    if (field != rep) {
-      rep = nullptr;
-    }
-  }
-  return rep;
-}
-
-VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
-  if (obj->owner() == this) return obj;
-  VirtualObject* new_obj =
-      new (info_.get_allocator().zone()) VirtualObject(this, *obj);
-  TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
-        static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
-        static_cast<void*>(new_obj));
-  info_[alias] = new_obj;
-  return new_obj;
-}
-
-VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
-  return info_[alias];
-}
-
-void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
-  info_[alias] = obj;
-  if (obj) initialized_.Add(alias);
-}
-
-bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
-  if (from == this) return false;
-  bool changed = false;
-  for (Alias alias = 0; alias < size(); ++alias) {
-    VirtualObject* ls = VirtualObjectFromAlias(alias);
-    VirtualObject* rs = from->VirtualObjectFromAlias(alias);
+// A VariableTracker object keeps track of the values of variables at all
+// points of the effect chain and introduces new phi nodes when necessary.
+// Initially and by default, variables are mapped to nullptr, which means that
+// the variable allocation point does not dominate the current point on the
+// effect chain. We map variables that represent uninitialized memory to the
+// Dead node to ensure it is not read.
+// Unmapped values are impossible by construction: it is indistinguishable
+// whether a PersistentMap does not contain an element or maps it to the
+// default element.
+class VariableTracker {
+ private:
+  // The state of all variables at one point in the effect chain.
+  class State {
+    typedef PersistentMap<Variable, Node*> Map;
+
+   public:
+    explicit State(Zone* zone) : map_(zone) {}
+    Node* Get(Variable var) const {
+      CHECK(var != Variable::Invalid());
+      return map_.Get(var);
+    }
+    void Set(Variable var, Node* node) {
+      CHECK(var != Variable::Invalid());
+      return map_.Set(var, node);
+    }
+    Map::iterator begin() const { return map_.begin(); }
+    Map::iterator end() const { return map_.end(); }
+    bool operator!=(const State& other) const { return map_ != other.map_; }
+
+   private:
+    Map map_;
+  };
+
+ public:
+  VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
+  Variable NewVariable() { return Variable(next_variable_++); }
+  Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
+  Zone* zone() { return zone_; }
+
+  class Scope : public ReduceScope {
+   public:
+    Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
+    ~Scope();
+    Node* Get(Variable var) { return current_state_.Get(var); }
+    void Set(Variable var, Node* node) { current_state_.Set(var, node); }
+
+   private:
+    VariableTracker* states_;
+    State current_state_;
+  };
+
+ private:
+  State MergeInputs(Node* effect_phi);
+  Zone* zone_;
+  JSGraph* graph_;
+  SparseSidetable<State> table_;
+  ZoneVector<Node*> buffer_;
+  EffectGraphReducer* reducer_;
+  int next_variable_ = 0;
+
+  DISALLOW_COPY_AND_ASSIGN(VariableTracker);
+};
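Aside: the comment above relies on PersistentMap semantics, where an unmapped key is indistinguishable from a key mapped to the default value, and each update produces a new version that shares structure with the old one. A toy model using an immutable association list (V8's PersistentMap is a hash trie; this only illustrates the semantics):

#include <cassert>
#include <memory>

// Toy persistent map: Set returns a new version; lookups of unmapped keys
// return the default value, so a key mapped to the default is
// indistinguishable from an absent key.
template <class K, class V>
class ToyPersistentMap {
  struct Entry {
    K key;
    V value;
    std::shared_ptr<const Entry> next;
  };
  std::shared_ptr<const Entry> head_;
  V def_{};

 public:
  V Get(const K& key) const {
    for (const Entry* e = head_.get(); e; e = e->next.get())
      if (e->key == key) return e->value;
    return def_;  // default for unmapped keys
  }
  ToyPersistentMap Set(const K& key, V value) const {
    ToyPersistentMap result = *this;  // share the existing entries
    result.head_ = std::make_shared<const Entry>(
        Entry{key, std::move(value), head_});
    return result;
  }
};

int main() {
  ToyPersistentMap<int, int> empty;
  auto v1 = empty.Set(1, 42);
  assert(empty.Get(1) == 0);  // old version unchanged
  assert(v1.Get(1) == 42);
  assert(v1.Get(2) == 0);     // unmapped key yields the default
}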
-    if (ls == rs || rs == nullptr) continue;
-
-    if (ls == nullptr) {
-      ls = new (zone) VirtualObject(this, *rs);
-      SetVirtualObject(alias, ls);
-      changed = true;
-      continue;
-    }
-
-    TRACE("  Updating fields of @%d\n", alias);
-
-    changed = ls->UpdateFrom(*rs) || changed;
-  }
-  return false;
-}
+// Encapsulates the current state of the escape analysis reducer to preserve
+// invariants regarding changes and re-visitation.
+class EscapeAnalysisTracker : public ZoneObject {
+ public:
+  EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
+                        Zone* zone)
+      : virtual_objects_(zone),
+        replacements_(zone),
+        variable_states_(jsgraph, reducer, zone),
+        jsgraph_(jsgraph),
+        zone_(zone) {}
+
+  class Scope : public VariableTracker::Scope {
+   public:
+    Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
+          Node* node, Reduction* reduction)
+        : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
+          tracker_(tracker),
+          reducer_(reducer) {}
+    const VirtualObject* GetVirtualObject(Node* node) {
+      VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
+      if (vobject) vobject->AddDependency(current_node());
+      return vobject;
+    }
+    // Create or retrieve a virtual object for the current node.
+    const VirtualObject* InitVirtualObject(int size) {
+      DCHECK(current_node()->opcode() == IrOpcode::kAllocate);
+      VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
+      if (vobject) {
+        CHECK(vobject->size() == size);
+      } else {
+        vobject = tracker_->NewVirtualObject(size);
+      }
+      if (vobject) vobject->AddDependency(current_node());
+      vobject_ = vobject;
+      return vobject;
+    }
-namespace {
-
-bool IsEquivalentPhi(Node* node1, Node* node2) {
-  if (node1 == node2) return true;
-  if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
-      node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
-    return false;
-  }
-  for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
-    Node* input1 = NodeProperties::GetValueInput(node1, i);
-    Node* input2 = NodeProperties::GetValueInput(node2, i);
-    if (!IsEquivalentPhi(input1, input2)) {
-      return false;
-    }
-  }
-  return true;
-}
-
-}  // namespace
+    void SetVirtualObject(Node* object) {
+      vobject_ = tracker_->virtual_objects_.Get(object);
+    }
-bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
-                                Graph* graph, CommonOperatorBuilder* common) {
-  bool changed = false;
-  int value_input_count = static_cast<int>(cache->fields().size());
-  Node* rep = GetField(i);
-  if (!rep || !IsCreatedPhi(i)) {
-    for (Node* input : cache->fields()) {
-      CHECK_NOT_NULL(input);
-      CHECK(!input->IsDead());
-    }
-    Node* control = NodeProperties::GetControlInput(at);
-    cache->fields().push_back(control);
-    Node* phi = graph->NewNode(
-        common->Phi(MachineRepresentation::kTagged, value_input_count),
-        value_input_count + 1, &cache->fields().front());
-    NodeProperties::SetType(phi, Type::Any());
-    SetField(i, phi, true);
-#ifdef DEBUG
-    if (FLAG_trace_turbo_escape) {
-      PrintF("    Creating Phi #%d as merge of", phi->id());
-      for (int i = 0; i < value_input_count; i++) {
-        PrintF(" #%d (%s)", cache->fields()[i]->id(),
-               cache->fields()[i]->op()->mnemonic());
-      }
-      PrintF("\n");
-    }
-#endif
-    changed = true;
-  } else {
-    DCHECK(rep->opcode() == IrOpcode::kPhi);
-    for (int n = 0; n < value_input_count; ++n) {
-      Node* old = NodeProperties::GetValueInput(rep, n);
-      if (old != cache->fields()[n]) {
-        changed = true;
-        NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
-      }
-    }
-  }
-  return changed;
-}
+    void SetEscaped(Node* node) {
+      if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
+        if (object->HasEscaped()) return;
+        TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
+              node->op()->mnemonic(), node->id(),
+              current_node()->op()->mnemonic(), current_node()->id());
+        object->SetEscaped();
+        object->RevisitDependants(reducer_);
+      }
+    }
+    // The inputs of the current node have to be accessed through the scope to
+    // ensure that they respect the node replacements.
+    Node* ValueInput(int i) {
+      return tracker_->ResolveReplacement(
+          NodeProperties::GetValueInput(current_node(), i));
+    }
+    Node* ContextInput() {
+      return tracker_->ResolveReplacement(
+          NodeProperties::GetContextInput(current_node()));
+    }
-bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
-                              CommonOperatorBuilder* common,
-                              bool initialMerge) {
-  DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
-         at->opcode() == IrOpcode::kPhi);
-  bool changed = false;
-  for (size_t i = 0; i < field_count(); ++i) {
-    if (!initialMerge && GetField(i) == nullptr) continue;
-    Node* field = cache->GetFields(i);
-    if (field && !IsCreatedPhi(i)) {
-      changed = changed || GetField(i) != field;
-      SetField(i, field);
-      TRACE("    Field %zu agree on rep #%d\n", i, field->id());
-    } else {
-      size_t arity = at->opcode() == IrOpcode::kEffectPhi
-                         ? at->op()->EffectInputCount()
-                         : at->op()->ValueInputCount();
-      if (cache->fields().size() == arity) {
-        changed = MergeFields(i, at, cache, graph, common) || changed;
-      } else {
-        if (GetField(i) != nullptr) {
-          TRACE("    Field %zu cleared\n", i);
-          changed = true;
-        }
-        SetField(i, nullptr);
-      }
-    }
-  }
-  return changed;
-}
+    void SetReplacement(Node* replacement) {
+      replacement_ = replacement;
+      vobject_ =
+          replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
+      TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
+            replacement->id());
+    }
-bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
-                             CommonOperatorBuilder* common, Node* at) {
-  DCHECK_GT(cache->states().size(), 0u);
-  bool changed = false;
-  for (Alias alias = 0; alias < size(); ++alias) {
-    cache->objects().clear();
-    VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
-    bool copy_merge_object = false;
-    size_t fields = std::numeric_limits<size_t>::max();
-    for (VirtualState* state : cache->states()) {
-      if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
-        cache->objects().push_back(obj);
-        if (mergeObject == obj) {
-          copy_merge_object = true;
-        }
-        fields = std::min(obj->field_count(), fields);
-      }
-    }
-    if (cache->objects().size() == cache->states().size() &&
-        (mergeObject || !initialized_.Contains(alias))) {
-      bool initialMerge = false;
-      if (!mergeObject) {
-        initialMerge = true;
-        VirtualObject* obj = new (zone)
-            VirtualObject(cache->objects().front()->id(), this, zone, fields,
-                          cache->objects().front()->IsInitialized());
-        SetVirtualObject(alias, obj);
-        mergeObject = obj;
-        changed = true;
-      } else if (copy_merge_object) {
-        VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
-        SetVirtualObject(alias, obj);
-        mergeObject = obj;
-        changed = true;
-      } else {
-        changed = mergeObject->ResizeFields(fields) || changed;
-      }
-#ifdef DEBUG
-      if (FLAG_trace_turbo_escape) {
-        PrintF("  Alias @%d, merging into %p virtual objects", alias,
-               static_cast<void*>(mergeObject));
-        for (size_t i = 0; i < cache->objects().size(); i++) {
-          PrintF(" %p", static_cast<void*>(cache->objects()[i]));
-        }
-        PrintF("\n");
-      }
-#endif  // DEBUG
-      changed =
-          mergeObject->MergeFrom(cache, at, graph, common, initialMerge) ||
-          changed;
-    } else {
-      if (mergeObject) {
-        TRACE("  Alias %d, virtual object removed\n", alias);
-        changed = true;
-      }
-      SetVirtualObject(alias, nullptr);
-    }
-  }
-  return changed;
-}
+    void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
+
+    ~Scope() {
+      if (replacement_ != tracker_->replacements_[current_node()] ||
+          vobject_ != tracker_->virtual_objects_.Get(current_node())) {
+        reduction()->set_value_changed();
+      }
+      tracker_->replacements_[current_node()] = replacement_;
+      tracker_->virtual_objects_.Set(current_node(), vobject_);
+    }
-}  // namespace impl
-
-using namespace impl;
-
-EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
-                                           Graph* graph, Zone* zone)
-    : stack_(zone),
-      object_analysis_(object_analysis),
-      graph_(graph),
-      status_(zone),
-      next_free_alias_(0),
-      status_stack_(zone),
-      aliases_(zone) {}
-
-bool EscapeStatusAnalysis::HasEntry(Node* node) {
-  return status_[node->id()] & (kTracked | kEscaped);
-}
-
-bool EscapeStatusAnalysis::IsVirtual(Node* node) {
-  return IsVirtual(node->id());
-}
-
-bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
-  return (status_[id] & kTracked) && !(status_[id] & kEscaped);
-}
-
-bool EscapeStatusAnalysis::IsEscaped(Node* node) {
-  return status_[node->id()] & kEscaped;
-}
-
-bool EscapeStatusAnalysis::IsAllocation(Node* node) {
-  return node->opcode() == IrOpcode::kAllocate ||
-         node->opcode() == IrOpcode::kFinishRegion;
-}
-
-bool EscapeStatusAnalysis::SetEscaped(Node* node) {
-  bool changed = !(status_[node->id()] & kEscaped);
-  status_[node->id()] |= kEscaped | kTracked;
-  return changed;
-}
-
-bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
-  return status_[id] & kInQueue;
-}
+   private:
+    EscapeAnalysisTracker* tracker_;
+    EffectGraphReducer* reducer_;
+    VirtualObject* vobject_ = nullptr;
+    Node* replacement_ = nullptr;
+  };
-void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
-  if (on_stack) {
-    status_[id] |= kInQueue;
-  } else {
-    status_[id] &= ~kInQueue;
-  }
-}
-
-void EscapeStatusAnalysis::ResizeStatusVector() {
-  if (status_.size() <= graph()->NodeCount()) {
-    status_.resize(graph()->NodeCount() * 1.1, kUnknown);
-  }
-}
-
-size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
-
-void EscapeStatusAnalysis::RunStatusAnalysis() {
-  // TODO(tebbi): This checks for faulty VirtualObject states, which can happen
-  // due to bug https://bugs.chromium.org/p/v8/issues/detail?id=6302. As a
-  // workaround, we set everything to escaped if such a faulty state was
-  // detected.
-  bool all_objects_complete = object_analysis_->AllObjectsComplete();
-  ResizeStatusVector();
-  while (!status_stack_.empty()) {
-    Node* node = status_stack_.back();
-    status_stack_.pop_back();
-    status_[node->id()] &= ~kOnStack;
-    Process(node);
-    status_[node->id()] |= kVisited;
-    if (!all_objects_complete) SetEscaped(node);
-  }
-}
-
-void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
-  DCHECK_NOT_NULL(node);
-  if (!(status_[node->id()] & kOnStack)) {
-    status_stack_.push_back(node);
-    status_[node->id()] |= kOnStack;
-  }
-}
-
-void EscapeStatusAnalysis::RevisitInputs(Node* node) {
-  for (Edge edge : node->input_edges()) {
-    Node* input = edge.to();
-    if (!(status_[input->id()] & kOnStack)) {
-      status_stack_.push_back(input);
-      status_[input->id()] |= kOnStack;
-    }
-  }
-}
-
-void EscapeStatusAnalysis::RevisitUses(Node* node) {
-  for (Edge edge : node->use_edges()) {
-    Node* use = edge.from();
-    if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
-      status_stack_.push_back(use);
-      status_[use->id()] |= kOnStack;
-    }
-  }
-}
+  Node* GetReplacementOf(Node* node) { return replacements_[node]; }
+  Node* ResolveReplacement(Node* node) {
+    if (Node* replacement = GetReplacementOf(node)) {
+      // Replacements cannot have replacements. This is important to ensure
+      // re-visitation: If a replacement is replaced, then all nodes accessing
+      // the replacement have to be updated.
+      DCHECK_NULL(GetReplacementOf(replacement));
+      return replacement;
+    }
+    return node;
+  }
+
+ private:
+  friend class EscapeAnalysisResult;
+  static const size_t kMaxTrackedObjects = 100;
+
+  VirtualObject* NewVirtualObject(int size) {
+    if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
+    return new (zone_)
+        VirtualObject(&variable_states_, next_object_id_++, size);
+  }
+
+  SparseSidetable<VirtualObject*> virtual_objects_;
+  Sidetable<Node*> replacements_;
+  VariableTracker variable_states_;
+  VirtualObject::Id next_object_id_ = 0;
+  JSGraph* const jsgraph_;
+  Zone* const zone_;
+
+  DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
+};
void EscapeStatusAnalysis::Process(Node* node) { EffectGraphReducer::EffectGraphReducer(
switch (node->opcode()) { Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
case IrOpcode::kAllocate: : graph_(graph),
ProcessAllocate(node); state_(graph, kNumStates),
break; revisit_(zone),
case IrOpcode::kFinishRegion: stack_(zone),
ProcessFinishRegion(node); reduce_(reduce) {}
void EffectGraphReducer::ReduceFrom(Node* node) {
// Perform DFS and eagerly trigger revisitation as soon as possible.
// A stack element {node, i} indicates that input i of node should be visited
// next.
DCHECK(stack_.empty());
stack_.push({node, 0});
while (!stack_.empty()) {
Node* current = stack_.top().node;
int& input_index = stack_.top().input_index;
if (input_index < current->InputCount()) {
Node* input = current->InputAt(input_index);
input_index++;
switch (state_.Get(input)) {
case State::kVisited:
// The input is already reduced.
break; break;
case IrOpcode::kStoreField: case State::kOnStack:
ProcessStoreField(node); // The input is on the DFS stack right now, so it will be revisited
// later anyway.
break; break;
case IrOpcode::kStoreElement: case State::kUnvisited:
ProcessStoreElement(node); case State::kRevisit: {
state_.Set(input, State::kOnStack);
stack_.push({input, 0});
break; break;
case IrOpcode::kLoadField:
case IrOpcode::kLoadElement: {
if (Node* rep = object_analysis_->GetReplacement(node)) {
if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
RevisitInputs(rep);
RevisitUses(rep);
} }
} else {
Node* from = NodeProperties::GetValueInput(node, 0);
from = object_analysis_->ResolveReplacement(from);
if (SetEscaped(from)) {
TRACE("Setting #%d (%s) to escaped because of unresolved load #%i\n",
from->id(), from->op()->mnemonic(), node->id());
RevisitInputs(from);
RevisitUses(from);
} }
} } else {
RevisitUses(node); stack_.pop();
break; Reduction reduction;
} reduce_(current, &reduction);
case IrOpcode::kPhi: for (Edge edge : current->use_edges()) {
if (!HasEntry(node)) { // Mark uses for revisitation.
status_[node->id()] |= kTracked;
RevisitUses(node);
}
if (!IsAllocationPhi(node) && SetEscaped(node)) {
RevisitInputs(node);
RevisitUses(node);
}
CheckUsesForEscape(node);
default:
break;
}
}
bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
for (Edge edge : node->input_edges()) {
Node* input = edge.to();
if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
if (IsAllocation(input)) continue;
return false;
}
return true;
}
void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
Node* to = NodeProperties::GetValueInput(node, 0);
Node* val = NodeProperties::GetValueInput(node, 1);
if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
RevisitUses(val);
RevisitInputs(val);
TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
val->id(), val->op()->mnemonic(), to->id());
}
}
void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
Node* to = NodeProperties::GetValueInput(node, 0);
Node* val = NodeProperties::GetValueInput(node, 2);
if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
RevisitUses(val);
RevisitInputs(val);
TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
val->id(), val->op()->mnemonic(), to->id());
}
}
void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
if (!HasEntry(node)) {
status_[node->id()] |= kTracked;
TRACE("Created status entry for node #%d (%s)\n", node->id(),
node->op()->mnemonic());
NumberMatcher size(node->InputAt(0));
DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
RevisitUses(node);
if (!size.HasValue() && SetEscaped(node)) {
TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
// This node is already known to escape, uses do not have to be checked
// for escape.
return;
}
}
if (CheckUsesForEscape(node, true)) {
RevisitUses(node);
}
}
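// A minimal sketch (not the V8 types; the exact bit values are invented) of
// the per-node status bitset that the analysis above manipulates: kTracked
// marks nodes with an entry, kEscaped marks escaping nodes, kOnStack
// deduplicates worklist pushes. As with SetEscaped() above, the setter
// reports whether the bit was newly set, so callers only revisit uses on an
// actual state change.
#include <cstdint>
#include <vector>

enum StatusFlag : uint8_t {
  kTracked = 1 << 0,
  kEscaped = 1 << 1,
  kOnStack = 1 << 2,
};

struct StatusTable {
  std::vector<uint8_t> status;
  explicit StatusTable(size_t node_count) : status(node_count, 0) {}
  bool SetEscaped(int id) {
    if (status[id] & kEscaped) return false;  // No change: no revisitation.
    status[id] |= kTracked | kEscaped;
    return true;
  }
  bool IsEscaped(int id) const { return (status[id] & kEscaped) != 0; }
};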
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
  for (Edge edge : uses->use_edges()) {
    Node* use = edge.from();
    if (IsNotReachable(use)) continue;
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (use->opcode()) {
      case IrOpcode::kPhi:
        if (phi_escaping && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by phi node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
      // Fallthrough.
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kCheckMaps:
        if (IsEscaped(use) && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by escaping node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kObjectIsSmi:
        if (!IsAllocation(rep) && SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kSelect:
      // TODO(mstarzinger): The following list of operators will eventually be
      // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
      case IrOpcode::kConvertTaggedHoleToUndefined:
      case IrOpcode::kStringEqual:
      case IrOpcode::kStringLessThan:
      case IrOpcode::kStringLessThanOrEqual:
      case IrOpcode::kTypeGuard:
      case IrOpcode::kPlainPrimitiveToNumber:
      case IrOpcode::kPlainPrimitiveToWord32:
      case IrOpcode::kPlainPrimitiveToFloat64:
      case IrOpcode::kStringCharAt:
      case IrOpcode::kStringCharCodeAt:
      case IrOpcode::kSeqStringCharCodeAt:
      case IrOpcode::kStringIndexOf:
      case IrOpcode::kStringToLowerCaseIntl:
      case IrOpcode::kStringToUpperCaseIntl:
      case IrOpcode::kObjectIsCallable:
      case IrOpcode::kObjectIsDetectableCallable:
      case IrOpcode::kObjectIsNaN:
      case IrOpcode::kObjectIsNonCallable:
      case IrOpcode::kObjectIsNumber:
      case IrOpcode::kObjectIsReceiver:
      case IrOpcode::kObjectIsString:
      case IrOpcode::kObjectIsSymbol:
      case IrOpcode::kObjectIsUndetectable:
      case IrOpcode::kNumberLessThan:
      case IrOpcode::kNumberLessThanOrEqual:
      case IrOpcode::kNumberEqual:
#define CASE(opcode) case IrOpcode::k##opcode:
        SIMPLIFIED_NUMBER_BINOP_LIST(CASE)
        SIMPLIFIED_NUMBER_UNOP_LIST(CASE)
#undef CASE
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      default:
        DCHECK(use->op()->EffectInputCount() > 0 ||
               uses->op()->EffectInputCount() == 0 ||
               IrOpcode::IsJsOpcode(use->opcode()));
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
    }
  }
  return false;
}

void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
    RevisitUses(node);
  }
}

void EscapeStatusAnalysis::DebugPrint() {
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
      PrintF("Node #%d is %s\n", id,
             (status_[id] & kEscaped) ? "escaping" : "virtual");
    }
  }
}
EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
    : zone_(zone),
      slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
      common_(common),
      status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
      virtual_states_(zone),
      replacements_(zone),
      cycle_detection_(zone),
      cache_(nullptr) {
  // Type slot_not_analyzed_ manually.
  double v = OpParameter<double>(slot_not_analyzed_);
  NodeProperties::SetType(slot_not_analyzed_, Type::Range(v, v, zone));
}

VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
                                 Zone* zone)
    : zone_(zone),
      graph_(graph),
      table_(zone, State(zone)),
      buffer_(zone),
      reducer_(reducer) {}

VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
                              Reduction* reduction)
    : ReduceScope(node, reduction),
      states_(states),
      current_state_(states->zone_) {
  switch (node->opcode()) {
    case IrOpcode::kEffectPhi:
      current_state_ = states_->MergeInputs(node);
      break;
    default:
      int effect_inputs = node->op()->EffectInputCount();
      if (effect_inputs == 1) {
        current_state_ =
            states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
      } else {
        DCHECK_EQ(0, effect_inputs);
      }
  }
}

VariableTracker::Scope::~Scope() {
  if (!reduction()->effect_changed() &&
      states_->table_.Get(current_node()) != current_state_) {
    reduction()->set_effect_changed();
  }
  states_->table_.Set(current_node(), current_state_);
}

VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
  // A variable that is mapped to [nullptr] was not assigned a value on every
  // execution path to the current effect phi. Relying on the invariant that
  // every variable is initialized (at least with a sentinel like the Dead
  // node), this means that the variable initialization does not dominate the
  // current point. So for loop effect phis, we can keep nullptr for a variable
  // as long as the first input of the loop has nullptr for this variable. For
  // non-loop effect phis, we can even keep it nullptr as long as any input has
  // nullptr.
  DCHECK(effect_phi->opcode() == IrOpcode::kEffectPhi);
  int arity = effect_phi->op()->EffectInputCount();
  Node* control = NodeProperties::GetControlInput(effect_phi, 0);
  TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
  bool is_loop = control->opcode() == IrOpcode::kLoop;
  buffer_.reserve(arity + 1);

  State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
  State result = first_input;
  for (std::pair<Variable, Node*> var_value : first_input) {
    if (Node* value = var_value.second) {
      Variable var = var_value.first;
      TRACE("var %i:\n", var.id_);
      buffer_.clear();
      buffer_.push_back(value);
      bool identical_inputs = true;
      int num_defined_inputs = 1;
      TRACE("  input 0: %s#%d\n", value->op()->mnemonic(), value->id());
      for (int i = 1; i < arity; ++i) {
        Node* next_value =
            table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
        if (next_value != value) identical_inputs = false;
        if (next_value != nullptr) {
          num_defined_inputs++;
          TRACE("  input %i: %s#%d\n", i, next_value->op()->mnemonic(),
                next_value->id());
        } else {
          TRACE("  input %i: nullptr\n", i);
        }
        buffer_.push_back(next_value);
      }

      Node* old_value = table_.Get(effect_phi).Get(var);
      if (old_value) {
        TRACE("  old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
      } else {
        TRACE("  old: nullptr\n");
      }
      // Reuse a previously created phi node if possible.
      if (old_value && old_value->opcode() == IrOpcode::kPhi &&
          NodeProperties::GetControlInput(old_value, 0) == control) {
        // Since a phi node can never dominate its control node,
        // [old_value] cannot originate from the inputs. Thus [old_value]
        // must have been created by a previous reduction of this [effect_phi].
        for (int i = 0; i < arity; ++i) {
          NodeProperties::ReplaceValueInput(
              old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
          // This change cannot affect the rest of the reducer, so there is no
          // need to trigger additional revisitations.
        }
        result.Set(var, old_value);
      } else {
        if (num_defined_inputs == 1 && is_loop) {
          // For loop effect phis, the variable initialization dominates iff it
          // dominates the first input.
          DCHECK_EQ(2, arity);
          DCHECK_EQ(value, buffer_[0]);
          result.Set(var, value);
        } else if (num_defined_inputs < arity) {
          // If the variable is undefined on some input of this non-loop effect
          // phi, then its initialization does not dominate this point.
          result.Set(var, nullptr);
        } else {
          DCHECK_EQ(num_defined_inputs, arity);
          // We only create a phi if the values are different.
          if (identical_inputs) {
            result.Set(var, value);
          } else {
            TRACE("Creating new phi\n");
            buffer_.push_back(control);
            Node* phi = graph_->graph()->NewNode(
                graph_->common()->Phi(MachineRepresentation::kTagged, arity),
                arity + 1, &buffer_.front());
            // TODO(tebbi): Computing precise types here is tricky, because of
            // the necessary revisitations. If we really need this, we should
            // probably do it afterwards.
            NodeProperties::SetType(phi, Type::Any());
            reducer_->AddRoot(phi);
            result.Set(var, phi);
          }
        }
      }
#ifdef DEBUG
      if (Node* result_node = result.Get(var)) {
        TRACE("  result: %s#%d\n", result_node->op()->mnemonic(),
              result_node->id());
      } else {
        TRACE("  result: nullptr\n");
      }
#endif
    }
  }
  return result;
}
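// A minimal, self-contained sketch (not V8 code) of the merge rule in
// VariableTracker::MergeInputs above. A variable's value per predecessor is
// modeled as an int, with 0 playing the role of nullptr ("the variable's
// initialization does not dominate this input") and -1 standing for "a phi
// of the inputs would be created". All names and values are illustrative.
#include <cstdio>
#include <vector>

int MergeAtEffectPhi(const std::vector<int>& inputs, bool is_loop) {
  int first = inputs[0];
  if (first == 0) return 0;  // Undefined on the first input: stays undefined.
  int num_defined = 0;
  bool identical = true;
  for (int v : inputs) {
    if (v != 0) num_defined++;
    if (v != first) identical = false;
  }
  // For loops, domination of the first input suffices.
  if (is_loop && num_defined == 1) return first;
  // For non-loop merges, any undefined input kills the value.
  if (num_defined < static_cast<int>(inputs.size())) return 0;
  if (identical) return first;  // Identical values need no phi.
  return -1;                    // Differing defined values: create a phi.
}

int main() {
  printf("%d\n", MergeAtEffectPhi({7, 7}, false));  // 7: no phi needed
  printf("%d\n", MergeAtEffectPhi({7, 0}, true));   // 7: loop back-edge only
  printf("%d\n", MergeAtEffectPhi({7, 0}, false));  // 0: not dominated
  printf("%d\n", MergeAtEffectPhi({7, 8}, false));  // -1: phi required
}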
EscapeAnalysis::~EscapeAnalysis() {}
bool EscapeAnalysis::Run() {
replacements_.resize(graph()->NodeCount());
status_analysis_->AssignAliases();
if (status_analysis_->AliasCount() > 0) {
cache_ = new (zone()) MergeCache(zone());
replacements_.resize(graph()->NodeCount());
status_analysis_->ResizeStatusVector();
RunObjectAnalysis();
status_analysis_->RunStatusAnalysis();
return true;
} else {
return false;
}
}
void EscapeStatusAnalysis::AssignAliases() {
size_t max_size = 1024;
size_t min_size = 32;
size_t stack_size =
std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
stack_.reserve(stack_size);
ResizeStatusVector();
stack_.push_back(graph()->end());
CHECK_LT(graph()->NodeCount(), kUntrackable);
aliases_.resize(graph()->NodeCount(), kNotReachable);
aliases_[graph()->end()->id()] = kUntrackable;
status_stack_.reserve(8);
TRACE("Discovering trackable nodes");
while (!stack_.empty()) {
Node* node = stack_.back();
stack_.pop_back();
    switch (node->opcode()) {
      case IrOpcode::kAllocate:
        if (aliases_[node->id()] >= kUntrackable) {
          aliases_[node->id()] = NextAlias();
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
          EnqueueForStatusAnalysis(node);
        }
        break;
      case IrOpcode::kFinishRegion: {
        Node* allocate = NodeProperties::GetValueInput(node, 0);
        DCHECK_NOT_NULL(allocate);
        if (allocate->opcode() == IrOpcode::kAllocate) {
          if (aliases_[allocate->id()] >= kUntrackable) {
            if (aliases_[allocate->id()] == kNotReachable) {
              stack_.push_back(allocate);
            }
            aliases_[allocate->id()] = NextAlias();
            TRACE(" @%d:%s#%u", aliases_[allocate->id()],
                  allocate->op()->mnemonic(), allocate->id());
            EnqueueForStatusAnalysis(allocate);
          }
          aliases_[node->id()] = aliases_[allocate->id()];
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
        }
        break;
      }
      default:
        DCHECK_EQ(aliases_[node->id()], kUntrackable);
        break;
    }
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (aliases_[input->id()] == kNotReachable) {
        stack_.push_back(input);
        aliases_[input->id()] = kUntrackable;
      }
    }
  }
  TRACE("\n");
}

bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
  if (node->id() >= aliases_.size()) {
    return false;
  }
  return aliases_[node->id()] == kNotReachable;
}

bool EscapeAnalysis::AllObjectsComplete() {
  for (VirtualState* state : virtual_states_) {
    if (state) {
      for (size_t i = 0; i < state->size(); ++i) {
        if (VirtualObject* object = state->VirtualObjectFromAlias(i)) {
          if (!object->AllFieldsClear()) {
            for (size_t i = 0; i < object->field_count(); ++i) {
              if (object->GetField(i) == nullptr) {
                return false;
              }
            }
          }
        }
      }
    }
  }
  return true;
}

void EscapeAnalysis::RunObjectAnalysis() {
  virtual_states_.resize(graph()->NodeCount());
  ZoneDeque<Node*> queue(zone());
  queue.push_back(graph()->start());
  ZoneVector<Node*> danglers(zone());
  while (!queue.empty()) {
    Node* node = queue.back();
    queue.pop_back();
    status_analysis_->SetInQueue(node->id(), false);
    if (Process(node)) {
      for (Edge edge : node->use_edges()) {
        Node* use = edge.from();
        if (status_analysis_->IsNotReachable(use)) {
          continue;
        }
        if (NodeProperties::IsEffectEdge(edge)) {
          // Iteration order: depth first, but delay phis.
          // We need DFS to avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
          if (use->opcode() == IrOpcode::kEffectPhi) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_front(use);
            }
          } else if ((use->opcode() != IrOpcode::kLoadField &&
                      use->opcode() != IrOpcode::kLoadElement) ||
                     !status_analysis_->IsDanglingEffectNode(use)) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_back(use);
            }
          } else {
            danglers.push_back(use);
          }
        }
      }
      // Danglers need to be processed immediately, even if they are
      // on the stack. Since they do not have effect outputs,
      // we don't have to track whether they are on the stack.
      queue.insert(queue.end(), danglers.begin(), danglers.end());
      danglers.clear();
    }
  }
#ifdef DEBUG
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
#endif
}

bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
  if (status_[node->id()] & kDanglingComputed) {
    return status_[node->id()] & kDangling;
  }
  if (node->op()->EffectInputCount() == 0 ||
      node->op()->EffectOutputCount() == 0 ||
      (node->op()->EffectInputCount() == 1 &&
       NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
    // The start node is used as sentinel for nodes that are in general
    // effectful, but of which an analysis has determined that they do not
    // produce effects in this instance. We don't consider these nodes
    // dangling.
    status_[node->id()] |= kDanglingComputed;
    return false;
  }
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      status_[node->id()] |= kDanglingComputed;
      return false;
    }
  }
  status_[node->id()] |= kDanglingComputed | kDangling;
  return true;
}

bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
  if (status_[node->id()] & kBranchPointComputed) {
    return status_[node->id()] & kBranchPoint;
  }
  int count = 0;
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      if ((use->opcode() == IrOpcode::kLoadField ||
           use->opcode() == IrOpcode::kLoadElement ||
           use->opcode() == IrOpcode::kLoad) &&
          IsDanglingEffectNode(use))
        continue;
      if (++count > 1) {
        status_[node->id()] |= kBranchPointComputed | kBranchPoint;
        return true;
      }
    }
  }
  status_[node->id()] |= kBranchPointComputed;
  return false;
}
namespace {

bool HasFrameStateInput(const Operator* op) {
  if (op->opcode() == IrOpcode::kCall ||
      op->opcode() == IrOpcode::kCallWithCallerSavedRegisters ||
      op->opcode() == IrOpcode::kTailCall) {
    const CallDescriptor* d = CallDescriptorOf(op);
    return d->NeedsFrameState();
  } else {
    return OperatorProperties::HasFrameStateInput(op);
  }
}

}  // namespace

namespace {

int OffsetOfFieldAccess(const Operator* op) {
  DCHECK(op->opcode() == IrOpcode::kLoadField ||
         op->opcode() == IrOpcode::kStoreField);
  FieldAccess access = FieldAccessOf(op);
  return access.offset;
}

Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
  DCHECK(op->opcode() == IrOpcode::kLoadElement ||
         op->opcode() == IrOpcode::kStoreElement);
  Type* index_type = NodeProperties::GetType(index_node);
  if (!index_type->Is(Type::Number())) return Nothing<int>();
  double max = index_type->Max();
  double min = index_type->Min();
  int index = static_cast<int>(min);
  if (!(index == min && index == max)) return Nothing<int>();
  ElementAccess access = ElementAccessOf(op);
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  return Just(access.header_size + (index << ElementSizeLog2Of(
                                        access.machine_type.representation())));
}

void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
                JSGraph* jsgraph) {
  switch (op->opcode()) {
    case IrOpcode::kAllocate: {
      NumberMatcher size(current->ValueInput(0));
      if (!size.HasValue()) break;
      int size_int = static_cast<int>(size.Value());
      if (size_int != size.Value()) break;
      if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
        // Initialize with dead nodes as a sentinel for uninitialized memory.
        for (Variable field : *vobject) {
          current->Set(field, jsgraph->Dead());
        }
      }
      break;
    }
    case IrOpcode::kFinishRegion:
      current->SetVirtualObject(current->ValueInput(0));
      break;
    case IrOpcode::kStoreField: {
      Node* object = current->ValueInput(0);
      Node* value = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(object);
        current->SetEscaped(value);
      }
      break;
    }
    case IrOpcode::kStoreElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      Node* value = current->ValueInput(2);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(value);
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadField: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
        current->SetReplacement(current->Get(var));
      } else {
        // TODO(tebbi): At the moment, we mark objects as escaping if there
        // is a load from an invalid location to avoid dead nodes. This is a
        // workaround that should be removed once we can handle dead nodes
        // everywhere.
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->SetReplacement(current->Get(var));
      } else {
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kTypeGuard: {
      // The type-guard is re-introduced in the final reducer if the types
      // don't match.
      current->SetReplacement(current->ValueInput(0));
      break;
    }
    case IrOpcode::kReferenceEqual: {
      Node* left = current->ValueInput(0);
      Node* right = current->ValueInput(1);
      const VirtualObject* left_object = current->GetVirtualObject(left);
      const VirtualObject* right_object = current->GetVirtualObject(right);
      Node* replacement = nullptr;
      if (left_object && !left_object->HasEscaped()) {
        if (right_object && !right_object->HasEscaped() &&
            left_object->id() == right_object->id()) {
          replacement = jsgraph->TrueConstant();
        } else {
          replacement = jsgraph->FalseConstant();
        }
      } else if (right_object && !right_object->HasEscaped()) {
        replacement = jsgraph->FalseConstant();
      }
      if (replacement) {
        // TODO(tebbi) This is a workaround for uninhabited types. If we
        // replaced a value of uninhabited type with a constant, we would
        // widen the type of the node. This could produce inconsistent
        // types (which might confuse representation selection). We get
        // around this by refusing to constant-fold and escape-analyze
        // if the type is not inhabited.
        if (NodeProperties::GetType(left)->IsInhabited() &&
            NodeProperties::GetType(right)->IsInhabited()) {
          current->SetReplacement(replacement);
        } else {
          current->SetEscaped(left);
          current->SetEscaped(right);
        }
      }
      break;
    }
    case IrOpcode::kCheckMaps: {
      CheckMapsParameters params = CheckMapsParametersOf(op);
      Node* checked = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(checked);
      Variable map_field;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field)) {
        Node* map = current->Get(map_field);
        if (map) {
          Type* const map_type = NodeProperties::GetType(map);
          if (map_type->IsHeapConstant() &&
              params.maps().contains(ZoneHandleSet<Map>(bit_cast<Handle<Map>>(
                  map_type->AsHeapConstant()->Value())))) {
            current->MarkForDeletion();
            break;
          }
        }
      }
      current->SetEscaped(checked);
      break;
    }
    case IrOpcode::kCheckHeapObject: {
      Node* checked = current->ValueInput(0);
      switch (checked->opcode()) {
        case IrOpcode::kAllocate:
        case IrOpcode::kFinishRegion:
        case IrOpcode::kHeapConstant:
          current->SetReplacement(checked);
          break;
        default:
          current->SetEscaped(checked);
          break;
      }
      break;
    }
    case IrOpcode::kMapGuard: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      if (vobject && !vobject->HasEscaped()) {
        current->MarkForDeletion();
      }
      break;
    }
    case IrOpcode::kStateValues:
    case IrOpcode::kFrameState:
      // These uses are always safe.
      break;
    default: {
      // For unknown nodes, treat all value inputs as escaping.
      int value_input_count = op->ValueInputCount();
      for (int i = 0; i < value_input_count; ++i) {
        Node* input = current->ValueInput(i);
        current->SetEscaped(input);
      }
      if (OperatorProperties::HasContextInput(op)) {
        current->SetEscaped(current->ContextInput());
      }
      break;
    }
  }
}

}  // namespace

void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
  const Operator* op = node->op();
  TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());

  EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
  ReduceNode(op, &current, jsgraph());
}

EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, Zone* zone)
    : EffectGraphReducer(
          jsgraph->graph(),
          [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
          zone),
      tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
      jsgraph_(jsgraph) {}

Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
  return tracker_->GetReplacementOf(node);
}

Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
                                                  int field, Node* effect) {
  return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
                                        effect);
}

const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
  return tracker_->virtual_objects_.Get(node);
}

VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
                             int size)
    : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
  DCHECK(size % kPointerSize == 0);
  TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
  int num_fields = size / kPointerSize;
  fields_.reserve(num_fields);
  for (int i = 0; i < num_fields; ++i) {
    fields_.push_back(var_states->NewVariable());
  }
}
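// A worked example (invented numbers, not from the source) of the offset
// arithmetic in OffsetOfElementsAccess above, assuming a 16-byte header and
// pointer-sized (8-byte) elements on a 64-bit target: the byte offset of a
// constant index is header_size + (index << log2(element_size)). An index
// whose type is not a single number (min != max) yields Nothing<int>() and
// the object escapes instead.
#include <cstdio>

int main() {
  const int kHeaderSize = 16;      // assumed access.header_size
  const int kElementSizeLog2 = 3;  // assumed element size of 8 bytes
  for (int index = 0; index < 3; ++index) {
    printf("element %d -> byte offset %d\n", index,
           kHeaderSize + (index << kElementSizeLog2));  // 16, 24, 32
  }
}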
bool EscapeAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocation(node);
      break;
    case IrOpcode::kBeginRegion:
      ForwardVirtualState(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kLoadField:
      ProcessLoadField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadElement:
      ProcessLoadElement(node);
      break;
    case IrOpcode::kCheckMaps:
      ProcessCheckMaps(node);
      break;
    case IrOpcode::kStart:
      ProcessStart(node);
      break;
    case IrOpcode::kEffectPhi:
      return ProcessEffectPhi(node);
      break;
    default:
      if (node->op()->EffectInputCount() > 0) {
        ForwardVirtualState(node);
      }
      ProcessAllocationUsers(node);
      break;
  }
  if (HasFrameStateInput(node->op())) {
    virtual_states_[node->id()]->SetCopyRequired();
  }
  return true;
}

void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    Node* use = edge.from();
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (node->opcode()) {
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kObjectIsSmi:
        break;
      case IrOpcode::kCheckMaps: {
        CheckMapsParameters params = CheckMapsParametersOf(node->op());
        if (params.flags() == CheckMapsFlag::kNone) break;
      }  // Fallthrough.
      default:
        VirtualState* state = virtual_states_[node->id()];
        if (VirtualObject* obj =
                GetVirtualObject(state, ResolveReplacement(input))) {
          if (!obj->AllFieldsClear()) {
            obj = CopyForModificationAt(obj, state, node);
            obj->ClearAllFields();
            TRACE("Cleared all fields of @%d:#%d\n",
                  status_analysis_->GetAlias(obj->id()), obj->id());
          }
        }
        break;
    }
  }
}
VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
  if (state->owner() != node) {
    VirtualState* new_state = new (zone()) VirtualState(node, *state);
    virtual_states_[node->id()] = new_state;
    TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
          static_cast<void*>(state), static_cast<void*>(new_state),
          node->op()->mnemonic(), node->id());
    return new_state;
  }
  return state;
}

VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
  if (obj->NeedCopyForModification()) {
    state = CopyForModificationAt(state, node);
    // TODO(tebbi): this copies the complete virtual state. Replace with a more
    // precise analysis of which objects are actually affected by the change.
    Alias changed_alias = status_analysis_->GetAlias(obj->id());
    for (Alias alias = 0; alias < state->size(); ++alias) {
      if (VirtualObject* next_obj = state->VirtualObjectFromAlias(alias)) {
        if (alias != changed_alias && next_obj->NeedCopyForModification()) {
          state->Copy(next_obj, alias);
        }
      }
    }
    return state->Copy(obj, changed_alias);
  }
  return obj;
}

void EscapeAnalysis::ForwardVirtualState(Node* node) {
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
#ifdef DEBUG
  if (node->opcode() != IrOpcode::kLoadField &&
      node->opcode() != IrOpcode::kLoadElement &&
      node->opcode() != IrOpcode::kLoad &&
      status_analysis_->IsDanglingEffectNode(node)) {
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
    UNREACHABLE();
  }
#endif  // DEBUG
  Node* effect = NodeProperties::GetEffectInput(node);
  DCHECK_NOT_NULL(virtual_states_[effect->id()]);
  if (virtual_states_[node->id()]) {
    TRACE("Updating virtual state %p at %s#%d from virtual state %p at %s#%d\n",
          static_cast<void*>(virtual_states_[node->id()]),
          node->op()->mnemonic(), node->id(),
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id());
    virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
                                            zone());
  } else {
    virtual_states_[node->id()] = virtual_states_[effect->id()];
    TRACE("Forwarding object state %p from %s#%d to %s#%d",
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
          node->id());
    if (status_analysis_->IsEffectBranchPoint(effect)) {
      virtual_states_[node->id()]->SetCopyRequired();
      TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
            effect->id());
    }
    TRACE("\n");
  }
}
void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  virtual_states_[node->id()] =
      new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
}

bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    mergeState =
        new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
          static_cast<void*>(mergeState));
  }

  cache_->Clear();

  TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
        static_cast<void*>(mergeState));

  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
      if (state == mergeState) {
        mergeState = new (zone())
            VirtualState(node, zone(), status_analysis_->AliasCount());
        virtual_states_[node->id()] = mergeState;
        changed = true;
      }
    }
    TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
          input->op()->mnemonic());
  }
  TRACE("\n");

  if (cache_->states().size() == 0) {
    return changed;
  }

  changed =
      mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;

  TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");

  if (changed) {
    status_analysis_->ResizeStatusVector();
  }
  return changed;
}

void EscapeAnalysis::ProcessAllocation(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  ForwardVirtualState(node);
  VirtualState* state = virtual_states_[node->id()];
  Alias alias = status_analysis_->GetAlias(node->id());

  // Check if we have already processed this node.
  if (state->VirtualObjectFromAlias(alias)) {
    return;
  }

  if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
    state = CopyForModificationAt(state, node);
  }

  NumberMatcher size(node->InputAt(0));
  DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
  if (size.HasValue()) {
    VirtualObject* obj = new (zone()) VirtualObject(
        node->id(), state, zone(), size.Value() / kPointerSize, false);
    state->SetVirtualObject(alias, obj);
  } else {
    state->SetVirtualObject(
        alias, new (zone()) VirtualObject(node->id(), state, zone()));
  }
}

void EscapeAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  ForwardVirtualState(node);
  Node* allocation = NodeProperties::GetValueInput(node, 0);
  if (allocation->opcode() == IrOpcode::kAllocate) {
    VirtualState* state = virtual_states_[node->id()];
    VirtualObject* obj =
        state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
    DCHECK_NOT_NULL(obj);
    obj->SetInitialized();
  }
}

Node* EscapeAnalysis::replacement(Node* node) {
  if (node->id() >= replacements_.size()) return nullptr;
  return replacements_[node->id()];
}

bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
  bool changed = replacements_[node->id()] != rep;
  replacements_[node->id()] = rep;
  return changed;
}

bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
                                       Node* rep) {
  if (SetReplacement(node, rep)) {
    if (rep) {
      TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
            rep->op()->mnemonic());
    } else {
      TRACE("Replacement of #%d cleared\n", node->id());
    }
    return true;
  }
  return false;
}

Node* EscapeAnalysis::ResolveReplacement(Node* node) {
  while (replacement(node)) {
    node = replacement(node);
  }
  return node;
}

Node* EscapeAnalysis::GetReplacement(Node* node) {
  Node* result = nullptr;
  while (replacement(node)) {
    node = result = replacement(node);
  }
  return result;
}

bool EscapeAnalysis::IsVirtual(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsVirtual(node);
}

bool EscapeAnalysis::IsEscaped(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsEscaped(node);
}
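// A minimal sketch (toy data, not V8 code) of the replacement-chain walk in
// ResolveReplacement/GetReplacement above: a replacement may itself have been
// replaced, so lookups follow the chain to its end. Ids stand in for nodes,
// and -1 plays the role of nullptr ("no replacement").
#include <cstdio>
#include <vector>

int main() {
  std::vector<int> replacement = {-1, 0, 1, -1};  // chain: 2 -> 1 -> 0
  int node = 2;
  while (replacement[node] != -1) node = replacement[node];
  printf("node 2 resolves to node %d\n", node);  // prints 0
}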
bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
DCHECK(IsVirtual(left) && IsVirtual(right));
left = ResolveReplacement(left);
right = ResolveReplacement(right);
if (IsEquivalentPhi(left, right)) {
return true;
}
return false;
}
namespace {
#ifdef DEBUG
bool IsOffsetForFieldAccessCorrect(const FieldAccess& access) {
#if V8_TARGET_LITTLE_ENDIAN
return (access.offset % kPointerSize) == 0;
#else
return ((access.offset +
(1 << ElementSizeLog2Of(access.machine_type.representation()))) %
kPointerSize) == 0;
#endif
}
#endif
int OffsetForFieldAccess(Node* node) {
FieldAccess access = FieldAccessOf(node->op());
DCHECK(IsOffsetForFieldAccessCorrect(access));
return access.offset / kPointerSize;
}
int OffsetForElementAccess(Node* node, int index) {
ElementAccess access = ElementAccessOf(node->op());
DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
kPointerSizeLog2);
DCHECK_EQ(access.header_size % kPointerSize, 0);
return access.header_size / kPointerSize + index;
}
} // namespace
void EscapeAnalysis::ProcessLoadField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
ForwardVirtualState(node);
Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
VirtualState* state = virtual_states_[node->id()];
if (VirtualObject* object = GetVirtualObject(state, from)) {
if (!object->IsTracked()) return;
int offset = OffsetForFieldAccess(node);
if (static_cast<size_t>(offset) >= object->field_count()) {
// We have a load from a field that is not inside the {object}. This
// can only happen with conflicting type feedback and for dead {node}s.
// For now, we just mark the {object} as escaping.
// TODO(turbofan): Consider introducing an Undefined or None operator
// that we can replace this load with, since we know it's dead code.
if (status_analysis_->SetEscaped(from)) {
TRACE(
"Setting #%d (%s) to escaped because load field #%d from "
"offset %d outside of object\n",
from->id(), from->op()->mnemonic(), node->id(), offset);
}
return;
}
Node* value = object->GetField(offset);
if (value) {
value = ResolveReplacement(value);
}
// Record that the load has this alias.
UpdateReplacement(state, node, value);
} else {
UpdateReplacement(state, node, nullptr);
  }
}

void EscapeAnalysis::ProcessCheckMaps(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kCheckMaps);
  ForwardVirtualState(node);
  Node* checked = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  if (FLAG_turbo_experimental) {
    VirtualState* state = virtual_states_[node->id()];
    if (VirtualObject* object = GetVirtualObject(state, checked)) {
      if (!object->IsTracked()) {
        if (status_analysis_->SetEscaped(node)) {
          TRACE(
              "Setting #%d (%s) to escaped because checked object #%i is not "
              "tracked\n",
              node->id(), node->op()->mnemonic(), object->id());
        }
        return;
      }
      CheckMapsParameters params = CheckMapsParametersOf(node->op());
      Node* value = object->GetField(HeapObject::kMapOffset / kPointerSize);
      if (value) {
        value = ResolveReplacement(value);
        // TODO(tebbi): We want to extend this beyond constant folding with a
        // CheckMapsValue operator that takes the load-eliminated map value as
        // input.
        if (value->opcode() == IrOpcode::kHeapConstant &&
            params.maps().contains(ZoneHandleSet<Map>(bit_cast<Handle<Map>>(
                OpParameter<Handle<HeapObject>>(value))))) {
          TRACE("CheckMaps #%i seems to be redundant (until now).\n",
                node->id());
          return;
        }
      }
    }
  }
  if (status_analysis_->SetEscaped(node)) {
    TRACE("Setting #%d (%s) to escaped (checking #%i)\n", node->id(),
          node->op()->mnemonic(), checked->id());
  }
}

void EscapeAnalysis::ProcessLoadElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, from)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* value = object->GetField(offset);
      if (value) {
        value = ResolveReplacement(value);
      }
      // Record that the load has this alias.
      UpdateReplacement(state, node, value);
    } else {
      UpdateReplacement(state, node, nullptr);
    }
  } else {
    // We have a load from a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(from)) {
      TRACE(
          "Setting #%d (%s) to escaped because load element #%d from non-const "
          "index #%d (%s)\n",
          from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
  }
}
void EscapeAnalysis::ProcessStoreField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
ForwardVirtualState(node);
Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
VirtualState* state = virtual_states_[node->id()];
if (VirtualObject* object = GetVirtualObject(state, to)) {
if (!object->IsTracked()) return;
int offset = OffsetForFieldAccess(node);
if (static_cast<size_t>(offset) >= object->field_count()) {
// We have a store to a field that is not inside the {object}. This
// can only happen with conflicting type feedback and for dead {node}s.
// For now, we just mark the {object} as escaping.
// TODO(turbofan): Consider just eliminating the store in the reducer
// pass, as it's dead code anyways.
if (status_analysis_->SetEscaped(to)) {
TRACE(
"Setting #%d (%s) to escaped because store field #%d to "
"offset %d outside of object\n",
to->id(), to->op()->mnemonic(), node->id(), offset);
}
return;
}
Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
// TODO(mstarzinger): The following is a workaround to not track some well
// known raw fields. We only ever store default initial values into these
// fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
if (val->opcode() == IrOpcode::kInt32Constant ||
val->opcode() == IrOpcode::kInt64Constant) {
DCHECK(FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
val = slot_not_analyzed_;
}
object = CopyForModificationAt(object, state, node);
if (object->GetField(offset) != val) {
object->SetField(offset, val);
}
}
}
void EscapeAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
Node* index_node = node->InputAt(1);
NumberMatcher index(index_node);
DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
index_node->opcode() != IrOpcode::kInt64Constant &&
index_node->opcode() != IrOpcode::kFloat32Constant &&
index_node->opcode() != IrOpcode::kFloat64Constant);
VirtualState* state = virtual_states_[node->id()];
if (index.HasValue()) {
if (VirtualObject* object = GetVirtualObject(state, to)) {
if (!object->IsTracked()) return;
int offset = OffsetForElementAccess(node, index.Value());
if (static_cast<size_t>(offset) >= object->field_count()) return;
Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
object = CopyForModificationAt(object, state, node);
if (object->GetField(offset) != val) {
object->SetField(offset, val);
}
}
} else {
// We have a store to a non-const index, cannot eliminate object.
if (status_analysis_->SetEscaped(to)) {
TRACE(
"Setting #%d (%s) to escaped because store element #%d to non-const "
"index #%d (%s)\n",
to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
index_node->op()->mnemonic());
}
if (VirtualObject* object = GetVirtualObject(state, to)) {
if (!object->IsTracked()) return;
object = CopyForModificationAt(object, state, node);
if (!object->AllFieldsClear()) {
object->ClearAllFields();
TRACE("Cleared all fields of @%d:#%d\n",
status_analysis_->GetAlias(object->id()), object->id());
}
}
}
}
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
node->opcode() == IrOpcode::kAllocate) &&
IsVirtual(node)) {
if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
ResolveReplacement(node))) {
if (Node* object_state = vobj->GetObjectState()) {
return object_state;
} else {
cache_->fields().clear();
for (size_t i = 0; i < vobj->field_count(); ++i) {
if (Node* field = vobj->GetField(i)) {
cache_->fields().push_back(ResolveReplacement(field));
} else {
return nullptr;
}
}
int input_count = static_cast<int>(cache_->fields().size());
Node* new_object_state =
graph()->NewNode(common()->ObjectState(vobj->id(), input_count),
input_count, &cache_->fields().front());
NodeProperties::SetType(new_object_state, Type::OtherInternal());
vobj->SetObjectState(new_object_state);
TRACE(
"Creating object state #%d for vobj %p (from node #%d) at effect "
"#%d\n",
new_object_state->id(), static_cast<void*>(vobj), node->id(),
effect->id());
// Now fix uses of other objects.
for (size_t i = 0; i < vobj->field_count(); ++i) {
if (Node* field = vobj->GetField(i)) {
if (Node* field_object_state =
GetOrCreateObjectState(effect, field)) {
NodeProperties::ReplaceValueInput(
new_object_state, field_object_state, static_cast<int>(i));
}
}
}
return new_object_state;
}
}
}
  return nullptr;
}
bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
                                               ResolveReplacement(node))) {
      if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
cycle_detection_.insert(vobj);
bool cycle_detected = false;
for (size_t i = 0; i < vobj->field_count(); ++i) {
if (Node* field = vobj->GetField(i)) {
if (IsCyclicObjectState(effect, field)) cycle_detected = true;
}
}
cycle_detection_.erase(vobj);
return cycle_detected;
}
}
return false;
}
void EscapeAnalysis::DebugPrintState(VirtualState* state) {
  PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
  for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
    if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
      PrintF("  Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
             object->field_count());
      for (size_t i = 0; i < object->field_count(); ++i) {
        if (Node* f = object->GetField(i)) {
          PrintF("    Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
        }
      }
    }
  }
}

void EscapeAnalysis::DebugPrint() {
  ZoneVector<VirtualState*> object_states(zone());
  for (NodeId id = 0; id < virtual_states_.size(); id++) {
    if (VirtualState* states = virtual_states_[id]) {
      if (std::find(object_states.begin(), object_states.end(), states) ==
          object_states.end()) {
        object_states.push_back(states);
      }
    }
  }
  for (size_t n = 0; n < object_states.size(); n++) {
    DebugPrintState(object_states[n]);
  }
}
VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
                                                Node* node) {
  if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
  Alias alias = status_analysis_->GetAlias(node->id());
  if (alias >= state->size()) return nullptr;
  return state->VirtualObjectFromAlias(alias);
}

bool EscapeAnalysis::ExistsVirtualAllocate() {
  for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
    Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
    if (alias < EscapeStatusAnalysis::kUntrackable) {
      if (status_analysis_->IsVirtual(static_cast<int>(id))) {
        return true;
      }
    }
  }
  return false;
}

Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }

#undef TRACE
}  // namespace compiler
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_ESCAPE_ANALYSIS_H_

#include "src/compiler/graph.h"
#include "src/globals.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
class EscapeStatusAnalysis;
namespace impl {
class MergeCache;
class VirtualState;
class VirtualObject;
};  // namespace impl

// EscapeObjectAnalysis simulates stores to determine values of loads if
// an object is virtual and eliminated.
class V8_EXPORT_PRIVATE EscapeAnalysis {
 public:
  EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common, Zone* zone);
  ~EscapeAnalysis();

  bool Run();

  Node* GetReplacement(Node* node);
  Node* ResolveReplacement(Node* node);
  bool IsVirtual(Node* node);
  bool IsEscaped(Node* node);
  bool CompareVirtualObjects(Node* left, Node* right);
  Node* GetOrCreateObjectState(Node* effect, Node* node);
  bool IsCyclicObjectState(Node* effect, Node* node);
  bool ExistsVirtualAllocate();
  bool SetReplacement(Node* node, Node* rep);
  bool AllObjectsComplete();

 private:
  void RunObjectAnalysis();
  bool Process(Node* node);
  void ProcessLoadField(Node* node);
  void ProcessStoreField(Node* node);
  void ProcessLoadElement(Node* node);
  void ProcessStoreElement(Node* node);
  void ProcessCheckMaps(Node* node);
  void ProcessAllocationUsers(Node* node);
  void ProcessAllocation(Node* node);
  void ProcessFinishRegion(Node* node);
  void ProcessCall(Node* node);
  void ProcessStart(Node* node);
  bool ProcessEffectPhi(Node* node);
  void ForwardVirtualState(Node* node);
  impl::VirtualState* CopyForModificationAt(impl::VirtualState* state,
                                            Node* node);
  impl::VirtualObject* CopyForModificationAt(impl::VirtualObject* obj,
                                             impl::VirtualState* state,
                                             Node* node);

  Node* replacement(Node* node);
  bool UpdateReplacement(impl::VirtualState* state, Node* node, Node* rep);

  impl::VirtualObject* GetVirtualObject(impl::VirtualState* state, Node* node);

  void DebugPrint();
  void DebugPrintState(impl::VirtualState* state);

  Graph* graph() const;
  Zone* zone() const { return zone_; }
  CommonOperatorBuilder* common() const { return common_; }

  Zone* const zone_;
  Node* const slot_not_analyzed_;
  CommonOperatorBuilder* const common_;
  EscapeStatusAnalysis* status_analysis_;
  ZoneVector<impl::VirtualState*> virtual_states_;
  ZoneVector<Node*> replacements_;
  ZoneSet<impl::VirtualObject*> cycle_detection_;
  impl::MergeCache* cache_;

  DISALLOW_COPY_AND_ASSIGN(EscapeAnalysis);
};

}  // namespace compiler

// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_ESCAPE_ANALYSIS_H_

#include "src/base/functional.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/persistent-map.h"
#include "src/globals.h"

namespace v8 {
namespace internal {
namespace compiler {

class CommonOperatorBuilder;
class VariableTracker;
class EscapeAnalysisTracker;

// {EffectGraphReducer} reduces up to a fixed point. It distinguishes changes
// to the effect output of a node from changes to the value output to reduce
// the number of revisitations.
class EffectGraphReducer {
 public:
  class Reduction {
   public:
    bool value_changed() const { return value_changed_; }
    void set_value_changed() { value_changed_ = true; }
    bool effect_changed() const { return effect_changed_; }
    void set_effect_changed() { effect_changed_ = true; }

   private:
    bool value_changed_ = false;
    bool effect_changed_ = false;
  };

  EffectGraphReducer(Graph* graph,
                     std::function<void(Node*, Reduction*)> reduce, Zone* zone);

  void ReduceGraph() { ReduceFrom(graph_->end()); }

  // Mark node for revisitation.
  void Revisit(Node* node);

  // Add a new root node to start reduction from. This is useful if the reducer
  // adds nodes that are not yet reachable, but should already be considered
  // part of the graph.
  void AddRoot(Node* node) {
    DCHECK(state_.Get(node) == State::kUnvisited);
    state_.Set(node, State::kRevisit);
    revisit_.push(node);
  }

  bool Complete() { return stack_.empty() && revisit_.empty(); }

 private:
  struct NodeState {
    Node* node;
    int input_index;
  };
  void ReduceFrom(Node* node);
  enum class State : uint8_t { kUnvisited = 0, kRevisit, kOnStack, kVisited };
  const uint8_t kNumStates = static_cast<uint8_t>(State::kVisited) + 1;
  Graph* graph_;
  NodeMarker<State> state_;
  ZoneStack<Node*> revisit_;
  ZoneStack<NodeState> stack_;
  std::function<void(Node*, Reduction*)> reduce_;
};

// A variable is an abstract storage location, which is lowered to SSA values
// and phi nodes by {VariableTracker}.
class Variable {
 public:
  Variable() : id_(kInvalid) {}
  bool operator==(Variable other) const { return id_ == other.id_; }
  bool operator!=(Variable other) const { return id_ != other.id_; }
  bool operator<(Variable other) const { return id_ < other.id_; }
  static Variable Invalid() { return Variable(kInvalid); }
  friend V8_INLINE size_t hash_value(Variable v) {
    return base::hash_value(v.id_);
  }
  friend std::ostream& operator<<(std::ostream& os, Variable var) {
    return os << var.id_;
  }

 private:
  typedef int Id;
  explicit Variable(Id id) : id_(id) {}
  Id id_;
  static const Id kInvalid = -1;

  friend class VariableTracker;
};

// An object that can track the nodes in the graph whose current reduction
// depends on the value of the object.
class Dependable : public ZoneObject {
 public:
  explicit Dependable(Zone* zone) : dependants_(zone) {}
  void AddDependency(Node* node) { dependants_.push_back(node); }
  void RevisitDependants(EffectGraphReducer* reducer) {
    for (Node* node : dependants_) {
      reducer->Revisit(node);
    }
    dependants_.clear();
  }

 private:
  ZoneVector<Node*> dependants_;
};

// A virtual object represents an allocation site and tracks the Variables
// associated with its fields as well as its global escape status.
class VirtualObject : public Dependable {
 public:
  typedef uint32_t Id;
  typedef ZoneVector<Variable>::const_iterator const_iterator;
  VirtualObject(VariableTracker* var_states, Id id, int size);
  Maybe<Variable> FieldAt(int offset) const {
    DCHECK(offset % kPointerSize == 0);
    CHECK(!HasEscaped());
    if (offset >= size()) {
      // This can only happen in unreachable code.
      return Nothing<Variable>();
    }
    return Just(fields_.at(offset / kPointerSize));
  }
  Id id() const { return id_; }
  int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
  // Escaped might mean that the object escaped to untracked memory or that it
  // is used in an operation that requires materialization.
  void SetEscaped() { escaped_ = true; }
  bool HasEscaped() const { return escaped_; }
  const_iterator begin() const { return fields_.begin(); }
  const_iterator end() const { return fields_.end(); }

 private:
  bool escaped_ = false;
  Id id_;
  ZoneVector<Variable> fields_;
};

class EscapeAnalysisResult {
 public:
  explicit EscapeAnalysisResult(EscapeAnalysisTracker* tracker)
      : tracker_(tracker) {}

  const VirtualObject* GetVirtualObject(Node* node);
  Node* GetVirtualObjectField(const VirtualObject* vobject, int field,
                              Node* effect);
  Node* GetReplacementOf(Node* node);

 private:
  EscapeAnalysisTracker* tracker_;
};

class V8_EXPORT_PRIVATE EscapeAnalysis final
    : public NON_EXPORTED_BASE(EffectGraphReducer) {
 public:
  EscapeAnalysis(JSGraph* jsgraph, Zone* zone);

  EscapeAnalysisResult analysis_result() {
    DCHECK(Complete());
    return EscapeAnalysisResult(tracker_);
  }

 private:
  void Reduce(Node* node, Reduction* reduction);
  JSGraph* jsgraph() { return jsgraph_; }
  EscapeAnalysisTracker* tracker_;
  JSGraph* jsgraph_;
};

}  // namespace compiler
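// A minimal sketch (invented values) of the field layout that VirtualObject
// above maintains: an allocation of byte size 24 with 8-byte pointers gets
// three field variables, and FieldAt maps a byte offset to
// fields_[offset / kPointerSize]. Variable is modeled as a plain int id here.
#include <cassert>
#include <cstdio>
#include <vector>

int main() {
  const int kPointerSize = 8;
  const int size = 24;  // assumed allocation size
  assert(size % kPointerSize == 0);
  std::vector<int> fields;  // one "Variable" id per pointer-sized field
  for (int i = 0; i < size / kPointerSize; ++i) fields.push_back(i);
  const int offset = 16;  // e.g. a FieldAccess byte offset
  printf("offset %d -> field variable %d\n", offset,
         fields[offset / kPointerSize]);  // offset 16 -> field 2
}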
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/new-escape-analysis-reducer.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/type-cache.h"
#include "src/frame-constants.h"
namespace v8 {
namespace internal {
namespace compiler {
#ifdef DEBUG
#define TRACE(...) \
do { \
if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
} while (false)
#else
#define TRACE(...)
#endif // DEBUG
NewEscapeAnalysisReducer::NewEscapeAnalysisReducer(
Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
Zone* zone)
: AdvancedReducer(editor),
jsgraph_(jsgraph),
analysis_result_(analysis_result),
object_id_cache_(zone),
node_cache_(jsgraph->graph(), zone),
arguments_elements_(zone),
zone_(zone) {}
Node* NewEscapeAnalysisReducer::MaybeGuard(Node* original, Node* replacement) {
// We might need to guard the replacement if the type of the {replacement}
// node is not in a sub-type relation to the type of the {original} node.
Type* const replacement_type = NodeProperties::GetType(replacement);
Type* const original_type = NodeProperties::GetType(original);
if (!replacement_type->Is(original_type)) {
Node* const control = NodeProperties::GetControlInput(original);
replacement = jsgraph()->graph()->NewNode(
jsgraph()->common()->TypeGuard(original_type), replacement, control);
NodeProperties::SetType(replacement, original_type);
}
return replacement;
}
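// A toy version (illustrative only, not the V8 type system) of the subtype
// test that decides whether MaybeGuard above inserts a TypeGuard: modeling
// types as numeric ranges, a guard is needed exactly when the replacement's
// type is not contained in the original's type, so the original, narrower
// type is re-imposed on the replacement.
#include <cstdio>

struct RangeType {
  double min, max;
};
bool Is(RangeType a, RangeType b) { return a.min >= b.min && a.max <= b.max; }

int main() {
  RangeType original{0, 10};      // e.g. type of the eliminated load
  RangeType replacement{0, 100};  // e.g. type of the stored value
  if (!Is(replacement, original)) {
    printf("insert TypeGuard to re-impose the original type\n");
  }
}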
namespace {
Node* SkipTypeGuards(Node* node) {
while (node->opcode() == IrOpcode::kTypeGuard) {
node = NodeProperties::GetValueInput(node, 0);
}
return node;
}
} // namespace
Node* NewEscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
VirtualObject::Id id = vobject->id();
if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
if (!object_id_cache_[id]) {
Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
NodeProperties::SetType(node, Type::Object());
object_id_cache_[id] = node;
}
return object_id_cache_[id];
}
Reduction NewEscapeAnalysisReducer::Reduce(Node* node) {
if (Node* replacement = analysis_result().GetReplacementOf(node)) {
DCHECK(node->opcode() != IrOpcode::kAllocate &&
node->opcode() != IrOpcode::kFinishRegion);
DCHECK_NE(replacement, node);
if (replacement != jsgraph()->Dead()) {
replacement = MaybeGuard(node, replacement);
}
RelaxEffectsAndControls(node);
return Replace(replacement);
}
switch (node->opcode()) {
case IrOpcode::kAllocate: {
const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
if (vobject && !vobject->HasEscaped()) {
RelaxEffectsAndControls(node);
}
return NoChange();
}
case IrOpcode::kFinishRegion: {
Node* effect = NodeProperties::GetEffectInput(node, 0);
if (effect->opcode() == IrOpcode::kBeginRegion) {
RelaxEffectsAndControls(effect);
RelaxEffectsAndControls(node);
}
return NoChange();
}
case IrOpcode::kNewUnmappedArgumentsElements:
arguments_elements_.insert(node);
return NoChange();
default: {
// TODO(sigurds): Change this to GetFrameStateInputCount once
// it is working. For now we use EffectInputCount > 0 to determine
// whether a node might have a frame state input.
if (node->op()->EffectInputCount() > 0) {
ReduceFrameStateInputs(node);
}
return NoChange();
}
}
}
// While doing DFS on the FrameState tree, we have to recognize duplicate
// occurrences of virtual objects.
class Deduplicator {
public:
explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
bool SeenBefore(const VirtualObject* vobject) {
VirtualObject::Id id = vobject->id();
if (id >= is_duplicate_.size()) {
is_duplicate_.resize(id + 1);
}
bool is_duplicate = is_duplicate_[id];
is_duplicate_[id] = true;
return is_duplicate;
}
private:
ZoneVector<bool> is_duplicate_;
};
void NewEscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
DCHECK_GE(node->op()->EffectInputCount(), 1);
for (int i = 0; i < node->InputCount(); ++i) {
Node* input = node->InputAt(i);
if (input->opcode() == IrOpcode::kFrameState) {
Deduplicator deduplicator(zone());
if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
node->ReplaceInput(i, ret);
}
}
}
}
Node* NewEscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
Deduplicator* deduplicator) {
if (node->opcode() == IrOpcode::kFrameState) {
NodeHashCache::Constructor new_node(&node_cache_, node);
// This input order is important to match the DFS traversal used in the
// instruction selector. Otherwise, the instruction selector might find a
// duplicate node before the original one.
for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
kFrameStateParametersInput, kFrameStateContextInput,
kFrameStateLocalsInput, kFrameStateStackInput}) {
Node* input = node->InputAt(input_id);
new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
input_id);
}
return new_node.Get();
} else if (node->opcode() == IrOpcode::kStateValues) {
NodeHashCache::Constructor new_node(&node_cache_, node);
for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
Node* input = NodeProperties::GetValueInput(node, i);
new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
i);
}
return new_node.Get();
} else if (const VirtualObject* vobject =
analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
if (vobject->HasEscaped()) return node;
if (deduplicator->SeenBefore(vobject)) {
return ObjectIdNode(vobject);
} else {
std::vector<Node*> inputs;
for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
Node* field =
analysis_result().GetVirtualObjectField(vobject, offset, effect);
CHECK_NOT_NULL(field);
if (field != jsgraph()->Dead()) {
inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
}
}
int num_inputs = static_cast<int>(inputs.size());
NodeHashCache::Constructor new_node(
&node_cache_,
jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
num_inputs, &inputs.front(), NodeProperties::GetType(node));
return new_node.Get();
}
} else {
return node;
}
}
void NewEscapeAnalysisReducer::VerifyReplacement() const {
AllNodes all(zone(), jsgraph()->graph());
for (Node* node : all.reachable) {
if (node->opcode() == IrOpcode::kAllocate) {
if (const VirtualObject* vobject =
analysis_result().GetVirtualObject(node)) {
if (!vobject->HasEscaped()) {
V8_Fatal(__FILE__, __LINE__,
"Escape analysis failed to remove node %s#%d\n",
node->op()->mnemonic(), node->id());
}
}
}
}
}
void NewEscapeAnalysisReducer::Finalize() {
for (Node* node : arguments_elements_) {
DCHECK(node->opcode() == IrOpcode::kNewUnmappedArgumentsElements);
Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
if (arguments_frame->opcode() != IrOpcode::kArgumentsFrame) continue;
Node* arguments_length = NodeProperties::GetValueInput(node, 1);
if (arguments_length->opcode() != IrOpcode::kArgumentsLength) continue;
Node* arguments_length_state = nullptr;
for (Edge edge : arguments_length->use_edges()) {
Node* use = edge.from();
switch (use->opcode()) {
case IrOpcode::kObjectState:
case IrOpcode::kTypedObjectState:
case IrOpcode::kStateValues:
case IrOpcode::kTypedStateValues:
if (!arguments_length_state) {
arguments_length_state = jsgraph()->graph()->NewNode(
jsgraph()->common()->ArgumentsLengthState(
IsRestLengthOf(arguments_length->op())));
NodeProperties::SetType(arguments_length_state,
Type::OtherInternal());
}
edge.UpdateTo(arguments_length_state);
break;
default:
break;
}
}
bool escaping_use = false;
ZoneVector<Node*> loads(zone());
for (Edge edge : node->use_edges()) {
Node* use = edge.from();
if (!NodeProperties::IsValueEdge(edge)) continue;
if (use->use_edges().empty()) {
// A node without uses is dead, so we don't have to care about it.
continue;
}
switch (use->opcode()) {
case IrOpcode::kStateValues:
case IrOpcode::kTypedStateValues:
case IrOpcode::kObjectState:
case IrOpcode::kTypedObjectState:
break;
case IrOpcode::kLoadElement:
loads.push_back(use);
break;
case IrOpcode::kLoadField:
if (FieldAccessOf(use->op()).offset == FixedArray::kLengthOffset) {
loads.push_back(use);
} else {
escaping_use = true;
}
break;
default:
// If the arguments elements node is used by an unhandled node,
// then we cannot remove this allocation.
escaping_use = true;
break;
}
if (escaping_use) break;
}
if (!escaping_use) {
Node* arguments_elements_state = jsgraph()->graph()->NewNode(
jsgraph()->common()->ArgumentsElementsState(
IsRestLengthOf(arguments_length->op())));
NodeProperties::SetType(arguments_elements_state, Type::OtherInternal());
ReplaceWithValue(node, arguments_elements_state);
ElementAccess stack_access;
stack_access.base_is_tagged = BaseTaggedness::kUntaggedBase;
// Reduce base address by {kPointerSize} such that (length - index)
// resolves to the right position.
stack_access.header_size =
CommonFrameConstants::kFixedFrameSizeAboveFp - kPointerSize;
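// Illustrative example: with arguments length {L}, the loads rewritten below
// address the stack slot at reversed index (L - i), so the last argument
// (i == L - 1) ends up at offset 1, which is why the base address was lowered
// by {kPointerSize} above.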
stack_access.type = Type::NonInternal();
stack_access.machine_type = MachineType::AnyTagged();
stack_access.write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
const Operator* load_stack_op =
jsgraph()->simplified()->LoadElement(stack_access);
for (Node* load : loads) {
switch (load->opcode()) {
case IrOpcode::kLoadElement: {
Node* index = NodeProperties::GetValueInput(load, 1);
// {offset} is a reversed index starting from 1. The base address is
// adapted to allow offsets starting from 1.
Node* offset = jsgraph()->graph()->NewNode(
jsgraph()->simplified()->NumberSubtract(), arguments_length,
index);
NodeProperties::SetType(offset,
TypeCache::Get().kArgumentsLengthType);
NodeProperties::ReplaceValueInput(load, arguments_frame, 0);
NodeProperties::ReplaceValueInput(load, offset, 1);
NodeProperties::ChangeOp(load, load_stack_op);
break;
}
case IrOpcode::kLoadField: {
DCHECK_EQ(FieldAccessOf(load->op()).offset,
FixedArray::kLengthOffset);
Node* length = NodeProperties::GetValueInput(node, 1);
ReplaceWithValue(load, length);
break;
}
default:
UNREACHABLE();
}
}
}
}
}
Node* NodeHashCache::Query(Node* node) {
auto it = cache_.find(node);
if (it != cache_.end()) {
return *it;
} else {
return nullptr;
}
}
NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
const Operator* op, int input_count,
Node** inputs, Type* type)
: node_cache_(cache), from_(nullptr) {
if (node_cache_->temp_nodes_.size() > 0) {
tmp_ = node_cache_->temp_nodes_.back();
node_cache_->temp_nodes_.pop_back();
int tmp_input_count = tmp_->InputCount();
if (input_count <= tmp_input_count) {
tmp_->TrimInputCount(input_count);
}
for (int i = 0; i < input_count; ++i) {
if (i < tmp_input_count) {
tmp_->ReplaceInput(i, inputs[i]);
} else {
tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
}
}
NodeProperties::ChangeOp(tmp_, op);
} else {
tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
}
NodeProperties::SetType(tmp_, type);
}
Node* NodeHashCache::Constructor::Get() {
DCHECK(tmp_ || from_);
Node* node;
if (!tmp_) {
node = node_cache_->Query(from_);
if (!node) node = from_;
} else {
node = node_cache_->Query(tmp_);
if (node) {
node_cache_->temp_nodes_.push_back(tmp_);
} else {
node = tmp_;
node_cache_->Insert(node);
}
}
tmp_ = from_ = nullptr;
return node;
}
Node* NodeHashCache::Constructor::MutableNode() {
DCHECK(tmp_ || from_);
if (!tmp_) {
if (node_cache_->temp_nodes_.empty()) {
tmp_ = node_cache_->graph_->CloneNode(from_);
} else {
tmp_ = node_cache_->temp_nodes_.back();
node_cache_->temp_nodes_.pop_back();
int from_input_count = from_->InputCount();
int tmp_input_count = tmp_->InputCount();
if (from_input_count <= tmp_input_count) {
tmp_->TrimInputCount(from_input_count);
}
for (int i = 0; i < from_input_count; ++i) {
if (i < tmp_input_count) {
tmp_->ReplaceInput(i, from_->InputAt(i));
} else {
tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
}
}
NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
NodeProperties::ChangeOp(tmp_, from_->op());
}
}
return tmp_;
}
#undef TRACE
} // namespace compiler
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#include "src/base/compiler-specific.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/new-escape-analysis.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace compiler {
class Deduplicator;
class JSGraph;
// Performs hash-consing when creating or mutating nodes. Used to avoid
// duplicate nodes when creating ObjectState, StateValues and FrameState nodes.
class NodeHashCache {
public:
NodeHashCache(Graph* graph, Zone* zone)
: graph_(graph), cache_(zone), temp_nodes_(zone) {}
// Handle to a conceptually new mutable node. Tries to re-use existing nodes
// and to recycle memory if possible.
class Constructor {
public:
// Construct a new node as a clone of [from].
Constructor(NodeHashCache* cache, Node* from)
: node_cache_(cache), from_(from), tmp_(nullptr) {}
// Construct a new node from scratch.
Constructor(NodeHashCache* cache, const Operator* op, int input_count,
Node** inputs, Type* type);
// Modify the new node.
void ReplaceValueInput(Node* input, int i) {
if (!tmp_ && input == NodeProperties::GetValueInput(from_, i)) return;
Node* node = MutableNode();
NodeProperties::ReplaceValueInput(node, input, i);
}
void ReplaceInput(Node* input, int i) {
if (!tmp_ && input == from_->InputAt(i)) return;
Node* node = MutableNode();
node->ReplaceInput(i, input);
}
// Obtain the mutated node or a cached copy. Invalidates the [Constructor].
Node* Get();
private:
Node* MutableNode();
NodeHashCache* node_cache_;
// Original node, copied on write.
Node* from_;
// Temporary node used for mutations, can be recycled if cache is hit.
Node* tmp_;
};
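// Usage sketch (illustrative only; assumes a NodeHashCache {cache} and nodes
// {frame_state}, {new_input}):
//   NodeHashCache::Constructor builder(&cache, frame_state);
//   builder.ReplaceValueInput(new_input, 0);
//   Node* result = builder.Get();  // a cached equal node if one exists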
private:
Node* Query(Node* node);
void Insert(Node* node) { cache_.insert(node); }
Graph* graph_;
struct NodeEquals {
bool operator()(Node* a, Node* b) const {
return NodeProperties::Equals(a, b);
}
};
struct NodeHashCode {
size_t operator()(Node* n) const { return NodeProperties::HashCode(n); }
};
ZoneUnorderedSet<Node*, NodeHashCode, NodeEquals> cache_;
// Unused nodes whose memory can be recycled.
ZoneVector<Node*> temp_nodes_;
};
// Modify the graph according to the information computed in the previous phase.
class V8_EXPORT_PRIVATE NewEscapeAnalysisReducer final
: public NON_EXPORTED_BASE(AdvancedReducer) {
public:
NewEscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
EscapeAnalysisResult analysis_result, Zone* zone);
Reduction Reduce(Node* node) override;
const char* reducer_name() const override {
return "NewEscapeAnalysisReducer";
}
void Finalize() override;
// Verifies that all virtual allocation nodes have been dealt with. Run it
// after this reducer has been applied.
void VerifyReplacement() const;
private:
void ReduceFrameStateInputs(Node* node);
Node* ReduceDeoptState(Node* node, Node* effect, Deduplicator* deduplicator);
Node* ObjectIdNode(const VirtualObject* vobject);
Node* MaybeGuard(Node* original, Node* replacement);
JSGraph* jsgraph() const { return jsgraph_; }
EscapeAnalysisResult analysis_result() const { return analysis_result_; }
Zone* zone() const { return zone_; }
JSGraph* const jsgraph_;
EscapeAnalysisResult analysis_result_;
ZoneVector<Node*> object_id_cache_;
NodeHashCache node_cache_;
ZoneSet<Node*> arguments_elements_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(NewEscapeAnalysisReducer);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/new-escape-analysis.h"
#include "src/bootstrapper.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/objects-inl.h"
#ifdef DEBUG
#define TRACE(...) \
do { \
if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
} while (false)
#else
#define TRACE(...)
#endif
namespace v8 {
namespace internal {
namespace compiler {
template <class T>
class Sidetable {
public:
explicit Sidetable(Zone* zone) : map_(zone) {}
T& operator[](const Node* node) {
NodeId id = node->id();
if (id >= map_.size()) {
map_.resize(id + 1);
}
return map_[id];
}
private:
ZoneVector<T> map_;
};
template <class T>
class SparseSidetable {
public:
explicit SparseSidetable(Zone* zone, T def_value = T())
: def_value_(std::move(def_value)), map_(zone) {}
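// Note: storing the default value for a node without an entry is a no-op,
// which is what keeps this side-table sparse.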
void Set(const Node* node, T value) {
auto iter = map_.find(node->id());
if (iter != map_.end()) {
iter->second = std::move(value);
} else if (value != def_value_) {
map_.insert(iter, std::make_pair(node->id(), std::move(value)));
}
}
const T& Get(const Node* node) const {
auto iter = map_.find(node->id());
return iter != map_.end() ? iter->second : def_value_;
}
private:
T def_value_;
ZoneUnorderedMap<NodeId, T> map_;
};
// Keeps track of the changes to the current node during reduction.
// Encapsulates the current state of the IR graph and the reducer state, such
// as side-tables. All access to the IR and the reducer state should happen
// through a ReduceScope to ensure that changes and dependencies are tracked
// and all necessary node revisitations happen.
class ReduceScope {
public:
typedef EffectGraphReducer::Reduction Reduction;
explicit ReduceScope(Node* node, Reduction* reduction)
: current_node_(node), reduction_(reduction) {}
protected:
Node* current_node() const { return current_node_; }
Reduction* reduction() { return reduction_; }
private:
Node* current_node_;
Reduction* reduction_;
};
// A VariableTracker object keeps track of the values of variables at all points
// of the effect chain and introduces new phi nodes when necessary.
// Initially and by default, variables are mapped to nullptr, which means that
// the variable allocation point does not dominate the current point on the
// effect chain. We map variables that represent uninitialized memory to the
// Dead node to ensure it is not read.
// Unmapped values are impossible by construction: a PersistentMap that does
// not contain an element is indistinguishable from one that maps it to the
// default element.
class VariableTracker {
private:
// The state of all variables at one point in the effect chain.
class State {
typedef PersistentMap<Variable, Node*> Map;
public:
explicit State(Zone* zone) : map_(zone) {}
Node* Get(Variable var) const {
CHECK(var != Variable::Invalid());
return map_.Get(var);
}
void Set(Variable var, Node* node) {
CHECK(var != Variable::Invalid());
return map_.Set(var, node);
}
Map::iterator begin() const { return map_.begin(); }
Map::iterator end() const { return map_.end(); }
bool operator!=(const State& other) const { return map_ != other.map_; }
private:
Map map_;
};
public:
VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
Variable NewVariable() { return Variable(next_variable_++); }
Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
Zone* zone() { return zone_; }
class Scope : public ReduceScope {
public:
Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
~Scope();
Node* Get(Variable var) { return current_state_.Get(var); }
void Set(Variable var, Node* node) { current_state_.Set(var, node); }
private:
VariableTracker* states_;
State current_state_;
};
private:
State MergeInputs(Node* effect_phi);
Zone* zone_;
JSGraph* graph_;
SparseSidetable<State> table_;
ZoneVector<Node*> buffer_;
EffectGraphReducer* reducer_;
int next_variable_ = 0;
DISALLOW_COPY_AND_ASSIGN(VariableTracker);
};
// Encapsulates the current state of the escape analysis reducer to preserve
// invariants regarding changes and re-visitation.
class EscapeAnalysisTracker : public ZoneObject {
public:
EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
Zone* zone)
: virtual_objects_(zone),
replacements_(zone),
variable_states_(jsgraph, reducer, zone),
jsgraph_(jsgraph),
zone_(zone) {}
class Scope : public VariableTracker::Scope {
public:
Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
Node* node, Reduction* reduction)
: VariableTracker::Scope(&tracker->variable_states_, node, reduction),
tracker_(tracker),
reducer_(reducer) {}
const VirtualObject* GetVirtualObject(Node* node) {
VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
if (vobject) vobject->AddDependency(current_node());
return vobject;
}
// Create or retrieve a virtual object for the current node.
const VirtualObject* InitVirtualObject(int size) {
DCHECK(current_node()->opcode() == IrOpcode::kAllocate);
VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
if (vobject) {
CHECK(vobject->size() == size);
} else {
vobject = tracker_->NewVirtualObject(size);
}
if (vobject) vobject->AddDependency(current_node());
vobject_ = vobject;
return vobject;
}
void SetVirtualObject(Node* object) {
vobject_ = tracker_->virtual_objects_.Get(object);
}
void SetEscaped(Node* node) {
if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
if (object->HasEscaped()) return;
TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
node->op()->mnemonic(), node->id(),
current_node()->op()->mnemonic(), current_node()->id());
object->SetEscaped();
object->RevisitDependants(reducer_);
}
}
// The inputs of the current node have to be accessed through the scope to
// ensure that they respect the node replacements.
Node* ValueInput(int i) {
return tracker_->ResolveReplacement(
NodeProperties::GetValueInput(current_node(), i));
}
Node* ContextInput() {
return tracker_->ResolveReplacement(
NodeProperties::GetContextInput(current_node()));
}
void SetReplacement(Node* replacement) {
replacement_ = replacement;
vobject_ =
replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
replacement->id());
}
void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
~Scope() {
if (replacement_ != tracker_->replacements_[current_node()] ||
vobject_ != tracker_->virtual_objects_.Get(current_node())) {
reduction()->set_value_changed();
}
tracker_->replacements_[current_node()] = replacement_;
tracker_->virtual_objects_.Set(current_node(), vobject_);
}
private:
EscapeAnalysisTracker* tracker_;
EffectGraphReducer* reducer_;
VirtualObject* vobject_ = nullptr;
Node* replacement_ = nullptr;
};
Node* GetReplacementOf(Node* node) { return replacements_[node]; }
Node* ResolveReplacement(Node* node) {
if (Node* replacement = GetReplacementOf(node)) {
// Replacements cannot have replacements. This is important to ensure
// re-visitation: If a replacement is replaced, then all nodes accessing
// the replacement have to be updated.
DCHECK_NULL(GetReplacementOf(replacement));
return replacement;
}
return node;
}
private:
friend class EscapeAnalysisResult;
static const size_t kMaxTrackedObjects = 100;
VirtualObject* NewVirtualObject(int size) {
if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
return new (zone_)
VirtualObject(&variable_states_, next_object_id_++, size);
}
SparseSidetable<VirtualObject*> virtual_objects_;
Sidetable<Node*> replacements_;
VariableTracker variable_states_;
VirtualObject::Id next_object_id_ = 0;
JSGraph* const jsgraph_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
};
EffectGraphReducer::EffectGraphReducer(
Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
: graph_(graph),
state_(graph, kNumStates),
revisit_(zone),
stack_(zone),
reduce_(reduce) {}
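// Reduction runs until a fixed point: each node is reduced once all of its
// inputs have been visited, and the uses of a node are queued for
// revisitation whenever its value or effect output changes.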
void EffectGraphReducer::ReduceFrom(Node* node) {
// Perform DFS and eagerly trigger revisitation as soon as possible.
// A stack element {node, i} indicates that input i of node should be visited
// next.
DCHECK(stack_.empty());
stack_.push({node, 0});
while (!stack_.empty()) {
Node* current = stack_.top().node;
int& input_index = stack_.top().input_index;
if (input_index < current->InputCount()) {
Node* input = current->InputAt(input_index);
input_index++;
switch (state_.Get(input)) {
case State::kVisited:
// The input is already reduced.
break;
case State::kOnStack:
// The input is on the DFS stack right now, so it will be revisited
// later anyway.
break;
case State::kUnvisited:
case State::kRevisit: {
state_.Set(input, State::kOnStack);
stack_.push({input, 0});
break;
}
}
} else {
stack_.pop();
Reduction reduction;
reduce_(current, &reduction);
for (Edge edge : current->use_edges()) {
// Mark uses for revisitation.
Node* use = edge.from();
if (NodeProperties::IsEffectEdge(edge)) {
if (reduction.effect_changed()) Revisit(use);
} else {
if (reduction.value_changed()) Revisit(use);
}
}
state_.Set(current, State::kVisited);
// Process the revisitation buffer immediately. This improves performance
// of escape analysis. Using a stack for {revisit_} reverses the order in
// which the revisitation happens. This also seems to improve performance.
while (!revisit_.empty()) {
Node* revisit = revisit_.top();
if (state_.Get(revisit) == State::kRevisit) {
state_.Set(revisit, State::kOnStack);
stack_.push({revisit, 0});
}
revisit_.pop();
}
}
}
}
void EffectGraphReducer::Revisit(Node* node) {
if (state_.Get(node) == State::kVisited) {
TRACE(" Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
node->id());
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
}
VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
Zone* zone)
: zone_(zone),
graph_(graph),
table_(zone, State(zone)),
buffer_(zone),
reducer_(reducer) {}
VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
Reduction* reduction)
: ReduceScope(node, reduction),
states_(states),
current_state_(states->zone_) {
switch (node->opcode()) {
case IrOpcode::kEffectPhi:
current_state_ = states_->MergeInputs(node);
break;
default:
int effect_inputs = node->op()->EffectInputCount();
if (effect_inputs == 1) {
current_state_ =
states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
} else {
DCHECK_EQ(0, effect_inputs);
}
}
}
VariableTracker::Scope::~Scope() {
if (!reduction()->effect_changed() &&
states_->table_.Get(current_node()) != current_state_) {
reduction()->set_effect_changed();
}
states_->table_.Set(current_node(), current_state_);
}
VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
// A variable that is mapped to [nullptr] was not assigned a value on every
// execution path to the current effect phi. Relying on the invariant that
// every variable is initialized (at least with a sentinel like the Dead
// node), this means that the variable initialization does not dominate the
// current point. So for loop effect phis, we can keep nullptr for a variable
// as long as the first input of the loop has nullptr for this variable. For
// non-loop effect phis, we can even keep it nullptr as long as any input has
// nullptr.
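// Example: at a two-input non-loop merge where a variable is defined as d1
// and d2 on the incoming effects, the code below reuses d1 if d1 == d2,
// creates Phi(d1, d2) otherwise, and maps the variable to nullptr if either
// input is nullptr.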
DCHECK(effect_phi->opcode() == IrOpcode::kEffectPhi);
int arity = effect_phi->op()->EffectInputCount();
Node* control = NodeProperties::GetControlInput(effect_phi, 0);
TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
bool is_loop = control->opcode() == IrOpcode::kLoop;
buffer_.reserve(arity + 1);
State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
State result = first_input;
for (std::pair<Variable, Node*> var_value : first_input) {
if (Node* value = var_value.second) {
Variable var = var_value.first;
TRACE("var %i:\n", var.id_);
buffer_.clear();
buffer_.push_back(value);
bool identical_inputs = true;
int num_defined_inputs = 1;
TRACE(" input 0: %s#%d\n", value->op()->mnemonic(), value->id());
for (int i = 1; i < arity; ++i) {
Node* next_value =
table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
if (next_value != value) identical_inputs = false;
if (next_value != nullptr) {
num_defined_inputs++;
TRACE(" input %i: %s#%d\n", i, next_value->op()->mnemonic(),
next_value->id());
} else {
TRACE(" input %i: nullptr\n", i);
}
buffer_.push_back(next_value);
}
Node* old_value = table_.Get(effect_phi).Get(var);
if (old_value) {
TRACE(" old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
} else {
TRACE(" old: nullptr\n");
}
// Reuse a previously created phi node if possible.
if (old_value && old_value->opcode() == IrOpcode::kPhi &&
NodeProperties::GetControlInput(old_value, 0) == control) {
// Since a phi node can never dominate its control node,
// [old_value] cannot originate from the inputs. Thus [old_value]
// must have been created by a previous reduction of this [effect_phi].
for (int i = 0; i < arity; ++i) {
NodeProperties::ReplaceValueInput(
old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
// This change cannot affect the rest of the reducer, so there is no
// need to trigger additional revisitations.
}
result.Set(var, old_value);
} else {
if (num_defined_inputs == 1 && is_loop) {
// For loop effect phis, the variable initialization dominates iff it
// dominates the first input.
DCHECK_EQ(2, arity);
DCHECK_EQ(value, buffer_[0]);
result.Set(var, value);
} else if (num_defined_inputs < arity) {
// If the variable is undefined on some input of this non-loop effect
// phi, then its initialization does not dominate this point.
result.Set(var, nullptr);
} else {
DCHECK_EQ(num_defined_inputs, arity);
// We only create a phi if the values are different.
if (identical_inputs) {
result.Set(var, value);
} else {
TRACE("Creating new phi\n");
buffer_.push_back(control);
Node* phi = graph_->graph()->NewNode(
graph_->common()->Phi(MachineRepresentation::kTagged, arity),
arity + 1, &buffer_.front());
// TODO(tebbi): Computing precise types here is tricky, because of
// the necessary revisitations. If we really need this, we should
// probably do it afterwards.
NodeProperties::SetType(phi, Type::Any());
reducer_->AddRoot(phi);
result.Set(var, phi);
}
}
}
#ifdef DEBUG
if (Node* result_node = result.Get(var)) {
TRACE(" result: %s#%d\n", result_node->op()->mnemonic(),
result_node->id());
} else {
TRACE(" result: nullptr\n");
}
#endif
}
}
return result;
}
namespace {
int OffsetOfFieldAccess(const Operator* op) {
DCHECK(op->opcode() == IrOpcode::kLoadField ||
op->opcode() == IrOpcode::kStoreField);
FieldAccess access = FieldAccessOf(op);
return access.offset;
}
Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
DCHECK(op->opcode() == IrOpcode::kLoadElement ||
op->opcode() == IrOpcode::kStoreElement);
Type* index_type = NodeProperties::GetType(index_node);
if (!index_type->Is(Type::Number())) return Nothing<int>();
double max = index_type->Max();
double min = index_type->Min();
int index = static_cast<int>(min);
if (!(index == min && index == max)) return Nothing<int>();
ElementAccess access = ElementAccessOf(op);
DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
kPointerSizeLog2);
return Just(access.header_size + (index << ElementSizeLog2Of(
access.machine_type.representation())));
}
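// Worked example (illustrative): for a tagged element access with header_size
// 16 and a constant index type [2, 2] on a 64-bit target (element size
// log2 == 3), the resulting offset is 16 + (2 << 3) == 32.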
void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
JSGraph* jsgraph) {
switch (op->opcode()) {
case IrOpcode::kAllocate: {
NumberMatcher size(current->ValueInput(0));
if (!size.HasValue()) break;
int size_int = static_cast<int>(size.Value());
if (size_int != size.Value()) break;
if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
// Initialize with dead nodes as a sentinel for uninitialized memory.
for (Variable field : *vobject) {
current->Set(field, jsgraph->Dead());
}
}
break;
}
case IrOpcode::kFinishRegion:
current->SetVirtualObject(current->ValueInput(0));
break;
case IrOpcode::kStoreField: {
Node* object = current->ValueInput(0);
Node* value = current->ValueInput(1);
const VirtualObject* vobject = current->GetVirtualObject(object);
Variable var;
if (vobject && !vobject->HasEscaped() &&
vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
current->Set(var, value);
current->MarkForDeletion();
} else {
current->SetEscaped(object);
current->SetEscaped(value);
}
break;
}
case IrOpcode::kStoreElement: {
Node* object = current->ValueInput(0);
Node* index = current->ValueInput(1);
Node* value = current->ValueInput(2);
const VirtualObject* vobject = current->GetVirtualObject(object);
int offset;
Variable var;
if (vobject && !vobject->HasEscaped() &&
OffsetOfElementsAccess(op, index).To(&offset) &&
vobject->FieldAt(offset).To(&var)) {
current->Set(var, value);
current->MarkForDeletion();
} else {
current->SetEscaped(value);
current->SetEscaped(object);
}
break;
}
case IrOpcode::kLoadField: {
Node* object = current->ValueInput(0);
const VirtualObject* vobject = current->GetVirtualObject(object);
Variable var;
if (vobject && !vobject->HasEscaped() &&
vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
current->SetReplacement(current->Get(var));
} else {
// TODO(tebbi): At the moment, we mark objects as escaping if there
// is a load from an invalid location to avoid dead nodes. This is a
// workaround that should be removed once we can handle dead nodes
// everywhere.
current->SetEscaped(object);
}
break;
}
case IrOpcode::kLoadElement: {
Node* object = current->ValueInput(0);
Node* index = current->ValueInput(1);
const VirtualObject* vobject = current->GetVirtualObject(object);
int offset;
Variable var;
if (vobject && !vobject->HasEscaped() &&
OffsetOfElementsAccess(op, index).To(&offset) &&
vobject->FieldAt(offset).To(&var)) {
current->SetReplacement(current->Get(var));
} else {
current->SetEscaped(object);
}
break;
}
case IrOpcode::kTypeGuard: {
// The type-guard is re-introduced in the final reducer if the types
// don't match.
current->SetReplacement(current->ValueInput(0));
break;
}
case IrOpcode::kReferenceEqual: {
Node* left = current->ValueInput(0);
Node* right = current->ValueInput(1);
const VirtualObject* left_object = current->GetVirtualObject(left);
const VirtualObject* right_object = current->GetVirtualObject(right);
Node* replacement = nullptr;
if (left_object && !left_object->HasEscaped()) {
if (right_object && !right_object->HasEscaped() &&
left_object->id() == right_object->id()) {
replacement = jsgraph->TrueConstant();
} else {
replacement = jsgraph->FalseConstant();
}
} else if (right_object && !right_object->HasEscaped()) {
replacement = jsgraph->FalseConstant();
}
if (replacement) {
// TODO(tebbi) This is a workaround for uninhabited types. If we
// replaced a value of uninhabited type with a constant, we would
// widen the type of the node. This could produce inconsistent
// types (which might confuse representation selection). We get
// around this by refusing to constant-fold and escape-analyze
// if the type is not inhabited.
if (NodeProperties::GetType(left)->IsInhabited() &&
NodeProperties::GetType(right)->IsInhabited()) {
current->SetReplacement(replacement);
} else {
current->SetEscaped(left);
current->SetEscaped(right);
}
}
break;
}
case IrOpcode::kCheckMaps: {
CheckMapsParameters params = CheckMapsParametersOf(op);
Node* checked = current->ValueInput(0);
const VirtualObject* vobject = current->GetVirtualObject(checked);
Variable map_field;
if (vobject && !vobject->HasEscaped() &&
vobject->FieldAt(HeapObject::kMapOffset).To(&map_field)) {
Node* map = current->Get(map_field);
if (map) {
Type* const map_type = NodeProperties::GetType(map);
if (map_type->IsHeapConstant() &&
params.maps().contains(ZoneHandleSet<Map>(bit_cast<Handle<Map>>(
map_type->AsHeapConstant()->Value())))) {
current->MarkForDeletion();
break;
}
}
}
current->SetEscaped(checked);
break;
}
case IrOpcode::kCheckHeapObject: {
Node* checked = current->ValueInput(0);
switch (checked->opcode()) {
case IrOpcode::kAllocate:
case IrOpcode::kFinishRegion:
case IrOpcode::kHeapConstant:
current->SetReplacement(checked);
break;
default:
current->SetEscaped(checked);
break;
}
break;
}
case IrOpcode::kMapGuard: {
Node* object = current->ValueInput(0);
const VirtualObject* vobject = current->GetVirtualObject(object);
if (vobject && !vobject->HasEscaped()) {
current->MarkForDeletion();
}
break;
}
case IrOpcode::kStateValues:
case IrOpcode::kFrameState:
// These uses are always safe.
break;
default: {
// For unknown nodes, treat all value inputs as escaping.
int value_input_count = op->ValueInputCount();
for (int i = 0; i < value_input_count; ++i) {
Node* input = current->ValueInput(i);
current->SetEscaped(input);
}
if (OperatorProperties::HasContextInput(op)) {
current->SetEscaped(current->ContextInput());
}
break;
}
}
}
} // namespace
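// Worked example (illustrative) of the reduction above: for
//   a = Allocate(8); StoreField[+0](a, x); v = LoadField[+0](a)
// the store writes x into the field variable of a's virtual object and is
// marked for deletion, the load is replaced by x, and if no remaining use
// escapes, the final reducer takes the allocation out of the effect chain so
// it becomes dead code.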
void NewEscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
const Operator* op = node->op();
TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());
EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
ReduceNode(op, &current, jsgraph());
}
NewEscapeAnalysis::NewEscapeAnalysis(JSGraph* jsgraph, Zone* zone)
: EffectGraphReducer(
jsgraph->graph(),
[this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
zone),
tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
jsgraph_(jsgraph) {}
Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
return tracker_->GetReplacementOf(node);
}
Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
int field, Node* effect) {
return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
effect);
}
const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
return tracker_->virtual_objects_.Get(node);
}
VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
int size)
: Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
DCHECK(size % kPointerSize == 0);
TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
int num_fields = size / kPointerSize;
fields_.reserve(num_fields);
for (int i = 0; i < num_fields; ++i) {
fields_.push_back(var_states->NewVariable());
}
}
#undef TRACE
} // namespace compiler
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#include "src/base/functional.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/persistent-map.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace compiler {
class CommonOperatorBuilder;
class VariableTracker;
class EscapeAnalysisTracker;
// {EffectGraphReducer} reduces up to a fixed point. It distinguishes changes to
// the effect output of a node from changes to the value output to reduce the
// number of revisitations.
class EffectGraphReducer {
public:
class Reduction {
public:
bool value_changed() const { return value_changed_; }
void set_value_changed() { value_changed_ = true; }
bool effect_changed() const { return effect_changed_; }
void set_effect_changed() { effect_changed_ = true; }
private:
bool value_changed_ = false;
bool effect_changed_ = false;
};
EffectGraphReducer(Graph* graph,
std::function<void(Node*, Reduction*)> reduce, Zone* zone);
void ReduceGraph() { ReduceFrom(graph_->end()); }
// Mark node for revisitation.
void Revisit(Node* node);
// Add a new root node to start reduction from. This is useful if the reducer
// adds nodes that are not yet reachable, but should already be considered
// part of the graph.
void AddRoot(Node* node) {
DCHECK(state_.Get(node) == State::kUnvisited);
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
bool Complete() { return stack_.empty() && revisit_.empty(); }
private:
struct NodeState {
Node* node;
int input_index;
};
void ReduceFrom(Node* node);
enum class State : uint8_t { kUnvisited = 0, kRevisit, kOnStack, kVisited };
const uint8_t kNumStates = static_cast<uint8_t>(State::kVisited) + 1;
Graph* graph_;
NodeMarker<State> state_;
ZoneStack<Node*> revisit_;
ZoneStack<NodeState> stack_;
std::function<void(Node*, Reduction*)> reduce_;
};
// A variable is an abstract storage location, which is lowered to SSA values
// and phi nodes by {VariableTracker}.
class Variable {
public:
Variable() : id_(kInvalid) {}
bool operator==(Variable other) const { return id_ == other.id_; }
bool operator!=(Variable other) const { return id_ != other.id_; }
bool operator<(Variable other) const { return id_ < other.id_; }
static Variable Invalid() { return Variable(kInvalid); }
friend V8_INLINE size_t hash_value(Variable v) {
return base::hash_value(v.id_);
}
friend std::ostream& operator<<(std::ostream& os, Variable var) {
return os << var.id_;
}
private:
typedef int Id;
explicit Variable(Id id) : id_(id) {}
Id id_;
static const Id kInvalid = -1;
friend class VariableTracker;
};
// An object that can track the nodes in the graph whose current reduction
// depends on the value of the object.
class Dependable : public ZoneObject {
public:
explicit Dependable(Zone* zone) : dependants_(zone) {}
void AddDependency(Node* node) { dependants_.push_back(node); }
void RevisitDependants(EffectGraphReducer* reducer) {
for (Node* node : dependants_) {
reducer->Revisit(node);
}
dependants_.clear();
}
private:
ZoneVector<Node*> dependants_;
};
// A virtual object represents an allocation site and tracks the Variables
// associated with its fields as well as its global escape status.
class VirtualObject : public Dependable {
public:
typedef uint32_t Id;
typedef ZoneVector<Variable>::const_iterator const_iterator;
VirtualObject(VariableTracker* var_states, Id id, int size);
Maybe<Variable> FieldAt(int offset) const {
DCHECK(offset % kPointerSize == 0);
CHECK(!HasEscaped());
if (offset >= size()) {
// This can only happen in unreachable code.
return Nothing<Variable>();
}
return Just(fields_.at(offset / kPointerSize));
}
Id id() const { return id_; }
int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
// Escaped might mean that the object escaped to untracked memory or that it
// is used in an operation that requires materialization.
void SetEscaped() { escaped_ = true; }
bool HasEscaped() const { return escaped_; }
const_iterator begin() const { return fields_.begin(); }
const_iterator end() const { return fields_.end(); }
private:
bool escaped_ = false;
Id id_;
ZoneVector<Variable> fields_;
};
class EscapeAnalysisResult {
public:
explicit EscapeAnalysisResult(EscapeAnalysisTracker* tracker)
: tracker_(tracker) {}
const VirtualObject* GetVirtualObject(Node* node);
Node* GetVirtualObjectField(const VirtualObject* vobject, int field,
Node* effect);
Node* GetReplacementOf(Node* node);
private:
EscapeAnalysisTracker* tracker_;
};
class V8_EXPORT_PRIVATE NewEscapeAnalysis final
: public NON_EXPORTED_BASE(EffectGraphReducer) {
public:
NewEscapeAnalysis(JSGraph* jsgraph, Zone* zone);
EscapeAnalysisResult analysis_result() {
DCHECK(Complete());
return EscapeAnalysisResult(tracker_);
}
private:
void Reduce(Node* node, Reduction* reduction);
JSGraph* jsgraph() { return jsgraph_; }
EscapeAnalysisTracker* tracker_;
JSGraph* jsgraph_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
@@ -49,8 +49,6 @@
 #include "src/compiler/machine-operator-reducer.h"
 #include "src/compiler/memory-optimizer.h"
 #include "src/compiler/move-optimizer.h"
-#include "src/compiler/new-escape-analysis-reducer.h"
-#include "src/compiler/new-escape-analysis.h"
 #include "src/compiler/osr.h"
 #include "src/compiler/pipeline-statistics.h"
 #include "src/compiler/redundancy-elimination.h"
@@ -1057,32 +1055,16 @@ struct EscapeAnalysisPhase {
   static const char* phase_name() { return "escape analysis"; }
   void Run(PipelineData* data, Zone* temp_zone) {
-    if (FLAG_turbo_new_escape) {
-      NewEscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
-      escape_analysis.ReduceGraph();
-      JSGraphReducer reducer(data->jsgraph(), temp_zone);
-      NewEscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
-                                              escape_analysis.analysis_result(),
-                                              temp_zone);
-      AddReducer(data, &reducer, &escape_reducer);
-      reducer.ReduceGraph();
-      // TODO(tebbi): Turn this into a debug mode check once we have confidence.
-      escape_reducer.VerifyReplacement();
-    } else {
-      EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
-                                     temp_zone);
-      if (!escape_analysis.Run()) return;
-      JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
-      EscapeAnalysisReducer escape_reducer(&graph_reducer, data->jsgraph(),
-                                           &escape_analysis, temp_zone);
-      AddReducer(data, &graph_reducer, &escape_reducer);
-      graph_reducer.ReduceGraph();
-      if (escape_reducer.compilation_failed()) {
-        data->set_compilation_failed();
-        return;
-      }
-      escape_reducer.VerifyReplacement();
-    }
+    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
+    escape_analysis.ReduceGraph();
+    JSGraphReducer reducer(data->jsgraph(), temp_zone);
+    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
+                                         escape_analysis.analysis_result(),
+                                         temp_zone);
+    AddReducer(data, &reducer, &escape_reducer);
+    reducer.ReduceGraph();
+    // TODO(tebbi): Turn this into a debug mode check once we have confidence.
+    escape_reducer.VerifyReplacement();
   }
 };
...
@@ -439,8 +439,6 @@ DEFINE_BOOL(turbo_loop_variable, true, "Turbofan loop variable optimization")
 DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
 DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
 DEFINE_BOOL(turbo_escape, true, "enable escape analysis")
-DEFINE_BOOL(turbo_new_escape, true,
-            "enable new implementation of escape analysis")
 DEFINE_BOOL(turbo_instruction_scheduling, false,
             "enable instruction scheduling in TurboFan")
 DEFINE_BOOL(turbo_stress_instruction_scheduling, false,
...
@@ -805,10 +805,6 @@
 'compiler/memory-optimizer.h',
 'compiler/move-optimizer.cc',
 'compiler/move-optimizer.h',
-'compiler/new-escape-analysis.cc',
-'compiler/new-escape-analysis.h',
-'compiler/new-escape-analysis-reducer.cc',
-'compiler/new-escape-analysis-reducer.h',
 'compiler/node-aux-data.h',
 'compiler/node-cache.cc',
 'compiler/node-cache.h',
...
@@ -56,7 +56,6 @@ v8_executable("unittests") {
 "compiler/dead-code-elimination-unittest.cc",
 "compiler/diamond-unittest.cc",
 "compiler/effect-control-linearizer-unittest.cc",
-"compiler/escape-analysis-unittest.cc",
 "compiler/graph-reducer-unittest.cc",
 "compiler/graph-reducer-unittest.h",
 "compiler/graph-trimmer-unittest.cc",
...
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/escape-analysis.h"
#include "src/bit-vector.h"
#include "src/compiler/escape-analysis-reducer.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/types.h"
#include "src/zone/zone-containers.h"
#include "test/unittests/compiler/graph-unittest.h"
namespace v8 {
namespace internal {
namespace compiler {
class EscapeAnalysisTest : public TypedGraphTest {
public:
EscapeAnalysisTest()
: simplified_(zone()),
jsgraph_(isolate(), graph(), common(), nullptr, nullptr, nullptr),
escape_analysis_(graph(), common(), zone()),
effect_(graph()->start()),
control_(graph()->start()) {}
~EscapeAnalysisTest() {}
EscapeAnalysis* escape_analysis() { return &escape_analysis_; }
protected:
void Analysis() { escape_analysis_.Run(); }
void Transformation() {
GraphReducer graph_reducer(zone(), graph());
EscapeAnalysisReducer escape_reducer(&graph_reducer, &jsgraph_,
&escape_analysis_, zone());
graph_reducer.AddReducer(&escape_reducer);
graph_reducer.ReduceGraph();
}
// ---------------------------------Node Creation Helper----------------------
Node* BeginRegion(Node* effect = nullptr) {
if (!effect) {
effect = effect_;
}
return effect_ = graph()->NewNode(
common()->BeginRegion(RegionObservability::kObservable), effect);
}
Node* FinishRegion(Node* value, Node* effect = nullptr) {
if (!effect) {
effect = effect_;
}
return effect_ = graph()->NewNode(common()->FinishRegion(), value, effect);
}
Node* Allocate(Node* size, Node* effect = nullptr, Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ = graph()->NewNode(simplified()->Allocate(Type::Any()), size,
effect, control);
}
Node* Constant(int num) {
return graph()->NewNode(common()->NumberConstant(num));
}
Node* Store(const FieldAccess& access, Node* allocation, Node* value,
Node* effect = nullptr, Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ = graph()->NewNode(simplified()->StoreField(access),
allocation, value, effect, control);
}
Node* StoreElement(const ElementAccess& access, Node* allocation, Node* index,
Node* value, Node* effect = nullptr,
Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ =
graph()->NewNode(simplified()->StoreElement(access), allocation,
index, value, effect, control);
}
Node* Load(const FieldAccess& access, Node* from, Node* effect = nullptr,
Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return graph()->NewNode(simplified()->LoadField(access), from, effect,
control);
}
Node* Return(Node* value, Node* effect = nullptr, Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
Node* zero = graph()->NewNode(common()->NumberConstant(0));
return control_ = graph()->NewNode(common()->Return(), zero, value, effect,
control);
}
void EndGraph() {
for (Edge edge : graph()->end()->input_edges()) {
if (NodeProperties::IsControlEdge(edge)) {
edge.UpdateTo(control_);
}
}
}
Node* Branch() {
return control_ =
graph()->NewNode(common()->Branch(), Constant(0), control_);
}
Node* IfTrue() {
return control_ = graph()->NewNode(common()->IfTrue(), control_);
}
Node* IfFalse() { return graph()->NewNode(common()->IfFalse(), control_); }
Node* Merge2(Node* control1, Node* control2) {
return control_ = graph()->NewNode(common()->Merge(2), control1, control2);
}
FieldAccess FieldAccessAtIndex(int offset) {
FieldAccess access = {kTaggedBase, offset,
MaybeHandle<Name>(), MaybeHandle<Map>(),
Type::Any(), MachineType::AnyTagged(),
kFullWriteBarrier};
return access;
}
ElementAccess MakeElementAccess(int header_size) {
ElementAccess access = {kTaggedBase, header_size, Type::Any(),
MachineType::AnyTagged(), kFullWriteBarrier};
return access;
}
// ---------------------------------Assertion Helper--------------------------
void ExpectReplacement(Node* node, Node* rep) {
EXPECT_EQ(rep, escape_analysis()->GetReplacement(node));
}
void ExpectReplacementPhi(Node* node, Node* left, Node* right) {
Node* rep = escape_analysis()->GetReplacement(node);
ASSERT_NE(nullptr, rep);
ASSERT_EQ(IrOpcode::kPhi, rep->opcode());
EXPECT_EQ(left, NodeProperties::GetValueInput(rep, 0));
EXPECT_EQ(right, NodeProperties::GetValueInput(rep, 1));
}
void ExpectVirtual(Node* node) {
EXPECT_TRUE(node->opcode() == IrOpcode::kAllocate ||
node->opcode() == IrOpcode::kFinishRegion);
EXPECT_TRUE(escape_analysis()->IsVirtual(node));
}
void ExpectEscaped(Node* node) {
EXPECT_TRUE(node->opcode() == IrOpcode::kAllocate ||
node->opcode() == IrOpcode::kFinishRegion);
EXPECT_TRUE(escape_analysis()->IsEscaped(node));
}
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
Node* effect() { return effect_; }
Node* control() { return control_; }
private:
SimplifiedOperatorBuilder simplified_;
JSGraph jsgraph_;
EscapeAnalysis escape_analysis_;
Node* effect_;
Node* control_;
};
// -----------------------------------------------------------------------------
// Test cases.
TEST_F(EscapeAnalysisTest, StraightNonEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, StraightNonEscapeNonConstStore) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* index =
graph()->NewNode(common()->Select(MachineRepresentation::kTagged),
object1, object2, control());
StoreElement(MakeElementAccess(0), allocation, index, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, StraightEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(allocation);
EndGraph();
graph()->end()->AppendInput(zone(), load);
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, StoreLoadEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation1 = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation1, object1);
Node* finish1 = FinishRegion(allocation1);
BeginRegion();
Node* allocation2 = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation2, finish1);
Node* finish2 = FinishRegion(allocation2);
Node* load = Load(FieldAccessAtIndex(0), finish2);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectEscaped(allocation1);
ExpectVirtual(allocation2);
ExpectReplacement(load, finish1);
Transformation();
ASSERT_EQ(finish1, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, BranchNonEscape) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, object1, finish, ifFalse);
Node* effect2 =
Store(FieldAccessAtIndex(0), allocation, object2, finish, ifTrue);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, effect2, merge);
Node* load = Load(FieldAccessAtIndex(0), finish, phi, merge);
Node* result = Return(load, phi);
EndGraph();
graph()->end()->AppendInput(zone(), result);
Analysis();
ExpectVirtual(allocation);
ExpectReplacementPhi(load, object1, object2);
Node* replacement_phi = escape_analysis()->GetReplacement(load);
Transformation();
ASSERT_EQ(replacement_phi, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, BranchEscapeOne) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
Node* index = graph()->NewNode(common()->Parameter(0), start());
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, object1, finish, ifFalse);
Node* effect2 = StoreElement(MakeElementAccess(0), allocation, index, object2,
finish, ifTrue);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, effect2, merge);
Node* load = Load(FieldAccessAtIndex(0), finish, phi, merge);
Node* result = Return(load, phi);
EndGraph();
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, BranchEscapeThroughStore) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
FinishRegion(allocation);
BeginRegion();
Node* allocation2 = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object2);
Node* finish2 = FinishRegion(allocation2);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, allocation2, finish2, ifFalse);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, finish2, merge);
Node* load = Load(FieldAccessAtIndex(0), finish2, phi, merge);
Node* result = Return(allocation, phi);
EndGraph();
graph()->end()->AppendInput(zone(), load);
Analysis();
ExpectEscaped(allocation);
ExpectEscaped(allocation2);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, DanglingLoadOrder) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
Node* allocation = Allocate(Constant(kPointerSize));
Node* store1 = Store(FieldAccessAtIndex(0), allocation, object1);
Node* load1 = Load(FieldAccessAtIndex(0), allocation);
Node* store2 = Store(FieldAccessAtIndex(0), allocation, object2);
Node* load2 = Load(FieldAccessAtIndex(0), allocation, store1);
Node* result = Return(load2);
EndGraph();
graph()->end()->AppendInput(zone(), store2);
graph()->end()->AppendInput(zone(), load1);
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load1, object1);
ExpectReplacement(load2, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
Node* state_values1 = graph()->NewNode(
common()->StateValues(1, SparseInputMask::Dense()), finish);
Node* state_values2 =
graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* state_values3 =
graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* frame_state = graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
state_values1, state_values2, state_values3, UndefinedConstant(),
graph()->start(), graph()->start());
Node* deopt = graph()->NewNode(
common()->Deoptimize(DeoptimizeKind::kEager, DeoptimizeReason::kNoReason),
frame_state, effect1, ifFalse);
Node* ifTrue = IfTrue();
Node* load = Load(FieldAccessAtIndex(0), finish, effect1, ifTrue);
Node* result = Return(load, effect1, ifTrue);
EndGraph();
graph()->end()->AppendInput(zone(), deopt);
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);
ASSERT_EQ(1, object_state->op()->ValueInputCount());
ASSERT_EQ(object1, NodeProperties::GetValueInput(object_state, 0));
}
TEST_F(EscapeAnalysisTest, DISABLED_DeoptReplacementIdentity) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize * 2));
Store(FieldAccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(kPointerSize), allocation, allocation);
Node* finish = FinishRegion(allocation);
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
Node* state_values1 = graph()->NewNode(
common()->StateValues(1, SparseInputMask::Dense()), finish);
Node* state_values2 = graph()->NewNode(
common()->StateValues(1, SparseInputMask::Dense()), finish);
Node* state_values3 =
graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* frame_state = graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
state_values1, state_values2, state_values3, UndefinedConstant(),
graph()->start(), graph()->start());
Node* deopt = graph()->NewNode(
common()->Deoptimize(DeoptimizeKind::kEager, DeoptimizeReason::kNoReason),
frame_state, effect1, ifFalse);
Node* ifTrue = IfTrue();
Node* load = Load(FieldAccessAtIndex(0), finish, effect1, ifTrue);
Node* result = Return(load, effect1, ifTrue);
EndGraph();
graph()->end()->AppendInput(zone(), deopt);
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);
ASSERT_EQ(2, object_state->op()->ValueInputCount());
ASSERT_EQ(object1, NodeProperties::GetValueInput(object_state, 0));
ASSERT_EQ(object_state, NodeProperties::GetValueInput(object_state, 1));
Node* object_state2 = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state, object_state2);
}
} // namespace compiler
} // namespace internal
} // namespace v8
@@ -49,7 +49,6 @@
 'compiler/dead-code-elimination-unittest.cc',
 'compiler/diamond-unittest.cc',
 'compiler/effect-control-linearizer-unittest.cc',
-'compiler/escape-analysis-unittest.cc',
 'compiler/graph-reducer-unittest.cc',
 'compiler/graph-reducer-unittest.h',
 'compiler/graph-trimmer-unittest.cc',
...