Commit 46473f82 authored by Tobias Tebbi, committed by Commit Bot

[turbofan] delete old implementation of escape analysis

Bug: 
Change-Id: Ib9e0d0844ad5e7bc6cd038f736546cad77669321
Reviewed-on: https://chromium-review.googlesource.com/641530
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47699}
parent 8efc5f04
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -1419,10 +1419,6 @@ v8_source_set("v8_base") {
     "src/compiler/memory-optimizer.h",
     "src/compiler/move-optimizer.cc",
     "src/compiler/move-optimizer.h",
-    "src/compiler/new-escape-analysis-reducer.cc",
-    "src/compiler/new-escape-analysis-reducer.h",
-    "src/compiler/new-escape-analysis.cc",
-    "src/compiler/new-escape-analysis.h",
     "src/compiler/node-aux-data.h",
     "src/compiler/node-cache.cc",
     "src/compiler/node-cache.h",
@@ -2079,8 +2075,6 @@ v8_source_set("v8_base") {
   jumbo_excluded_sources = [
     # TODO(mostynb@opera.com): don't exclude these http://crbug.com/752428
     "src/profiler/heap-snapshot-generator.cc",  # Macro clash in mman-linux.h
-    "src/compiler/escape-analysis.cc",  # Symbol clashes with new-escape-analysis.cc
-    "src/compiler/escape-analysis-reducer.cc",  # Symbol clashes with new-escape-analysis-reducer.cc
   ]
 }
...
--- a/src/compiler/escape-analysis-reducer.cc
+++ b/src/compiler/escape-analysis-reducer.cc
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2017 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/compiler/escape-analysis-reducer.h"
 
 #include "src/compiler/all-nodes.h"
-#include "src/compiler/js-graph.h"
 #include "src/compiler/simplified-operator.h"
 #include "src/compiler/type-cache.h"
-#include "src/counters.h"
 #include "src/frame-constants.h"
 
 namespace v8 {
@@ -24,111 +22,33 @@ namespace compiler {
 #define TRACE(...)
 #endif  // DEBUG
-EscapeAnalysisReducer::EscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
-                                             EscapeAnalysis* escape_analysis,
-                                             Zone* zone)
+EscapeAnalysisReducer::EscapeAnalysisReducer(
+    Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
+    Zone* zone)
     : AdvancedReducer(editor),
       jsgraph_(jsgraph),
-      escape_analysis_(escape_analysis),
-      zone_(zone),
-      fully_reduced_(static_cast<int>(jsgraph->graph()->NodeCount() * 2), zone),
-      exists_virtual_allocate_(escape_analysis->ExistsVirtualAllocate()) {}
+      analysis_result_(analysis_result),
+      object_id_cache_(zone),
+      node_cache_(jsgraph->graph(), zone),
+      arguments_elements_(zone),
+      zone_(zone) {}
-Reduction EscapeAnalysisReducer::ReduceNode(Node* node) {
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
-      fully_reduced_.Contains(node->id())) {
-    return NoChange();
-  }
-  switch (node->opcode()) {
-    case IrOpcode::kLoadField:
-    case IrOpcode::kLoadElement:
-      return ReduceLoad(node);
-    case IrOpcode::kStoreField:
-    case IrOpcode::kStoreElement:
-      return ReduceStore(node);
-    case IrOpcode::kCheckMaps:
-      return ReduceCheckMaps(node);
-    case IrOpcode::kAllocate:
-      return ReduceAllocate(node);
-    case IrOpcode::kFinishRegion:
-      return ReduceFinishRegion(node);
-    case IrOpcode::kReferenceEqual:
-      return ReduceReferenceEqual(node);
-    case IrOpcode::kObjectIsSmi:
-      return ReduceObjectIsSmi(node);
-    // FrameStates and Value nodes are preprocessed here,
-    // and visited via ReduceFrameStateUses from their user nodes.
-    case IrOpcode::kFrameState:
-    case IrOpcode::kStateValues: {
-      if (node->id() >= static_cast<NodeId>(fully_reduced_.length()) ||
-          fully_reduced_.Contains(node->id())) {
-        break;
-      }
-      bool depends_on_object_state = false;
-      for (Node* input : node->inputs()) {
-        switch (input->opcode()) {
-          case IrOpcode::kAllocate:
-          case IrOpcode::kFinishRegion:
-            depends_on_object_state =
-                depends_on_object_state || escape_analysis()->IsVirtual(input);
-            break;
-          case IrOpcode::kFrameState:
-          case IrOpcode::kStateValues:
-            depends_on_object_state =
-                depends_on_object_state ||
-                input->id() >= static_cast<NodeId>(fully_reduced_.length()) ||
-                !fully_reduced_.Contains(input->id());
-            break;
-          default:
-            break;
-        }
-      }
-      if (!depends_on_object_state) {
-        fully_reduced_.Add(node->id());
-      }
-      return NoChange();
-    }
-    case IrOpcode::kNewUnmappedArgumentsElements:
-      arguments_elements_.insert(node);
-      break;
-    default:
-      // TODO(sigurds): Change this to GetFrameStateInputCount once
-      // it is working. For now we use EffectInputCount > 0 to determine
-      // whether a node might have a frame state input.
-      if (exists_virtual_allocate_ && node->op()->EffectInputCount() > 0) {
-        return ReduceFrameStateUses(node);
-      }
-      break;
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::Reduce(Node* node) {
-  Reduction reduction = ReduceNode(node);
-  if (reduction.Changed() && node != reduction.replacement()) {
-    escape_analysis()->SetReplacement(node, reduction.replacement());
-  }
-  return reduction;
-}
-
-namespace {
-
-Node* MaybeGuard(JSGraph* jsgraph, Zone* zone, Node* original,
-                 Node* replacement) {
+Node* EscapeAnalysisReducer::MaybeGuard(Node* original, Node* replacement) {
   // We might need to guard the replacement if the type of the {replacement}
   // node is not in a sub-type relation to the type of the the {original} node.
   Type* const replacement_type = NodeProperties::GetType(replacement);
   Type* const original_type = NodeProperties::GetType(original);
   if (!replacement_type->Is(original_type)) {
     Node* const control = NodeProperties::GetControlInput(original);
-    replacement = jsgraph->graph()->NewNode(
-        jsgraph->common()->TypeGuard(original_type), replacement, control);
+    replacement = jsgraph()->graph()->NewNode(
+        jsgraph()->common()->TypeGuard(original_type), replacement, control);
     NodeProperties::SetType(replacement, original_type);
   }
   return replacement;
 }
 
+namespace {
+
 Node* SkipTypeGuards(Node* node) {
   while (node->opcode() == IrOpcode::kTypeGuard) {
     node = NodeProperties::GetValueInput(node, 0);
@@ -138,269 +58,156 @@ Node* SkipTypeGuards(Node* node) {
 }  // namespace
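Aside: MaybeGuard above inserts a TypeGuard node only when the replacement's type is not a subtype of the original's, so downstream typed lowering keeps seeing the narrower original type. A minimal standalone C++ model of that subtype test (the bitset Type here is invented for illustration; it is not V8's Type system):

    #include <cassert>
    #include <cstdint>

    // Toy type lattice as a bitset: a type is a set of possible value kinds.
    // A type A is a subtype of B iff A's bits are contained in B's.
    struct Type {
      uint32_t bits;
      bool Is(Type other) const { return (bits & ~other.bits) == 0; }
    };

    int main() {
      Type small_int{0b001}, number{0b011};
      assert(small_int.Is(number));   // no guard needed
      assert(!number.Is(small_int));  // guard needed: re-attach the narrower
                                      // original type to the replacement
    }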
-Reduction EscapeAnalysisReducer::ReduceLoad(Node* node) {
-  DCHECK(node->opcode() == IrOpcode::kLoadField ||
-         node->opcode() == IrOpcode::kLoadElement);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(
-          SkipTypeGuards(NodeProperties::GetValueInput(node, 0)))) {
-    if (Node* rep = escape_analysis()->GetReplacement(node)) {
-      TRACE("Replaced #%d (%s) with #%d (%s)\n", node->id(),
-            node->op()->mnemonic(), rep->id(), rep->op()->mnemonic());
-      rep = MaybeGuard(jsgraph(), zone(), node, rep);
-      ReplaceWithValue(node, rep);
-      return Replace(rep);
-    }
-  }
-  return NoChange();
-}
+Node* EscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
+  VirtualObject::Id id = vobject->id();
+  if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
+  if (!object_id_cache_[id]) {
+    Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
+    NodeProperties::SetType(node, Type::Object());
+    object_id_cache_[id] = node;
+  }
+  return object_id_cache_[id];
+}
-Reduction EscapeAnalysisReducer::ReduceStore(Node* node) {
-  DCHECK(node->opcode() == IrOpcode::kStoreField ||
-         node->opcode() == IrOpcode::kStoreElement);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(
-          SkipTypeGuards(NodeProperties::GetValueInput(node, 0)))) {
-    TRACE("Removed #%d (%s) from effect chain\n", node->id(),
-          node->op()->mnemonic());
-    RelaxEffectsAndControls(node);
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceCheckMaps(Node* node) {
-  DCHECK(node->opcode() == IrOpcode::kCheckMaps);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(
-          SkipTypeGuards(NodeProperties::GetValueInput(node, 0))) &&
-      !escape_analysis()->IsEscaped(node)) {
-    TRACE("Removed #%d (%s) from effect chain\n", node->id(),
-          node->op()->mnemonic());
-    RelaxEffectsAndControls(node);
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceAllocate(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  if (escape_analysis()->IsVirtual(node)) {
-    RelaxEffectsAndControls(node);
-    TRACE("Removed allocate #%d from effect chain\n", node->id());
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceFinishRegion(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
-  Node* effect = NodeProperties::GetEffectInput(node, 0);
-  if (effect->opcode() == IrOpcode::kBeginRegion) {
-    // We only add it now to remove empty Begin/Finish region pairs
-    // in the process.
-    if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-      fully_reduced_.Add(node->id());
-    }
-    RelaxEffectsAndControls(effect);
-    RelaxEffectsAndControls(node);
-#ifdef DEBUG
-    if (FLAG_trace_turbo_escape) {
-      PrintF("Removed region #%d / #%d from effect chain,", effect->id(),
-             node->id());
-      PrintF(" %d user(s) of #%d remain(s):", node->UseCount(), node->id());
-      for (Edge edge : node->use_edges()) {
-        PrintF(" #%d", edge.from()->id());
-      }
-      PrintF("\n");
-    }
-#endif  // DEBUG
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceReferenceEqual(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kReferenceEqual);
-  Node* left = SkipTypeGuards(NodeProperties::GetValueInput(node, 0));
-  Node* right = SkipTypeGuards(NodeProperties::GetValueInput(node, 1));
-  if (escape_analysis()->IsVirtual(left)) {
-    if (escape_analysis()->IsVirtual(right) &&
-        escape_analysis()->CompareVirtualObjects(left, right)) {
-      ReplaceWithValue(node, jsgraph()->TrueConstant());
-      TRACE("Replaced ref eq #%d with true\n", node->id());
-      return Replace(jsgraph()->TrueConstant());
-    }
-    // Right-hand side is not a virtual object, or a different one.
-    ReplaceWithValue(node, jsgraph()->FalseConstant());
-    TRACE("Replaced ref eq #%d with false\n", node->id());
-    return Replace(jsgraph()->FalseConstant());
-  } else if (escape_analysis()->IsVirtual(right)) {
-    // Left-hand side is not a virtual object.
-    ReplaceWithValue(node, jsgraph()->FalseConstant());
-    TRACE("Replaced ref eq #%d with false\n", node->id());
-    return Replace(jsgraph()->FalseConstant());
-  }
-  return NoChange();
-}
+Reduction EscapeAnalysisReducer::Reduce(Node* node) {
+  if (Node* replacement = analysis_result().GetReplacementOf(node)) {
+    DCHECK(node->opcode() != IrOpcode::kAllocate &&
+           node->opcode() != IrOpcode::kFinishRegion);
+    DCHECK_NE(replacement, node);
+    if (replacement != jsgraph()->Dead()) {
+      replacement = MaybeGuard(node, replacement);
+    }
+    RelaxEffectsAndControls(node);
+    return Replace(replacement);
+  }
+
+  switch (node->opcode()) {
+    case IrOpcode::kAllocate: {
+      const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
+      if (vobject && !vobject->HasEscaped()) {
+        RelaxEffectsAndControls(node);
+      }
+      return NoChange();
+    }
+    case IrOpcode::kFinishRegion: {
+      Node* effect = NodeProperties::GetEffectInput(node, 0);
+      if (effect->opcode() == IrOpcode::kBeginRegion) {
+        RelaxEffectsAndControls(effect);
+        RelaxEffectsAndControls(node);
+      }
+      return NoChange();
+    }
+    case IrOpcode::kNewUnmappedArgumentsElements:
+      arguments_elements_.insert(node);
+      return NoChange();
+    default: {
+      // TODO(sigurds): Change this to GetFrameStateInputCount once
+      // it is working. For now we use EffectInputCount > 0 to determine
+      // whether a node might have a frame state input.
+      if (node->op()->EffectInputCount() > 0) {
+        ReduceFrameStateInputs(node);
+      }
+      return NoChange();
+    }
+  }
+}
-Reduction EscapeAnalysisReducer::ReduceObjectIsSmi(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kObjectIsSmi);
-  Node* input = SkipTypeGuards(NodeProperties::GetValueInput(node, 0));
-  if (escape_analysis()->IsVirtual(input)) {
-    ReplaceWithValue(node, jsgraph()->FalseConstant());
-    TRACE("Replaced ObjectIsSmi #%d with false\n", node->id());
-    return Replace(jsgraph()->FalseConstant());
-  }
-  return NoChange();
-}
-
-Reduction EscapeAnalysisReducer::ReduceFrameStateUses(Node* node) {
-  DCHECK_GE(node->op()->EffectInputCount(), 1);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  bool changed = false;
-  for (int i = 0; i < node->InputCount(); ++i) {
-    Node* input = node->InputAt(i);
-    if (input->opcode() == IrOpcode::kFrameState) {
-      if (Node* ret = ReduceDeoptState(input, node, false)) {
-        node->ReplaceInput(i, ret);
-        changed = true;
-      }
-    }
-  }
-  if (changed) {
-    return Changed(node);
-  }
-  return NoChange();
-}
+// While doing DFS on the FrameState tree, we have to recognize duplicate
+// occurrences of virtual objects.
+class Deduplicator {
+ public:
+  explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
+  bool SeenBefore(const VirtualObject* vobject) {
+    VirtualObject::Id id = vobject->id();
+    if (id >= is_duplicate_.size()) {
+      is_duplicate_.resize(id + 1);
+    }
+    bool is_duplicate = is_duplicate_[id];
+    is_duplicate_[id] = true;
+    return is_duplicate;
+  }
+
+ private:
+  ZoneVector<bool> is_duplicate_;
+};
+
+void EscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
+  DCHECK_GE(node->op()->EffectInputCount(), 1);
+  for (int i = 0; i < node->InputCount(); ++i) {
+    Node* input = node->InputAt(i);
+    if (input->opcode() == IrOpcode::kFrameState) {
+      Deduplicator deduplicator(zone());
+      if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
+        node->ReplaceInput(i, ret);
+      }
+    }
+  }
+}
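Aside: the Deduplicator above gives each virtual object one inline expansion per frame state: the first SeenBefore call for an id answers false, every later call answers true, and the caller then emits an ObjectId back-reference instead of a second ObjectState. A standalone sketch of that first-visit test (plain std::vector in place of ZoneVector; the surrounding code is invented):

    #include <cassert>
    #include <vector>

    // First call for an id returns false (expand the object inline),
    // every later call returns true (emit an ObjectId back-reference).
    class Deduplicator {
     public:
      bool SeenBefore(size_t id) {
        if (id >= seen_.size()) seen_.resize(id + 1, false);
        bool duplicate = seen_[id];
        seen_[id] = true;
        return duplicate;
      }

     private:
      std::vector<bool> seen_;
    };

    int main() {
      Deduplicator dedup;
      assert(!dedup.SeenBefore(3));  // first visit: materialize ObjectState
      assert(dedup.SeenBefore(3));   // duplicate: reference existing object
    }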
-// Returns the clone if it duplicated the node, and null otherwise.
 Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
-                                              bool multiple_users) {
-  DCHECK(node->opcode() == IrOpcode::kFrameState ||
-         node->opcode() == IrOpcode::kStateValues);
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
-      fully_reduced_.Contains(node->id())) {
-    return nullptr;
-  }
-  TRACE("Reducing %s %d\n", node->op()->mnemonic(), node->id());
-  Node* clone = nullptr;
-  bool node_multiused = node->UseCount() > 1;
-  bool multiple_users_rec = multiple_users || node_multiused;
-  for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
-    Node* input = NodeProperties::GetValueInput(node, i);
-    if (input->opcode() == IrOpcode::kStateValues) {
-      if (Node* ret = ReduceDeoptState(input, effect, multiple_users_rec)) {
-        if (node_multiused || (multiple_users && !clone)) {
-          TRACE("  Cloning #%d", node->id());
-          node = clone = jsgraph()->graph()->CloneNode(node);
-          TRACE(" to #%d\n", node->id());
-          node_multiused = false;
-        }
-        NodeProperties::ReplaceValueInput(node, ret, i);
-      }
-    } else {
-      if (Node* ret = ReduceStateValueInput(node, i, effect, node_multiused,
-                                            clone, multiple_users)) {
-        DCHECK_NULL(clone);
-        node_multiused = false;  // Don't clone anymore.
-        node = clone = ret;
-      }
-    }
-  }
+                                              Deduplicator* deduplicator) {
   if (node->opcode() == IrOpcode::kFrameState) {
-    Node* outer_frame_state = NodeProperties::GetFrameStateInput(node);
-    if (outer_frame_state->opcode() == IrOpcode::kFrameState) {
-      if (Node* ret =
-              ReduceDeoptState(outer_frame_state, effect, multiple_users_rec)) {
-        if (node_multiused || (multiple_users && !clone)) {
-          TRACE("  Cloning #%d", node->id());
-          node = clone = jsgraph()->graph()->CloneNode(node);
-          TRACE(" to #%d\n", node->id());
-        }
-        NodeProperties::ReplaceFrameStateInput(node, ret);
-      }
-    }
-  }
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
-    fully_reduced_.Add(node->id());
-  }
-  return clone;
-}
-
-// Returns the clone if it duplicated the node, and null otherwise.
-Node* EscapeAnalysisReducer::ReduceStateValueInput(Node* node, int node_index,
-                                                   Node* effect,
-                                                   bool node_multiused,
-                                                   bool already_cloned,
-                                                   bool multiple_users) {
-  Node* input = SkipTypeGuards(NodeProperties::GetValueInput(node, node_index));
-  if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
-      fully_reduced_.Contains(node->id())) {
-    return nullptr;
-  }
-  TRACE("Reducing State Input #%d (%s)\n", input->id(),
-        input->op()->mnemonic());
-  Node* clone = nullptr;
-  if (input->opcode() == IrOpcode::kFinishRegion ||
-      input->opcode() == IrOpcode::kAllocate) {
-    if (escape_analysis()->IsVirtual(input)) {
-      if (escape_analysis()->IsCyclicObjectState(effect, input)) {
-        // TODO(mstarzinger): Represent cyclic object states differently to
-        // ensure the scheduler can properly handle such object states.
-        compilation_failed_ = true;
-        return nullptr;
-      }
-      if (Node* object_state =
-              escape_analysis()->GetOrCreateObjectState(effect, input)) {
-        if (node_multiused || (multiple_users && !already_cloned)) {
-          TRACE("Cloning #%d", node->id());
-          node = clone = jsgraph()->graph()->CloneNode(node);
-          TRACE(" to #%d\n", node->id());
-          node_multiused = false;
-          already_cloned = true;
-        }
-        NodeProperties::ReplaceValueInput(node, object_state, node_index);
-        TRACE("Replaced state #%d input #%d with object state #%d\n",
-              node->id(), input->id(), object_state->id());
-      } else {
-        TRACE("No object state replacement for #%d at effect #%d available.\n",
-              input->id(), effect->id());
-        UNREACHABLE();
-      }
-    }
-  }
-  return clone;
-}
+    NodeHashCache::Constructor new_node(&node_cache_, node);
+    // This input order is important to match the DFS traversal used in the
+    // instruction selector. Otherwise, the instruction selector might find a
+    // duplicate node before the original one.
+    for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
+                         kFrameStateParametersInput, kFrameStateContextInput,
+                         kFrameStateLocalsInput, kFrameStateStackInput}) {
+      Node* input = node->InputAt(input_id);
+      new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
+                            input_id);
+    }
+    return new_node.Get();
+  } else if (node->opcode() == IrOpcode::kStateValues) {
+    NodeHashCache::Constructor new_node(&node_cache_, node);
+    for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
+      Node* input = NodeProperties::GetValueInput(node, i);
+      new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
+                                 i);
+    }
+    return new_node.Get();
+  } else if (const VirtualObject* vobject =
+                 analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
+    if (vobject->HasEscaped()) return node;
+    if (deduplicator->SeenBefore(vobject)) {
+      return ObjectIdNode(vobject);
+    } else {
+      std::vector<Node*> inputs;
+      for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
+        Node* field =
+            analysis_result().GetVirtualObjectField(vobject, offset, effect);
+        CHECK_NOT_NULL(field);
+        if (field != jsgraph()->Dead()) {
+          inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
+        }
+      }
+      int num_inputs = static_cast<int>(inputs.size());
+      NodeHashCache::Constructor new_node(
+          &node_cache_,
+          jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
+          num_inputs, &inputs.front(), NodeProperties::GetType(node));
+      return new_node.Get();
+    }
+  } else {
+    return node;
+  }
+}
 void EscapeAnalysisReducer::VerifyReplacement() const {
-#ifdef DEBUG
   AllNodes all(zone(), jsgraph()->graph());
   for (Node* node : all.reachable) {
     if (node->opcode() == IrOpcode::kAllocate) {
-      CHECK(!escape_analysis_->IsVirtual(node));
+      if (const VirtualObject* vobject =
+              analysis_result().GetVirtualObject(node)) {
+        if (!vobject->HasEscaped()) {
+          V8_Fatal(__FILE__, __LINE__,
+                   "Escape analysis failed to remove node %s#%d\n",
+                   node->op()->mnemonic(), node->id());
+        }
+      }
     }
   }
-#endif  // DEBUG
 }
 
 void EscapeAnalysisReducer::Finalize() {
@@ -517,6 +324,88 @@ void EscapeAnalysisReducer::Finalize() {
 }
 }
+Node* NodeHashCache::Query(Node* node) {
+  auto it = cache_.find(node);
+  if (it != cache_.end()) {
+    return *it;
+  } else {
+    return nullptr;
+  }
+}
+
+NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
+                                        const Operator* op, int input_count,
+                                        Node** inputs, Type* type)
+    : node_cache_(cache), from_(nullptr) {
+  if (node_cache_->temp_nodes_.size() > 0) {
+    tmp_ = node_cache_->temp_nodes_.back();
+    node_cache_->temp_nodes_.pop_back();
+    int tmp_input_count = tmp_->InputCount();
+    if (input_count <= tmp_input_count) {
+      tmp_->TrimInputCount(input_count);
+    }
+    for (int i = 0; i < input_count; ++i) {
+      if (i < tmp_input_count) {
+        tmp_->ReplaceInput(i, inputs[i]);
+      } else {
+        tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
+      }
+    }
+    NodeProperties::ChangeOp(tmp_, op);
+  } else {
+    tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
+  }
+  NodeProperties::SetType(tmp_, type);
+}
+
+Node* NodeHashCache::Constructor::Get() {
+  DCHECK(tmp_ || from_);
+  Node* node;
+  if (!tmp_) {
+    node = node_cache_->Query(from_);
+    if (!node) node = from_;
+  } else {
+    node = node_cache_->Query(tmp_);
+    if (node) {
+      node_cache_->temp_nodes_.push_back(tmp_);
+    } else {
+      node = tmp_;
+      node_cache_->Insert(node);
+    }
+  }
+  tmp_ = from_ = nullptr;
+  return node;
+}
+
+Node* NodeHashCache::Constructor::MutableNode() {
+  DCHECK(tmp_ || from_);
+  if (!tmp_) {
+    if (node_cache_->temp_nodes_.empty()) {
+      tmp_ = node_cache_->graph_->CloneNode(from_);
+    } else {
+      tmp_ = node_cache_->temp_nodes_.back();
+      node_cache_->temp_nodes_.pop_back();
+      int from_input_count = from_->InputCount();
+      int tmp_input_count = tmp_->InputCount();
+      if (from_input_count <= tmp_input_count) {
+        tmp_->TrimInputCount(from_input_count);
+      }
+      for (int i = 0; i < from_input_count; ++i) {
+        if (i < tmp_input_count) {
+          tmp_->ReplaceInput(i, from_->InputAt(i));
+        } else {
+          tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
+        }
+      }
+      NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
+      NodeProperties::ChangeOp(tmp_, from_->op());
+    }
+  }
+  return tmp_;
+}
+
+#undef TRACE
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
--- a/src/compiler/escape-analysis-reducer.h
+++ b/src/compiler/escape-analysis-reducer.h
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2017 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
@@ -6,7 +6,6 @@
 #define V8_COMPILER_ESCAPE_ANALYSIS_REDUCER_H_
 
 #include "src/base/compiler-specific.h"
-#include "src/bit-vector.h"
 #include "src/compiler/escape-analysis.h"
 #include "src/compiler/graph-reducer.h"
 #include "src/globals.h"
@@ -15,55 +14,101 @@ namespace v8 {
 namespace internal {
 namespace compiler {
 
-// Forward declarations.
+class Deduplicator;
 class JSGraph;
+// Perform hash-consing when creating or mutating nodes. Used to avoid duplicate
+// nodes when creating ObjectState, StateValues and FrameState nodes
+class NodeHashCache {
+ public:
+  NodeHashCache(Graph* graph, Zone* zone)
+      : graph_(graph), cache_(zone), temp_nodes_(zone) {}
+
+  // Handle to a conceptually new mutable node. Tries to re-use existing nodes
+  // and to recycle memory if possible.
+  class Constructor {
+   public:
+    // Construct a new node as a clone of [from].
+    Constructor(NodeHashCache* cache, Node* from)
+        : node_cache_(cache), from_(from), tmp_(nullptr) {}
+    // Construct a new node from scratch.
+    Constructor(NodeHashCache* cache, const Operator* op, int input_count,
+                Node** inputs, Type* type);
+
+    // Modify the new node.
+    void ReplaceValueInput(Node* input, int i) {
+      if (!tmp_ && input == NodeProperties::GetValueInput(from_, i)) return;
+      Node* node = MutableNode();
+      NodeProperties::ReplaceValueInput(node, input, i);
+    }
+    void ReplaceInput(Node* input, int i) {
+      if (!tmp_ && input == from_->InputAt(i)) return;
+      Node* node = MutableNode();
+      node->ReplaceInput(i, input);
+    }
+
+    // Obtain the mutated node or a cached copy. Invalidates the [Constructor].
+    Node* Get();
+
+   private:
+    Node* MutableNode();
+
+    NodeHashCache* node_cache_;
+    // Original node, copied on write.
+    Node* from_;
+    // Temporary node used for mutations, can be recycled if cache is hit.
+    Node* tmp_;
+  };
+
+ private:
+  Node* Query(Node* node);
+  void Insert(Node* node) { cache_.insert(node); }
+
+  Graph* graph_;
+  struct NodeEquals {
+    bool operator()(Node* a, Node* b) const {
+      return NodeProperties::Equals(a, b);
+    }
+  };
+  struct NodeHashCode {
+    size_t operator()(Node* n) const { return NodeProperties::HashCode(n); }
+  };
+  ZoneUnorderedSet<Node*, NodeHashCode, NodeEquals> cache_;
+  // Unused nodes whose memory can be recycled.
+  ZoneVector<Node*> temp_nodes_;
+};
+// Modify the graph according to the information computed in the previous phase.
 class V8_EXPORT_PRIVATE EscapeAnalysisReducer final
     : public NON_EXPORTED_BASE(AdvancedReducer) {
  public:
   EscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
-                        EscapeAnalysis* escape_analysis, Zone* zone);
+                        EscapeAnalysisResult analysis_result, Zone* zone);
 
-  Reduction Reduce(Node* node) override;
   const char* reducer_name() const override { return "EscapeAnalysisReducer"; }
+  Reduction Reduce(Node* node) final;
   void Finalize() override;
 
   // Verifies that all virtual allocation nodes have been dealt with. Run it
-  // after this reducer has been applied. Has no effect in release mode.
+  // after this reducer has been applied.
   void VerifyReplacement() const;
-  bool compilation_failed() const { return compilation_failed_; }
 
  private:
-  Reduction ReduceNode(Node* node);
-  Reduction ReduceLoad(Node* node);
-  Reduction ReduceStore(Node* node);
-  Reduction ReduceCheckMaps(Node* node);
-  Reduction ReduceAllocate(Node* node);
-  Reduction ReduceFinishRegion(Node* node);
-  Reduction ReduceReferenceEqual(Node* node);
-  Reduction ReduceObjectIsSmi(Node* node);
-  Reduction ReduceFrameStateUses(Node* node);
-  Node* ReduceDeoptState(Node* node, Node* effect, bool multiple_users);
-  Node* ReduceStateValueInput(Node* node, int node_index, Node* effect,
-                              bool node_multiused, bool already_cloned,
-                              bool multiple_users);
+  void ReduceFrameStateInputs(Node* node);
+  Node* ReduceDeoptState(Node* node, Node* effect, Deduplicator* deduplicator);
+  Node* ObjectIdNode(const VirtualObject* vobject);
+  Node* MaybeGuard(Node* original, Node* replacement);
 
   JSGraph* jsgraph() const { return jsgraph_; }
-  EscapeAnalysis* escape_analysis() const { return escape_analysis_; }
+  EscapeAnalysisResult analysis_result() const { return analysis_result_; }
   Zone* zone() const { return zone_; }
 
   JSGraph* const jsgraph_;
-  EscapeAnalysis* escape_analysis_;
+  EscapeAnalysisResult analysis_result_;
+  ZoneVector<Node*> object_id_cache_;
+  NodeHashCache node_cache_;
+  ZoneSet<Node*> arguments_elements_;
   Zone* const zone_;
-  // This bit vector marks nodes we already processed (allocs, loads, stores)
-  // and nodes that do not need a visit from ReduceDeoptState etc.
-  BitVector fully_reduced_;
-  bool exists_virtual_allocate_;
-  std::set<Node*> arguments_elements_;
-  bool compilation_failed_ = false;
 
   DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisReducer);
 };
...
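Aside: NodeHashCache, declared above, is a hash-consing cache: a node is keyed by its operator and inputs, so constructing a structurally identical FrameState, StateValues, or ObjectState yields the already-cached representative instead of a duplicate. A standalone sketch of the idea using std::unordered_set (the Node struct and hash below are invented stand-ins, not V8's Node or NodeProperties::HashCode):

    #include <cassert>
    #include <cstddef>
    #include <functional>
    #include <unordered_set>
    #include <vector>

    struct Node {
      int op;                     // stands in for the Operator*
      std::vector<Node*> inputs;  // value/effect/control inputs
    };

    struct NodeHash {
      size_t operator()(const Node* n) const {
        size_t h = std::hash<int>()(n->op);
        for (Node* in : n->inputs) h = h * 31 + std::hash<Node*>()(in);
        return h;
      }
    };
    struct NodeEq {
      bool operator()(const Node* a, const Node* b) const {
        return a->op == b->op && a->inputs == b->inputs;
      }
    };

    int main() {
      std::unordered_set<Node*, NodeHash, NodeEq> cache;
      Node a{7, {}}, b{7, {}};
      cache.insert(&a);
      // A structurally identical node hits the cache instead of being added.
      assert(*cache.find(&b) == &a);
    }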
--- a/src/compiler/escape-analysis.cc
+++ b/src/compiler/escape-analysis.cc
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2017 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/compiler/escape-analysis.h"
 
-#include <limits>
-
-#include "src/base/flags.h"
 #include "src/bootstrapper.h"
-#include "src/compilation-dependencies.h"
-#include "src/compiler/common-operator.h"
-#include "src/compiler/graph-reducer.h"
-#include "src/compiler/js-operator.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/node-matchers.h"
-#include "src/compiler/node-properties.h"
-#include "src/compiler/node.h"
 #include "src/compiler/operator-properties.h"
 #include "src/compiler/simplified-operator.h"
-#include "src/compiler/type-cache.h"
 #include "src/objects-inl.h"
 
-namespace v8 {
-namespace internal {
-namespace compiler {
-
-typedef NodeId Alias;
-
 #ifdef DEBUG
 #define TRACE(...) \
   do { \
@@ -36,1760 +20,718 @@ typedef NodeId Alias;
 #define TRACE(...)
 #endif
 
+namespace v8 {
+namespace internal {
+namespace compiler {
-// EscapeStatusAnalysis determines for each allocation whether it escapes.
-class EscapeStatusAnalysis : public ZoneObject {
- public:
-  enum Status {
-    kUnknown = 0u,
-    kTracked = 1u << 0,
-    kEscaped = 1u << 1,
-    kOnStack = 1u << 2,
-    kVisited = 1u << 3,
-    // A node is dangling, if it is a load of some kind, and does not have
-    // an effect successor.
-    kDanglingComputed = 1u << 4,
-    kDangling = 1u << 5,
-    // A node is is an effect branch point, if it has more than 2 non-dangling
-    // effect successors.
-    kBranchPointComputed = 1u << 6,
-    kBranchPoint = 1u << 7,
-    kInQueue = 1u << 8
-  };
-  typedef base::Flags<Status, uint16_t> StatusFlags;
-
-  void RunStatusAnalysis();
-
-  bool IsVirtual(Node* node);
-  bool IsEscaped(Node* node);
-  bool IsAllocation(Node* node);
-
-  bool IsInQueue(NodeId id);
-  void SetInQueue(NodeId id, bool on_stack);
-
-  void DebugPrint();
-
-  EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
-                       Zone* zone);
-  void EnqueueForStatusAnalysis(Node* node);
-  bool SetEscaped(Node* node);
-  bool IsEffectBranchPoint(Node* node);
-  bool IsDanglingEffectNode(Node* node);
-  void ResizeStatusVector();
-  size_t GetStatusVectorSize();
-  bool IsVirtual(NodeId id);
-
-  Graph* graph() const { return graph_; }
-  void AssignAliases();
-  Alias GetAlias(NodeId id) const { return aliases_[id]; }
-  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
-  Alias AliasCount() const { return next_free_alias_; }
-  static const Alias kNotReachable;
-  static const Alias kUntrackable;
-
-  bool IsNotReachable(Node* node);
-
- private:
-  void Process(Node* node);
-  void ProcessAllocate(Node* node);
-  void ProcessFinishRegion(Node* node);
-  void ProcessStoreField(Node* node);
-  void ProcessStoreElement(Node* node);
-  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
-    return CheckUsesForEscape(node, node, phi_escaping);
-  }
-  bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
-  void RevisitUses(Node* node);
-  void RevisitInputs(Node* node);
-
-  Alias NextAlias() { return next_free_alias_++; }
-
-  bool HasEntry(Node* node);
-
-  bool IsAllocationPhi(Node* node);
-
-  ZoneVector<Node*> stack_;
-  EscapeAnalysis* object_analysis_;
-  Graph* const graph_;
-  ZoneVector<StatusFlags> status_;
-  Alias next_free_alias_;
-  ZoneVector<Node*> status_stack_;
-  ZoneVector<Alias> aliases_;
-
-  DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
-};
-
-DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)
-
-const Alias EscapeStatusAnalysis::kNotReachable =
-    std::numeric_limits<Alias>::max();
-const Alias EscapeStatusAnalysis::kUntrackable =
-    std::numeric_limits<Alias>::max() - 1;
-
-namespace impl {
-class VirtualObject : public ZoneObject {
- public:
-  enum Status {
-    kInitial = 0,
-    kTracked = 1u << 0,
-    kInitialized = 1u << 1,
-    kCopyRequired = 1u << 2,
-  };
-  typedef base::Flags<Status, unsigned char> StatusFlags;
-
-  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
-      : id_(id),
-        status_(kInitial),
-        fields_(zone),
-        phi_(zone),
-        object_state_(nullptr),
-        owner_(owner) {}
-
-  VirtualObject(VirtualState* owner, const VirtualObject& other)
-      : id_(other.id_),
-        status_(other.status_ & ~kCopyRequired),
-        fields_(other.fields_),
-        phi_(other.phi_),
-        object_state_(other.object_state_),
-        owner_(owner) {}
-
-  VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
-                bool initialized)
-      : id_(id),
-        status_(kTracked | (initialized ? kInitialized : kInitial)),
-        fields_(zone),
-        phi_(zone),
-        object_state_(nullptr),
-        owner_(owner) {
-    fields_.resize(field_number);
-    phi_.resize(field_number, false);
-  }
-
-  Node* GetField(size_t offset) { return fields_[offset]; }
-
-  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }
-
-  void SetField(size_t offset, Node* node, bool created_phi = false) {
-    TRACE("    VirtualObject(%p)[%zu] changes from #%i to #%i\n",
-          static_cast<void*>(this), offset,
-          fields_[offset] ? fields_[offset]->id() : -1, node ? node->id() : -1);
-    fields_[offset] = node;
-    phi_[offset] = created_phi;
-  }
-  bool IsTracked() const { return status_ & kTracked; }
-  bool IsInitialized() const { return status_ & kInitialized; }
-  bool SetInitialized() { return status_ |= kInitialized; }
-  VirtualState* owner() const { return owner_; }
-
-  Node** fields_array() { return &fields_.front(); }
-  size_t field_count() { return fields_.size(); }
-  bool ResizeFields(size_t field_count) {
-    if (field_count > fields_.size()) {
-      fields_.resize(field_count);
-      phi_.resize(field_count);
-      return true;
-    }
-    return false;
-  }
-  void ClearAllFields() {
-    for (size_t i = 0; i < fields_.size(); ++i) {
-      fields_[i] = nullptr;
-      phi_[i] = false;
-    }
-  }
-  bool AllFieldsClear() {
-    for (size_t i = 0; i < fields_.size(); ++i) {
-      if (fields_[i] != nullptr) {
-        return false;
-      }
-    }
-    return true;
-  }
-  bool UpdateFrom(const VirtualObject& other);
-  bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
-                 CommonOperatorBuilder* common, bool initialMerge);
-  void SetObjectState(Node* node) { object_state_ = node; }
-  Node* GetObjectState() const { return object_state_; }
-  bool IsCopyRequired() const { return status_ & kCopyRequired; }
-  void SetCopyRequired() { status_ |= kCopyRequired; }
-  bool NeedCopyForModification() {
-    if (!IsCopyRequired() || !IsInitialized()) {
-      return false;
-    }
-    return true;
-  }
-
-  NodeId id() const { return id_; }
-  void id(NodeId id) { id_ = id; }
-
- private:
-  bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
-                   CommonOperatorBuilder* common);
-
-  NodeId id_;
-  StatusFlags status_;
-  ZoneVector<Node*> fields_;
-  ZoneVector<bool> phi_;
-  Node* object_state_;
-  VirtualState* owner_;
-
-  DISALLOW_COPY_AND_ASSIGN(VirtualObject);
-};
+template <class T>
+class Sidetable {
+ public:
+  explicit Sidetable(Zone* zone) : map_(zone) {}
+  T& operator[](const Node* node) {
+    NodeId id = node->id();
+    if (id >= map_.size()) {
+      map_.resize(id + 1);
+    }
+    return map_[id];
+  }
+
+ private:
+  ZoneVector<T> map_;
+};
-DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)
-
-bool VirtualObject::UpdateFrom(const VirtualObject& other) {
-  TRACE("%p.UpdateFrom(%p)\n", static_cast<void*>(this),
-        static_cast<const void*>(&other));
-  bool changed = status_ != other.status_;
-  status_ = other.status_;
-  phi_ = other.phi_;
-  if (fields_.size() != other.fields_.size()) {
-    fields_ = other.fields_;
-    return true;
-  }
-  for (size_t i = 0; i < fields_.size(); ++i) {
-    if (fields_[i] != other.fields_[i]) {
-      changed = true;
-      fields_[i] = other.fields_[i];
-    }
-  }
-  return changed;
-}
-
-class VirtualState : public ZoneObject {
- public:
-  VirtualState(Node* owner, Zone* zone, size_t size)
-      : info_(size, nullptr, zone),
-        initialized_(static_cast<int>(size), zone),
-        owner_(owner) {}
-
-  VirtualState(Node* owner, const VirtualState& state)
-      : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
-        initialized_(state.initialized_.length(),
-                     state.info_.get_allocator().zone()),
-        owner_(owner) {
-    for (size_t i = 0; i < info_.size(); ++i) {
-      if (state.info_[i]) {
-        info_[i] = state.info_[i];
-      }
-    }
-  }
-
-  VirtualObject* VirtualObjectFromAlias(size_t alias);
-  void SetVirtualObject(Alias alias, VirtualObject* state);
-  bool UpdateFrom(VirtualState* state, Zone* zone);
-  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
-                 CommonOperatorBuilder* common, Node* at);
-  size_t size() const { return info_.size(); }
-  Node* owner() const { return owner_; }
-  VirtualObject* Copy(VirtualObject* obj, Alias alias);
-  void SetCopyRequired() {
-    for (VirtualObject* obj : info_) {
-      if (obj) obj->SetCopyRequired();
-    }
-  }
-
- private:
-  ZoneVector<VirtualObject*> info_;
-  BitVector initialized_;
-  Node* owner_;
-
-  DISALLOW_COPY_AND_ASSIGN(VirtualState);
-};
+template <class T>
+class SparseSidetable {
+ public:
+  explicit SparseSidetable(Zone* zone, T def_value = T())
+      : def_value_(std::move(def_value)), map_(zone) {}
+  void Set(const Node* node, T value) {
+    auto iter = map_.find(node->id());
+    if (iter != map_.end()) {
+      iter->second = std::move(value);
+    } else if (value != def_value_) {
+      map_.insert(iter, std::make_pair(node->id(), std::move(value)));
+    }
+  }
+  const T& Get(const Node* node) const {
+    auto iter = map_.find(node->id());
+    return iter != map_.end() ? iter->second : def_value_;
+  }
+
+ private:
+  T def_value_;
+  ZoneUnorderedMap<NodeId, T> map_;
+};
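Aside: Sidetable above is a dense per-NodeId array, while SparseSidetable stores only entries that differ from a default value, which is the right trade-off for per-node state that is almost always the default. A standalone model of the same Set/Get contract (std::unordered_map in place of ZoneUnorderedMap; the int key stands in for NodeId):

    #include <cassert>
    #include <unordered_map>

    template <class T>
    class SparseSidetable {
     public:
      explicit SparseSidetable(T def = T()) : def_(def) {}
      void Set(int id, T value) {
        auto it = map_.find(id);
        if (it != map_.end()) {
          it->second = value;
        } else if (!(value == def_)) {
          map_.emplace(id, value);  // only non-default values take space
        }
      }
      const T& Get(int id) const {
        auto it = map_.find(id);
        return it != map_.end() ? it->second : def_;
      }

     private:
      T def_;
      std::unordered_map<int, T> map_;
    };

    int main() {
      SparseSidetable<int> table(-1);
      assert(table.Get(42) == -1);  // unset ids read as the default
      table.Set(42, 7);
      assert(table.Get(42) == 7);
    }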
-class MergeCache : public ZoneObject {
- public:
-  explicit MergeCache(Zone* zone)
-      : states_(zone), objects_(zone), fields_(zone) {
-    states_.reserve(5);
-    objects_.reserve(5);
-    fields_.reserve(5);
-  }
-  ZoneVector<VirtualState*>& states() { return states_; }
-  ZoneVector<VirtualObject*>& objects() { return objects_; }
-  ZoneVector<Node*>& fields() { return fields_; }
-  void Clear() {
-    states_.clear();
-    objects_.clear();
-    fields_.clear();
-  }
-  size_t LoadVirtualObjectsFromStatesFor(Alias alias);
-  void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
-                                       const ZoneVector<Alias>& aliases);
-  Node* GetFields(size_t pos);
-
- private:
-  ZoneVector<VirtualState*> states_;
-  ZoneVector<VirtualObject*> objects_;
-  ZoneVector<Node*> fields_;
-
-  DISALLOW_COPY_AND_ASSIGN(MergeCache);
-};
+// Keeps track of the changes to the current node during reduction.
+// Encapsulates the current state of the IR graph and the reducer state like
+// side-tables. All access to the IR and the reducer state should happen through
+// a ReduceScope to ensure that changes and dependencies are tracked and all
+// necessary node revisitations happen.
+class ReduceScope {
+ public:
+  typedef EffectGraphReducer::Reduction Reduction;
+  explicit ReduceScope(Node* node, Reduction* reduction)
+      : current_node_(node), reduction_(reduction) {}
+
+ protected:
+  Node* current_node() const { return current_node_; }
+  Reduction* reduction() { return reduction_; }
+
+ private:
+  Node* current_node_;
+  Reduction* reduction_;
+};
-size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
-  objects_.clear();
-  DCHECK_GT(states_.size(), 0u);
-  size_t min = std::numeric_limits<size_t>::max();
-  for (VirtualState* state : states_) {
-    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
-      objects_.push_back(obj);
-      min = std::min(obj->field_count(), min);
-    }
-  }
-  return min;
-}
-
-void MergeCache::LoadVirtualObjectsForFieldsFrom(
-    VirtualState* state, const ZoneVector<Alias>& aliases) {
-  objects_.clear();
-  size_t max_alias = state->size();
-  for (Node* field : fields_) {
-    Alias alias = aliases[field->id()];
-    if (alias >= max_alias) continue;
-    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
-      objects_.push_back(obj);
-    }
-  }
-}
-
-Node* MergeCache::GetFields(size_t pos) {
-  fields_.clear();
-  Node* rep = pos >= objects_.front()->field_count()
-                  ? nullptr
-                  : objects_.front()->GetField(pos);
-  for (VirtualObject* obj : objects_) {
-    if (pos >= obj->field_count()) continue;
-    Node* field = obj->GetField(pos);
-    if (field) {
-      fields_.push_back(field);
-    }
-    if (field != rep) {
-      rep = nullptr;
-    }
-  }
-  return rep;
-}
-
-VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
-  if (obj->owner() == this) return obj;
-  VirtualObject* new_obj =
-      new (info_.get_allocator().zone()) VirtualObject(this, *obj);
-  TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
-        static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
-        static_cast<void*>(new_obj));
-  info_[alias] = new_obj;
-  return new_obj;
-}
-
-VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
-  return info_[alias];
-}
-
-void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
-  info_[alias] = obj;
-  if (obj) initialized_.Add(alias);
-}
-
-bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
-  if (from == this) return false;
-  bool changed = false;
-  for (Alias alias = 0; alias < size(); ++alias) {
-    VirtualObject* ls = VirtualObjectFromAlias(alias);
-    VirtualObject* rs = from->VirtualObjectFromAlias(alias);
-    if (ls == rs || rs == nullptr) continue;
-    if (ls == nullptr) {
-      ls = new (zone) VirtualObject(this, *rs);
-      SetVirtualObject(alias, ls);
-      changed = true;
-      continue;
-    }
-    TRACE("  Updating fields of @%d\n", alias);
-    changed = ls->UpdateFrom(*rs) || changed;
-  }
-  return false;
-}
-
-namespace {
-
-bool IsEquivalentPhi(Node* node1, Node* node2) {
-  if (node1 == node2) return true;
-  if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
-      node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
-    return false;
-  }
-  for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
-    Node* input1 = NodeProperties::GetValueInput(node1, i);
-    Node* input2 = NodeProperties::GetValueInput(node2, i);
-    if (!IsEquivalentPhi(input1, input2)) {
-      return false;
-    }
-  }
-  return true;
-}
-
-}  // namespace
+// A VariableTracker object keeps track of the values of variables at all points
+// of the effect chain and introduces new phi nodes when necessary.
+// Initially and by default, variables are mapped to nullptr, which means that
+// the variable allocation point does not dominate the current point on the
+// effect chain. We map variables that represent uninitialized memory to the
+// Dead node to ensure it is not read.
+// Unmapped values are impossible by construction, it is indistinguishable if a
+// PersistentMap does not contain an element or maps it to the default element.
+class VariableTracker {
+ private:
+  // The state of all variables at one point in the effect chain.
+  class State {
+    typedef PersistentMap<Variable, Node*> Map;
+
+   public:
+    explicit State(Zone* zone) : map_(zone) {}
+    Node* Get(Variable var) const {
+      CHECK(var != Variable::Invalid());
+      return map_.Get(var);
+    }
+    void Set(Variable var, Node* node) {
+      CHECK(var != Variable::Invalid());
+      return map_.Set(var, node);
+    }
+    Map::iterator begin() const { return map_.begin(); }
+    Map::iterator end() const { return map_.end(); }
+    bool operator!=(const State& other) const { return map_ != other.map_; }
+
+   private:
+    Map map_;
+  };
+
+ public:
+  VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
+  Variable NewVariable() { return Variable(next_variable_++); }
+  Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
+  Zone* zone() { return zone_; }
+
+  class Scope : public ReduceScope {
+   public:
+    Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
+    ~Scope();
+    Node* Get(Variable var) { return current_state_.Get(var); }
+    void Set(Variable var, Node* node) { current_state_.Set(var, node); }
+
+   private:
+    VariableTracker* states_;
+    State current_state_;
+  };
+
+ private:
+  State MergeInputs(Node* effect_phi);
+  Zone* zone_;
+  JSGraph* graph_;
+  SparseSidetable<State> table_;
+  ZoneVector<Node*> buffer_;
+  EffectGraphReducer* reducer_;
+  int next_variable_ = 0;
+
+  DISALLOW_COPY_AND_ASSIGN(VariableTracker);
+};
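Aside: VariableTracker maps each Variable to the node holding its value at every point of the effect chain; at an effect phi, the predecessor states are merged and a Phi node is introduced wherever they disagree (MergeInputs above). A toy standalone model of that merge, with plain std::map standing in for PersistentMap and ints standing in for nodes (all names invented, and the real tracker's nullptr-default semantics are simplified away):

    #include <cassert>
    #include <map>
    #include <string>

    // Toy merge of two variable states at an effect phi: variables that agree
    // keep their value, variables that disagree would get a Phi node.
    using State = std::map<std::string, int>;  // variable -> value id

    State MergeAtPhi(const State& a, const State& b, int* next_phi_id) {
      State result;
      for (const auto& [var, val] : a) {
        auto it = b.find(var);
        if (it == b.end()) continue;       // not dominated on one branch: drop
        if (it->second == val) {
          result[var] = val;               // both predecessors agree
        } else {
          result[var] = (*next_phi_id)++;  // disagreement: introduce a phi
        }
      }
      return result;
    }

    int main() {
      int next_phi = 100;
      State then_state{{"x", 1}, {"y", 2}}, else_state{{"x", 1}, {"y", 3}};
      State merged = MergeAtPhi(then_state, else_state, &next_phi);
      assert(merged["x"] == 1);    // same on both paths
      assert(merged["y"] == 100);  // phi created for y
    }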
-bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
-                                Graph* graph, CommonOperatorBuilder* common) {
-  bool changed = false;
-  int value_input_count = static_cast<int>(cache->fields().size());
-  Node* rep = GetField(i);
-  if (!rep || !IsCreatedPhi(i)) {
-    for (Node* input : cache->fields()) {
-      CHECK_NOT_NULL(input);
-      CHECK(!input->IsDead());
-    }
-    Node* control = NodeProperties::GetControlInput(at);
-    cache->fields().push_back(control);
-    Node* phi = graph->NewNode(
-        common->Phi(MachineRepresentation::kTagged, value_input_count),
-        value_input_count + 1, &cache->fields().front());
-    NodeProperties::SetType(phi, Type::Any());
-    SetField(i, phi, true);
-#ifdef DEBUG
-    if (FLAG_trace_turbo_escape) {
-      PrintF("    Creating Phi #%d as merge of", phi->id());
-      for (int i = 0; i < value_input_count; i++) {
-        PrintF(" #%d (%s)", cache->fields()[i]->id(),
-               cache->fields()[i]->op()->mnemonic());
-      }
-      PrintF("\n");
-    }
-#endif
-    changed = true;
-  } else {
-    DCHECK(rep->opcode() == IrOpcode::kPhi);
-    for (int n = 0; n < value_input_count; ++n) {
-      Node* old = NodeProperties::GetValueInput(rep, n);
-      if (old != cache->fields()[n]) {
-        changed = true;
-        NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
-      }
-    }
-  }
-  return changed;
-}
-
-bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
-                              CommonOperatorBuilder* common,
-                              bool initialMerge) {
-  DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
-         at->opcode() == IrOpcode::kPhi);
-  bool changed = false;
-  for (size_t i = 0; i < field_count(); ++i) {
-    if (!initialMerge && GetField(i) == nullptr) continue;
-    Node* field = cache->GetFields(i);
-    if (field && !IsCreatedPhi(i)) {
-      changed = changed || GetField(i) != field;
-      SetField(i, field);
-      TRACE("    Field %zu agree on rep #%d\n", i, field->id());
-    } else {
-      size_t arity = at->opcode() == IrOpcode::kEffectPhi
-                         ? at->op()->EffectInputCount()
-                         : at->op()->ValueInputCount();
-      if (cache->fields().size() == arity) {
-        changed = MergeFields(i, at, cache, graph, common) || changed;
-      } else {
-        if (GetField(i) != nullptr) {
-          TRACE("    Field %zu cleared\n", i);
-          changed = true;
-        }
-        SetField(i, nullptr);
-      }
-    }
-  }
-  return changed;
-}
-
-bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
-                             CommonOperatorBuilder* common, Node* at) {
-  DCHECK_GT(cache->states().size(), 0u);
-  bool changed = false;
-  for (Alias alias = 0; alias < size(); ++alias) {
-    cache->objects().clear();
-    VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
-    bool copy_merge_object = false;
-    size_t fields = std::numeric_limits<size_t>::max();
-    for (VirtualState* state : cache->states()) {
-      if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
-        cache->objects().push_back(obj);
-        if (mergeObject == obj) {
-          copy_merge_object = true;
-        }
-        fields = std::min(obj->field_count(), fields);
-      }
-    }
-    if (cache->objects().size() == cache->states().size() &&
-        (mergeObject || !initialized_.Contains(alias))) {
-      bool initialMerge = false;
-      if (!mergeObject) {
-        initialMerge = true;
-        VirtualObject* obj = new (zone)
-            VirtualObject(cache->objects().front()->id(), this, zone, fields,
-                          cache->objects().front()->IsInitialized());
-        SetVirtualObject(alias, obj);
-        mergeObject = obj;
-        changed = true;
-      } else if (copy_merge_object) {
-        VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
-        SetVirtualObject(alias, obj);
-        mergeObject = obj;
-        changed = true;
-      } else {
-        changed = mergeObject->ResizeFields(fields) || changed;
-      }
-#ifdef DEBUG
-      if (FLAG_trace_turbo_escape) {
-        PrintF("  Alias @%d, merging into %p virtual objects", alias,
-               static_cast<void*>(mergeObject));
-        for (size_t i = 0; i < cache->objects().size(); i++) {
-          PrintF(" %p", static_cast<void*>(cache->objects()[i]));
-        }
-        PrintF("\n");
-      }
-#endif  // DEBUG
-      changed =
-          mergeObject->MergeFrom(cache, at, graph, common, initialMerge) ||
-          changed;
-    } else {
-      if (mergeObject) {
-        TRACE("  Alias %d, virtual object removed\n", alias);
-        changed = true;
-      }
-      SetVirtualObject(alias, nullptr);
-    }
-  }
-  return changed;
-}
+// Encapsulates the current state of the escape analysis reducer to preserve
+// invariants regarding changes and re-visitation.
+class EscapeAnalysisTracker : public ZoneObject {
+ public:
+  EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
+                        Zone* zone)
+      : virtual_objects_(zone),
+        replacements_(zone),
+        variable_states_(jsgraph, reducer, zone),
+        jsgraph_(jsgraph),
+        zone_(zone) {}
+
+  class Scope : public VariableTracker::Scope {
+   public:
+    Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
+          Node* node, Reduction* reduction)
+        : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
+          tracker_(tracker),
+          reducer_(reducer) {}
+    const VirtualObject* GetVirtualObject(Node* node) {
+      VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
+      if (vobject) vobject->AddDependency(current_node());
+      return vobject;
+    }
+    // Create or retrieve a virtual object for the current node.
+    const VirtualObject* InitVirtualObject(int size) {
+      DCHECK(current_node()->opcode() == IrOpcode::kAllocate);
+      VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
+      if (vobject) {
+        CHECK(vobject->size() == size);
+      } else {
+        vobject = tracker_->NewVirtualObject(size);
+      }
+      if (vobject) vobject->AddDependency(current_node());
+      vobject_ = vobject;
+      return vobject;
+    }
+
+    void SetVirtualObject(Node* object) {
+      vobject_ = tracker_->virtual_objects_.Get(object);
+    }
+
+    void SetEscaped(Node* node) {
+      if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
+        if (object->HasEscaped()) return;
+        TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
+              node->op()->mnemonic(), node->id(),
+              current_node()->op()->mnemonic(), current_node()->id());
+        object->SetEscaped();
+        object->RevisitDependants(reducer_);
+      }
+    }
+    // The inputs of the current node have to be accessed through the scope to
+    // ensure that they respect the node replacements.
+    Node* ValueInput(int i) {
+      return tracker_->ResolveReplacement(
+          NodeProperties::GetValueInput(current_node(), i));
+    }
+    Node* ContextInput() {
+      return tracker_->ResolveReplacement(
+          NodeProperties::GetContextInput(current_node()));
+    }
-}  // namespace impl
-
-using namespace impl;
-
-EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
-                                           Graph* graph, Zone* zone)
-    : stack_(zone),
-      object_analysis_(object_analysis),
-      graph_(graph),
-      status_(zone),
-      next_free_alias_(0),
-      status_stack_(zone),
-      aliases_(zone) {}
-
-bool EscapeStatusAnalysis::HasEntry(Node* node) {
-  return status_[node->id()] & (kTracked | kEscaped);
-}
-
-bool EscapeStatusAnalysis::IsVirtual(Node* node) {
-  return IsVirtual(node->id());
-}
-
-bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
-  return (status_[id] & kTracked) && !(status_[id] & kEscaped);
-}
-
-bool EscapeStatusAnalysis::IsEscaped(Node* node) {
-  return status_[node->id()] & kEscaped;
-}
-
-bool EscapeStatusAnalysis::IsAllocation(Node* node) {
-  return node->opcode() == IrOpcode::kAllocate ||
-         node->opcode() == IrOpcode::kFinishRegion;
-}
-
-bool EscapeStatusAnalysis::SetEscaped(Node* node) {
-  bool changed = !(status_[node->id()] & kEscaped);
-  status_[node->id()] |= kEscaped | kTracked;
-  return changed;
-}
-
-bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
-  return status_[id] & kInQueue;
-}
-
-void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
-  if (on_stack) {
-    status_[id] |= kInQueue;
-  } else {
-    status_[id] &= ~kInQueue;
-  }
-}
-
-void EscapeStatusAnalysis::ResizeStatusVector() {
-  if (status_.size() <= graph()->NodeCount()) {
-    status_.resize(graph()->NodeCount() * 1.1, kUnknown);
-  }
-}
-
-size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
-
-void EscapeStatusAnalysis::RunStatusAnalysis() {
-  // TODO(tebbi): This checks for faulty VirtualObject states, which can happen
-  // due to bug https://bugs.chromium.org/p/v8/issues/detail?id=6302. As a
-  // workaround, we set everything to escaped if such a faulty state was
-  // detected.
-  bool all_objects_complete = object_analysis_->AllObjectsComplete();
-  ResizeStatusVector();
-  while (!status_stack_.empty()) {
-    Node* node = status_stack_.back();
-    status_stack_.pop_back();
-    status_[node->id()] &= ~kOnStack;
-    Process(node);
-    status_[node->id()] |= kVisited;
-    if (!all_objects_complete) SetEscaped(node);
-  }
-}
-
-void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
-  DCHECK_NOT_NULL(node);
-  if (!(status_[node->id()] & kOnStack)) {
-    status_stack_.push_back(node);
-    status_[node->id()] |= kOnStack;
-  }
-}
-
-void EscapeStatusAnalysis::RevisitInputs(Node* node) {
-  for (Edge edge : node->input_edges()) {
-    Node* input = edge.to();
-    if (!(status_[input->id()] & kOnStack)) {
-      status_stack_.push_back(input);
-      status_[input->id()] |= kOnStack;
-    }
-  }
-}
-
-void EscapeStatusAnalysis::RevisitUses(Node* node) {
-  for (Edge edge : node->use_edges()) {
-    Node* use = edge.from();
-    if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
-      status_stack_.push_back(use);
-      status_[use->id()] |= kOnStack;
-    }
-  }
-}
+
+    void SetReplacement(Node* replacement) {
+      replacement_ = replacement;
+      vobject_ =
+          replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
+      TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
+            replacement->id());
+    }
-void EscapeStatusAnalysis::Process(Node* node) {
-  switch (node->opcode()) {
-    case IrOpcode::kAllocate:
-      ProcessAllocate(node);
-      break;
-    case IrOpcode::kFinishRegion:
-      ProcessFinishRegion(node);
-      break;
-    case IrOpcode::kStoreField:
-      ProcessStoreField(node);
-      break;
-    case IrOpcode::kStoreElement:
-      ProcessStoreElement(node);
-      break;
-    case IrOpcode::kLoadField:
-    case IrOpcode::kLoadElement: {
-      if (Node* rep = object_analysis_->GetReplacement(node)) {
-        if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
-          RevisitInputs(rep);
-          RevisitUses(rep);
-        }
-      } else {
-        Node* from = NodeProperties::GetValueInput(node, 0);
-        from = object_analysis_->ResolveReplacement(from);
-        if (SetEscaped(from)) {
-          TRACE("Setting #%d (%s) to escaped because of unresolved load #%i\n",
-                from->id(), from->op()->mnemonic(), node->id());
-          RevisitInputs(from);
-          RevisitUses(from);
-        }
-      }
-      RevisitUses(node);
-      break;
-    }
-    case IrOpcode::kPhi:
-      if (!HasEntry(node)) {
-        status_[node->id()] |= kTracked;
-        RevisitUses(node);
-      }
-      if (!IsAllocationPhi(node) && SetEscaped(node)) {
-        RevisitInputs(node);
-        RevisitUses(node);
-      }
-      CheckUsesForEscape(node);
-    default:
-      break;
-  }
-}
-
-bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
-  for (Edge edge : node->input_edges()) {
-    Node* input = edge.to();
-    if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
-    if (IsAllocation(input)) continue;
-    return false;
-  }
-  return true;
-}
-
-void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
-  Node* to = NodeProperties::GetValueInput(node, 0);
-  Node* val = NodeProperties::GetValueInput(node, 1);
-  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
-    RevisitUses(val);
-    RevisitInputs(val);
-    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
-          val->id(), val->op()->mnemonic(), to->id());
-  }
-}
-
-void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
-  Node* to = NodeProperties::GetValueInput(node, 0);
-  Node* val = NodeProperties::GetValueInput(node, 2);
-  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
-    RevisitUses(val);
-    RevisitInputs(val);
-    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
-          val->id(), val->op()->mnemonic(), to->id());
-  }
-}
-
-void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
-  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
-  if (!HasEntry(node)) {
-    status_[node->id()] |= kTracked;
-    TRACE("Created status entry for node #%d (%s)\n", node->id(),
-          node->op()->mnemonic());
-    NumberMatcher size(node->InputAt(0));
-    DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
-           node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
-           node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
-           node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
-    RevisitUses(node);
-    if (!size.HasValue() && SetEscaped(node)) {
-      TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
-      // This node is already known to escape, uses do not have to be checked
-      // for escape.
-      return;
-    }
-  }
-  if (CheckUsesForEscape(node, true)) {
-    RevisitUses(node);
-  }
-}
+
+    void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
+
+    ~Scope() {
+      if (replacement_ != tracker_->replacements_[current_node()] ||
+          vobject_ != tracker_->virtual_objects_.Get(current_node())) {
+        reduction()->set_value_changed();
+      }
+      tracker_->replacements_[current_node()] = replacement_;
+      tracker_->virtual_objects_.Set(current_node(), vobject_);
+    }
+
+   private:
+    EscapeAnalysisTracker* tracker_;
+    EffectGraphReducer* reducer_;
+    VirtualObject* vobject_ = nullptr;
+    Node* replacement_ = nullptr;
+  };
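Aside: SetEscaped above flips an object's escaped bit at most once and then revisits every node that queried that status, so the analysis converges as a monotone fixed point. A standalone sketch of that revisitation discipline (a worklist of plain ints; RevisitDependants and the reducer are modeled here, not V8 API):

    #include <cassert>
    #include <deque>
    #include <vector>

    // Toy fixed-point step: when an object escapes, every node that depended
    // on its status is re-queued, mirroring SetEscaped/RevisitDependants.
    struct Object {
      bool escaped = false;
      std::vector<int> dependants;  // node ids to revisit on a status change
    };

    int main() {
      std::vector<Object> objects(2);
      objects[0].dependants = {5, 6};
      std::deque<int> worklist;
      auto set_escaped = [&](int id) {
        if (objects[id].escaped) return;  // already escaped: no re-queue
        objects[id].escaped = true;
        for (int node : objects[id].dependants) worklist.push_back(node);
      };
      set_escaped(0);
      set_escaped(0);  // second call is a no-op
      assert(worklist.size() == 2);
    }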
  Node* GetReplacementOf(Node* node) { return replacements_[node]; }
  Node* ResolveReplacement(Node* node) {
    if (Node* replacement = GetReplacementOf(node)) {
      // Replacements cannot have replacements. This is important to ensure
      // re-visitation: If a replacement is replaced, then all nodes accessing
      // the replacement have to be updated.
      DCHECK_NULL(GetReplacementOf(replacement));
      return replacement;
    }
    return node;
  }

bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
  for (Edge edge : uses->use_edges()) {
    Node* use = edge.from();
    if (IsNotReachable(use)) continue;
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
switch (use->opcode()) {
case IrOpcode::kPhi:
if (phi_escaping && SetEscaped(rep)) {
TRACE(
"Setting #%d (%s) to escaped because of use by phi node "
"#%d (%s)\n",
rep->id(), rep->op()->mnemonic(), use->id(),
use->op()->mnemonic());
return true;
}
// Fallthrough.
case IrOpcode::kStoreField:
case IrOpcode::kLoadField:
case IrOpcode::kStoreElement:
case IrOpcode::kLoadElement:
case IrOpcode::kFrameState:
case IrOpcode::kStateValues:
case IrOpcode::kReferenceEqual:
case IrOpcode::kFinishRegion:
case IrOpcode::kCheckMaps:
if (IsEscaped(use) && SetEscaped(rep)) {
TRACE(
"Setting #%d (%s) to escaped because of use by escaping node "
"#%d (%s)\n",
rep->id(), rep->op()->mnemonic(), use->id(),
use->op()->mnemonic());
return true;
}
break;
case IrOpcode::kObjectIsSmi:
if (!IsAllocation(rep) && SetEscaped(rep)) {
TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
rep->id(), rep->op()->mnemonic(), use->id(),
use->op()->mnemonic());
return true;
}
break;
case IrOpcode::kSelect:
// TODO(mstarzinger): The following list of operators will eventually be
// handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
case IrOpcode::kConvertTaggedHoleToUndefined:
case IrOpcode::kStringEqual:
case IrOpcode::kStringLessThan:
case IrOpcode::kStringLessThanOrEqual:
case IrOpcode::kTypeGuard:
case IrOpcode::kPlainPrimitiveToNumber:
case IrOpcode::kPlainPrimitiveToWord32:
case IrOpcode::kPlainPrimitiveToFloat64:
case IrOpcode::kStringCharAt:
case IrOpcode::kStringCharCodeAt:
case IrOpcode::kSeqStringCharCodeAt:
case IrOpcode::kStringIndexOf:
case IrOpcode::kStringToLowerCaseIntl:
case IrOpcode::kStringToUpperCaseIntl:
case IrOpcode::kObjectIsCallable:
case IrOpcode::kObjectIsDetectableCallable:
case IrOpcode::kObjectIsNaN:
case IrOpcode::kObjectIsNonCallable:
case IrOpcode::kObjectIsNumber:
case IrOpcode::kObjectIsReceiver:
case IrOpcode::kObjectIsString:
case IrOpcode::kObjectIsSymbol:
case IrOpcode::kObjectIsUndetectable:
case IrOpcode::kNumberLessThan:
case IrOpcode::kNumberLessThanOrEqual:
case IrOpcode::kNumberEqual:
#define CASE(opcode) case IrOpcode::k##opcode:
SIMPLIFIED_NUMBER_BINOP_LIST(CASE)
SIMPLIFIED_NUMBER_UNOP_LIST(CASE)
#undef CASE
if (SetEscaped(rep)) {
TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
rep->id(), rep->op()->mnemonic(), use->id(),
use->op()->mnemonic());
return true;
}
break;
default:
DCHECK(use->op()->EffectInputCount() > 0 ||
uses->op()->EffectInputCount() == 0 ||
IrOpcode::IsJsOpcode(use->opcode()));
if (SetEscaped(rep)) {
TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
rep->id(), rep->op()->mnemonic(), use->id(),
use->op()->mnemonic());
return true;
}
    }
  }
  return false;
}
void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
    RevisitUses(node);
  }
}

 private:
  friend class EscapeAnalysisResult;
  static const size_t kMaxTrackedObjects = 100;
void EscapeStatusAnalysis::DebugPrint() {
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
      PrintF("Node #%d is %s\n", id,
             (status_[id] & kEscaped) ? "escaping" : "virtual");
    }
  }
}

  VirtualObject* NewVirtualObject(int size) {
    if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
    return new (zone_)
        VirtualObject(&variable_states_, next_object_id_++, size);
  }
  SparseSidetable<VirtualObject*> virtual_objects_;
  Sidetable<Node*> replacements_;
  VariableTracker variable_states_;
  VirtualObject::Id next_object_id_ = 0;
  JSGraph* const jsgraph_;
  Zone* const zone_;

  DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
};

EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
    : zone_(zone),
      slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
      common_(common),
      status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
      virtual_states_(zone),
      replacements_(zone),
      cycle_detection_(zone),
      cache_(nullptr) {
  // Type slot_not_analyzed_ manually.
  double v = OpParameter<double>(slot_not_analyzed_);
  NodeProperties::SetType(slot_not_analyzed_, Type::Range(v, v, zone));
}

EscapeAnalysis::~EscapeAnalysis() {}
bool EscapeAnalysis::Run() {
replacements_.resize(graph()->NodeCount());
status_analysis_->AssignAliases();
if (status_analysis_->AliasCount() > 0) {
cache_ = new (zone()) MergeCache(zone());
replacements_.resize(graph()->NodeCount());
status_analysis_->ResizeStatusVector();
RunObjectAnalysis();
status_analysis_->RunStatusAnalysis();
return true;
} else {
return false;
}
}
void EscapeStatusAnalysis::AssignAliases() {
  size_t max_size = 1024;
  size_t min_size = 32;
  size_t stack_size =
      std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
  stack_.reserve(stack_size);
  ResizeStatusVector();
  stack_.push_back(graph()->end());
  CHECK_LT(graph()->NodeCount(), kUntrackable);
  aliases_.resize(graph()->NodeCount(), kNotReachable);
  aliases_[graph()->end()->id()] = kUntrackable;
  status_stack_.reserve(8);
  TRACE("Discovering trackable nodes");
  while (!stack_.empty()) {
    Node* node = stack_.back();
    stack_.pop_back();
    switch (node->opcode()) {
      case IrOpcode::kAllocate:
        if (aliases_[node->id()] >= kUntrackable) {
          aliases_[node->id()] = NextAlias();
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
          EnqueueForStatusAnalysis(node);
        }
        break;
      case IrOpcode::kFinishRegion: {
        Node* allocate = NodeProperties::GetValueInput(node, 0);
        DCHECK_NOT_NULL(allocate);
        if (allocate->opcode() == IrOpcode::kAllocate) {
          if (aliases_[allocate->id()] >= kUntrackable) {
            if (aliases_[allocate->id()] == kNotReachable) {
              stack_.push_back(allocate);
            }
            aliases_[allocate->id()] = NextAlias();
            TRACE(" @%d:%s#%u", aliases_[allocate->id()],
                  allocate->op()->mnemonic(), allocate->id());
            EnqueueForStatusAnalysis(allocate);
          }
          aliases_[node->id()] = aliases_[allocate->id()];
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
        }
        break;
      }
      default:
        DCHECK_EQ(aliases_[node->id()], kUntrackable);
        break;
    }
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (aliases_[input->id()] == kNotReachable) {
        stack_.push_back(input);
        aliases_[input->id()] = kUntrackable;
      }
    }
  }
  TRACE("\n");
}

bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
  if (node->id() >= aliases_.size()) {
    return false;
  }
  return aliases_[node->id()] == kNotReachable;
}

EffectGraphReducer::EffectGraphReducer(
    Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
    : graph_(graph),
      state_(graph, kNumStates),
      revisit_(zone),
      stack_(zone),
      reduce_(reduce) {}

void EffectGraphReducer::ReduceFrom(Node* node) {
  // Perform DFS and eagerly trigger revisitation as soon as possible.
  // A stack element {node, i} indicates that input i of node should be visited
  // next.
  DCHECK(stack_.empty());
  stack_.push({node, 0});
  while (!stack_.empty()) {
    Node* current = stack_.top().node;
    int& input_index = stack_.top().input_index;
    if (input_index < current->InputCount()) {
      Node* input = current->InputAt(input_index);
      input_index++;
      switch (state_.Get(input)) {
        case State::kVisited:
          // The input is already reduced.
          break;
        case State::kOnStack:
          // The input is on the DFS stack right now, so it will be revisited
          // later anyway.
          break;
        case State::kUnvisited:
        case State::kRevisit: {
          state_.Set(input, State::kOnStack);
          stack_.push({input, 0});
          break;
        }
      }
    } else {
      stack_.pop();
      Reduction reduction;
      reduce_(current, &reduction);
      for (Edge edge : current->use_edges()) {
        // Mark uses for revisitation.
        Node* use = edge.from();
        if (NodeProperties::IsEffectEdge(edge)) {
          if (reduction.effect_changed()) Revisit(use);
        } else {
          if (reduction.value_changed()) Revisit(use);
        }
      }
      state_.Set(current, State::kVisited);
      // Process the revisitation buffer immediately. This improves performance
      // of escape analysis. Using a stack for {revisit_} reverses the order in
      // which the revisitation happens. This also seems to improve performance.
      while (!revisit_.empty()) {
        Node* revisit = revisit_.top();
        if (state_.Get(revisit) == State::kRevisit) {
          state_.Set(revisit, State::kOnStack);
          stack_.push({revisit, 0});
        }
        revisit_.pop();
      }
    }
  }
}

void EffectGraphReducer::Revisit(Node* node) {
  if (state_.Get(node) == State::kVisited) {
    TRACE("  Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
          node->id());
    state_.Set(node, State::kRevisit);
    revisit_.push(node);
  }
}
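The ReduceFrom/Revisit pair above is the heart of the rewritten analysis: reductions run to a fixed point, and a node's users are only re-queued when the output they actually consume (value vs. effect) changed. A minimal sketch of that scheme, assuming a monotone reduce function so the loop terminates (toy graph representation, not the V8 API):

#include <stack>
#include <vector>

// Each reduction reports whether the node's value and/or effect output
// changed; only the corresponding users are queued again.
struct ToyReduction {
  bool value_changed = false;
  bool effect_changed = false;
};

struct ToyNode {
  std::vector<int> value_users;
  std::vector<int> effect_users;
};

void FixpointReduce(std::vector<ToyNode>& graph,
                    ToyReduction (*reduce)(int id)) {
  std::stack<int> worklist;
  for (int i = 0; i < static_cast<int>(graph.size()); ++i) worklist.push(i);
  while (!worklist.empty()) {
    int id = worklist.top();
    worklist.pop();
    ToyReduction r = reduce(id);
    // Requeue only the users that can observe the change; this is what keeps
    // the fixed-point iteration cheap compared to revisiting all users.
    if (r.value_changed)
      for (int use : graph[id].value_users) worklist.push(use);
    if (r.effect_changed)
      for (int use : graph[id].effect_users) worklist.push(use);
  }
}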
VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
                                 Zone* zone)
    : zone_(zone),
      graph_(graph),
      table_(zone, State(zone)),
      buffer_(zone),
      reducer_(reducer) {}

VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
                              Reduction* reduction)
    : ReduceScope(node, reduction),
      states_(states),
      current_state_(states->zone_) {
  switch (node->opcode()) {
    case IrOpcode::kEffectPhi:
      current_state_ = states_->MergeInputs(node);
      break;
    default:
      int effect_inputs = node->op()->EffectInputCount();
      if (effect_inputs == 1) {
        current_state_ =
            states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
      } else {
        DCHECK_EQ(0, effect_inputs);
      }
  }
}

VariableTracker::Scope::~Scope() {
  if (!reduction()->effect_changed() &&
      states_->table_.Get(current_node()) != current_state_) {
    reduction()->set_effect_changed();
  }
  states_->table_.Set(current_node(), current_state_);
}

VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
  // A variable that is mapped to [nullptr] was not assigned a value on every
  // execution path to the current effect phi. Relying on the invariant that
  // every variable is initialized (at least with a sentinel like the Dead
  // node), this means that the variable initialization does not dominate the
  // current point. So for loop effect phis, we can keep nullptr for a variable
  // as long as the first input of the loop has nullptr for this variable. For
  // non-loop effect phis, we can even keep it nullptr as long as any input has
  // nullptr.
  DCHECK(effect_phi->opcode() == IrOpcode::kEffectPhi);
  int arity = effect_phi->op()->EffectInputCount();
  Node* control = NodeProperties::GetControlInput(effect_phi, 0);
  TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
  bool is_loop = control->opcode() == IrOpcode::kLoop;
  buffer_.reserve(arity + 1);

  State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
  State result = first_input;
  for (std::pair<Variable, Node*> var_value : first_input) {
    if (Node* value = var_value.second) {
      Variable var = var_value.first;
      TRACE("var %i:\n", var.id_);
      buffer_.clear();
      buffer_.push_back(value);
      bool identical_inputs = true;
      int num_defined_inputs = 1;
      TRACE("  input 0: %s#%d\n", value->op()->mnemonic(), value->id());
      for (int i = 1; i < arity; ++i) {
        Node* next_value =
            table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
        if (next_value != value) identical_inputs = false;
        if (next_value != nullptr) {
          num_defined_inputs++;
          TRACE("  input %i: %s#%d\n", i, next_value->op()->mnemonic(),
                next_value->id());
        } else {
          TRACE("  input %i: nullptr\n", i);
        }
        buffer_.push_back(next_value);
      }

      Node* old_value = table_.Get(effect_phi).Get(var);
      if (old_value) {
        TRACE("  old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
      } else {
        TRACE("  old: nullptr\n");
      }
      // Reuse a previously created phi node if possible.
      if (old_value && old_value->opcode() == IrOpcode::kPhi &&
          NodeProperties::GetControlInput(old_value, 0) == control) {
        // Since a phi node can never dominate its control node,
        // [old_value] cannot originate from the inputs. Thus [old_value]
        // must have been created by a previous reduction of this [effect_phi].
        for (int i = 0; i < arity; ++i) {
          NodeProperties::ReplaceValueInput(
              old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
          // This change cannot affect the rest of the reducer, so there is no
          // need to trigger additional revisitations.
        }
        result.Set(var, old_value);
      } else {
        if (num_defined_inputs == 1 && is_loop) {
          // For loop effect phis, the variable initialization dominates iff it
          // dominates the first input.
          DCHECK_EQ(2, arity);
          DCHECK_EQ(value, buffer_[0]);
          result.Set(var, value);
        } else if (num_defined_inputs < arity) {
          // If the variable is undefined on some input of this non-loop effect
          // phi, then its initialization does not dominate this point.
          result.Set(var, nullptr);
        } else {
          DCHECK_EQ(num_defined_inputs, arity);
          // We only create a phi if the values are different.
          if (identical_inputs) {
            result.Set(var, value);
          } else {
            TRACE("Creating new phi\n");
            buffer_.push_back(control);
            Node* phi = graph_->graph()->NewNode(
                graph_->common()->Phi(MachineRepresentation::kTagged, arity),
                arity + 1, &buffer_.front());
            // TODO(tebbi): Computing precise types here is tricky, because of
            // the necessary revisitations. If we really need this, we should
            // probably do it afterwards.
            NodeProperties::SetType(phi, Type::Any());
            reducer_->AddRoot(phi);
            result.Set(var, phi);
          }
        }
      }
#ifdef DEBUG
      if (Node* result_node = result.Get(var)) {
        TRACE("  result: %s#%d\n", result_node->op()->mnemonic(),
              result_node->id());
      } else {
        TRACE("  result: nullptr\n");
      }
#endif
    }
  }
  return result;
}

bool EscapeAnalysis::AllObjectsComplete() {
  for (VirtualState* state : virtual_states_) {
    if (state) {
      for (size_t i = 0; i < state->size(); ++i) {
        if (VirtualObject* object = state->VirtualObjectFromAlias(i)) {
          if (!object->AllFieldsClear()) {
            for (size_t i = 0; i < object->field_count(); ++i) {
              if (object->GetField(i) == nullptr) {
                return false;
              }
            }
          }
        }
      }
    }
  }
  return true;
}

void EscapeAnalysis::RunObjectAnalysis() {
  virtual_states_.resize(graph()->NodeCount());
  ZoneDeque<Node*> queue(zone());
  queue.push_back(graph()->start());
  ZoneVector<Node*> danglers(zone());
  while (!queue.empty()) {
    Node* node = queue.back();
    queue.pop_back();
    status_analysis_->SetInQueue(node->id(), false);
    if (Process(node)) {
      for (Edge edge : node->use_edges()) {
        Node* use = edge.from();
        if (status_analysis_->IsNotReachable(use)) {
          continue;
        }
        if (NodeProperties::IsEffectEdge(edge)) {
          // Iteration order: depth first, but delay phis.
          // We need DFS to avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
          if (use->opcode() == IrOpcode::kEffectPhi) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_front(use);
            }
          } else if ((use->opcode() != IrOpcode::kLoadField &&
                      use->opcode() != IrOpcode::kLoadElement) ||
                     !status_analysis_->IsDanglingEffectNode(use)) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_back(use);
            }
          } else {
            danglers.push_back(use);
          }
        }
      }
      // Danglers need to be processed immediately, even if they are
      // on the stack. Since they do not have effect outputs,
      // we don't have to track whether they are on the stack.
      queue.insert(queue.end(), danglers.begin(), danglers.end());
      danglers.clear();
    }
  }
#ifdef DEBUG
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
#endif
}

bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
  if (status_[node->id()] & kDanglingComputed) {
    return status_[node->id()] & kDangling;
  }
  if (node->op()->EffectInputCount() == 0 ||
      node->op()->EffectOutputCount() == 0 ||
      (node->op()->EffectInputCount() == 1 &&
       NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
    // The start node is used as sentinel for nodes that are in general
    // effectful, but of which an analysis has determined that they do not
    // produce effects in this instance. We don't consider these nodes dangling.
    status_[node->id()] |= kDanglingComputed;
    return false;
  }
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      status_[node->id()] |= kDanglingComputed;
      return false;
    }
  }
  status_[node->id()] |= kDanglingComputed | kDangling;
  return true;
}

bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
  if (status_[node->id()] & kBranchPointComputed) {
    return status_[node->id()] & kBranchPoint;
  }
  int count = 0;
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      if ((use->opcode() == IrOpcode::kLoadField ||
           use->opcode() == IrOpcode::kLoadElement ||
           use->opcode() == IrOpcode::kLoad) &&
          IsDanglingEffectNode(use))
        continue;
      if (++count > 1) {
        status_[node->id()] |= kBranchPointComputed | kBranchPoint;
        return true;
      }
    }
  }
  status_[node->id()] |= kBranchPointComputed;
  return false;
}
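Stepping back to MergeInputs above: for each tracked variable, the per-variable case analysis at a non-loop effect phi reduces to three outcomes. A compact sketch over plain ints (illustration only; it ignores the loop-phi and phi-reuse special cases):

#include <vector>

// kUndefined stands in for nullptr, kNeedPhi for the case where a new phi
// node must be created.
enum class MergeKind { kValue, kUndefined, kNeedPhi };

struct MergeResult {
  MergeKind kind;
  int value;  // meaningful only for kValue
};

MergeResult MergeVariable(const std::vector<int>& inputs, int undefined) {
  int first = inputs.front();
  bool identical = true;
  for (int v : inputs) {
    // Undefined on any input: initialization does not dominate this point.
    if (v == undefined) return {MergeKind::kUndefined, undefined};
    if (v != first) identical = false;
  }
  // All inputs defined: either they agree, or a phi is needed.
  return identical ? MergeResult{MergeKind::kValue, first}
                   : MergeResult{MergeKind::kNeedPhi, undefined};
}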
namespace {

bool HasFrameStateInput(const Operator* op) {
  if (op->opcode() == IrOpcode::kCall ||
      op->opcode() == IrOpcode::kCallWithCallerSavedRegisters ||
      op->opcode() == IrOpcode::kTailCall) {
    const CallDescriptor* d = CallDescriptorOf(op);
    return d->NeedsFrameState();
  } else {
    return OperatorProperties::HasFrameStateInput(op);
  }
}

}  // namespace

namespace {

int OffsetOfFieldAccess(const Operator* op) {
  DCHECK(op->opcode() == IrOpcode::kLoadField ||
         op->opcode() == IrOpcode::kStoreField);
  FieldAccess access = FieldAccessOf(op);
  return access.offset;
}

Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
  DCHECK(op->opcode() == IrOpcode::kLoadElement ||
         op->opcode() == IrOpcode::kStoreElement);
  Type* index_type = NodeProperties::GetType(index_node);
  if (!index_type->Is(Type::Number())) return Nothing<int>();
  double max = index_type->Max();
  double min = index_type->Min();
  int index = static_cast<int>(min);
  if (!(index == min && index == max)) return Nothing<int>();
  ElementAccess access = ElementAccessOf(op);
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  return Just(access.header_size +
              (index << ElementSizeLog2Of(
                   access.machine_type.representation())));
}
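The byte offset returned by OffsetOfElementsAccess is the header size plus the index shifted left by the element size log2. A tiny worked example with made-up numbers (a 16-byte header and 8-byte, i.e. 2^3-byte, elements put element 2 at byte 32):

#include <cassert>

int ElementOffset(int header_size, int element_size_log2, int index) {
  return header_size + (index << element_size_log2);
}

int main() {
  assert(ElementOffset(16, 3, 2) == 32);  // 16 + (2 << 3)
  return 0;
}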
void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
                JSGraph* jsgraph) {
  switch (op->opcode()) {
    case IrOpcode::kAllocate: {
      NumberMatcher size(current->ValueInput(0));
      if (!size.HasValue()) break;
      int size_int = static_cast<int>(size.Value());
      if (size_int != size.Value()) break;
      if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
        // Initialize with dead nodes as a sentinel for uninitialized memory.
        for (Variable field : *vobject) {
          current->Set(field, jsgraph->Dead());
        }
      }
      break;
    }
    case IrOpcode::kFinishRegion:
      current->SetVirtualObject(current->ValueInput(0));
      break;
    case IrOpcode::kStoreField: {
      Node* object = current->ValueInput(0);
      Node* value = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(object);
        current->SetEscaped(value);
      }
      break;
    }

bool EscapeAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocation(node);
      break;
    case IrOpcode::kBeginRegion:
      ForwardVirtualState(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kLoadField:
      ProcessLoadField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadElement:
      ProcessLoadElement(node);
      break;
    case IrOpcode::kCheckMaps:
      ProcessCheckMaps(node);
      break;
    case IrOpcode::kStart:
      ProcessStart(node);
      break;
    case IrOpcode::kEffectPhi:
      return ProcessEffectPhi(node);
      break;
    default:
      if (node->op()->EffectInputCount() > 0) {
        ForwardVirtualState(node);
      }
      ProcessAllocationUsers(node);
      break;
  }
  if (HasFrameStateInput(node->op())) {
    virtual_states_[node->id()]->SetCopyRequired();
  }
  return true;
}
void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    Node* use = edge.from();
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (node->opcode()) {
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kObjectIsSmi:
        break;
      case IrOpcode::kCheckMaps: {
        CheckMapsParameters params = CheckMapsParametersOf(node->op());
        if (params.flags() == CheckMapsFlag::kNone) break;
      }  // Fallthrough.
      default:
        VirtualState* state = virtual_states_[node->id()];
        if (VirtualObject* obj =
                GetVirtualObject(state, ResolveReplacement(input))) {
          if (!obj->AllFieldsClear()) {
            obj = CopyForModificationAt(obj, state, node);
            obj->ClearAllFields();
            TRACE("Cleared all fields of @%d:#%d\n",
                  status_analysis_->GetAlias(obj->id()), obj->id());
          }
        }
        break;
    }
  }
}
VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
  if (state->owner() != node) {
    VirtualState* new_state = new (zone()) VirtualState(node, *state);
    virtual_states_[node->id()] = new_state;
    TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
          static_cast<void*>(state), static_cast<void*>(new_state),
          node->op()->mnemonic(), node->id());
    return new_state;
  }
  return state;
}

VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
  if (obj->NeedCopyForModification()) {
    state = CopyForModificationAt(state, node);
    // TODO(tebbi): this copies the complete virtual state. Replace with a more
    // precise analysis of which objects are actually affected by the change.
    Alias changed_alias = status_analysis_->GetAlias(obj->id());
    for (Alias alias = 0; alias < state->size(); ++alias) {
      if (VirtualObject* next_obj = state->VirtualObjectFromAlias(alias)) {
        if (alias != changed_alias && next_obj->NeedCopyForModification()) {
          state->Copy(next_obj, alias);
        }
      }
    }
    return state->Copy(obj, changed_alias);
  }
  return obj;
}

    case IrOpcode::kStoreElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      Node* value = current->ValueInput(2);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(value);
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadField: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
        current->SetReplacement(current->Get(var));
      } else {
        // TODO(tebbi): At the moment, we mark objects as escaping if there
        // is a load from an invalid location to avoid dead nodes. This is a
        // workaround that should be removed once we can handle dead nodes
        // everywhere.
        current->SetEscaped(object);
      }
      break;
    }

void EscapeAnalysis::ForwardVirtualState(Node* node) {
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
#ifdef DEBUG
  if (node->opcode() != IrOpcode::kLoadField &&
      node->opcode() != IrOpcode::kLoadElement &&
      node->opcode() != IrOpcode::kLoad &&
      status_analysis_->IsDanglingEffectNode(node)) {
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
    UNREACHABLE();
  }
#endif  // DEBUG
  Node* effect = NodeProperties::GetEffectInput(node);
  DCHECK_NOT_NULL(virtual_states_[effect->id()]);
  if (virtual_states_[node->id()]) {
    TRACE("Updating virtual state %p at %s#%d from virtual state %p at %s#%d\n",
          static_cast<void*>(virtual_states_[node->id()]),
          node->op()->mnemonic(), node->id(),
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id());
    virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
                                            zone());
  } else {
    virtual_states_[node->id()] = virtual_states_[effect->id()];
    TRACE("Forwarding object state %p from %s#%d to %s#%d",
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
          node->id());
    if (status_analysis_->IsEffectBranchPoint(effect)) {
      virtual_states_[node->id()]->SetCopyRequired();
      TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
            effect->id());
    }
    TRACE("\n");
  }
}
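The kStoreField/kLoadField handling in the new ReduceNode treats every field of a tracked object as an SSA-like variable: stores become variable assignments and can be deleted, loads become uses of the variable's current value. A toy model of that idea (std::string stands in for an IR node; not V8 code):

#include <map>
#include <string>

struct ToyVirtualObject {
  std::map<int, std::string> field_value;  // offset -> last stored value
};

void ToyStoreField(ToyVirtualObject& obj, int offset, std::string value) {
  obj.field_value[offset] = value;  // the store itself can be deleted
}

// Returns the replacement for a load, or an empty string if the field was
// never initialized on this path (the analysis would then keep the load).
std::string ToyLoadField(const ToyVirtualObject& obj, int offset) {
  auto it = obj.field_value.find(offset);
  return it == obj.field_value.end() ? std::string() : it->second;
}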
    case IrOpcode::kLoadElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->SetReplacement(current->Get(var));
      } else {
        current->SetEscaped(object);
      }
      break;
    }

void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  virtual_states_[node->id()] =
      new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
}

bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    mergeState =
        new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
          static_cast<void*>(mergeState));
  }

  cache_->Clear();

  TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
        static_cast<void*>(mergeState));

  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
      if (state == mergeState) {
        mergeState = new (zone())
            VirtualState(node, zone(), status_analysis_->AliasCount());
        virtual_states_[node->id()] = mergeState;
        changed = true;
      }
    }
    TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
          input->op()->mnemonic());
  }
  TRACE("\n");

  if (cache_->states().size() == 0) {
    return changed;
  }

  changed =
      mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;

  TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");

  if (changed) {
    status_analysis_->ResizeStatusVector();
  }
  return changed;
}
void EscapeAnalysis::ProcessAllocation(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
ForwardVirtualState(node);
VirtualState* state = virtual_states_[node->id()];
Alias alias = status_analysis_->GetAlias(node->id());
// Check if we have already processed this node.
if (state->VirtualObjectFromAlias(alias)) {
return;
}
if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
state = CopyForModificationAt(state, node);
}
NumberMatcher size(node->InputAt(0));
DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
if (size.HasValue()) {
VirtualObject* obj = new (zone()) VirtualObject(
node->id(), state, zone(), size.Value() / kPointerSize, false);
state->SetVirtualObject(alias, obj);
} else {
state->SetVirtualObject(
alias, new (zone()) VirtualObject(node->id(), state, zone()));
}
}
void EscapeAnalysis::ProcessFinishRegion(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
ForwardVirtualState(node);
Node* allocation = NodeProperties::GetValueInput(node, 0);
if (allocation->opcode() == IrOpcode::kAllocate) {
VirtualState* state = virtual_states_[node->id()];
VirtualObject* obj =
state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
DCHECK_NOT_NULL(obj);
obj->SetInitialized();
}
}
Node* EscapeAnalysis::replacement(Node* node) {
if (node->id() >= replacements_.size()) return nullptr;
return replacements_[node->id()];
}
bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
bool changed = replacements_[node->id()] != rep;
replacements_[node->id()] = rep;
return changed;
}
bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
Node* rep) {
if (SetReplacement(node, rep)) {
if (rep) {
TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
rep->op()->mnemonic());
} else {
TRACE("Replacement of #%d cleared\n", node->id());
}
return true;
}
return false;
}
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
while (replacement(node)) {
node = replacement(node);
}
return node;
}
Node* EscapeAnalysis::GetReplacement(Node* node) {
Node* result = nullptr;
while (replacement(node)) {
node = result = replacement(node);
}
return result;
}
bool EscapeAnalysis::IsVirtual(Node* node) {
if (node->id() >= status_analysis_->GetStatusVectorSize()) {
return false;
}
return status_analysis_->IsVirtual(node);
}
bool EscapeAnalysis::IsEscaped(Node* node) {
if (node->id() >= status_analysis_->GetStatusVectorSize()) {
return false;
}
return status_analysis_->IsEscaped(node);
}
bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
DCHECK(IsVirtual(left) && IsVirtual(right));
left = ResolveReplacement(left);
right = ResolveReplacement(right);
if (IsEquivalentPhi(left, right)) {
return true;
}
return false;
}
namespace {
#ifdef DEBUG
bool IsOffsetForFieldAccessCorrect(const FieldAccess& access) {
#if V8_TARGET_LITTLE_ENDIAN
return (access.offset % kPointerSize) == 0;
#else
return ((access.offset +
(1 << ElementSizeLog2Of(access.machine_type.representation()))) %
kPointerSize) == 0;
#endif
}
#endif
int OffsetForFieldAccess(Node* node) {
FieldAccess access = FieldAccessOf(node->op());
DCHECK(IsOffsetForFieldAccessCorrect(access));
return access.offset / kPointerSize;
}
int OffsetForElementAccess(Node* node, int index) {
ElementAccess access = ElementAccessOf(node->op());
DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
kPointerSizeLog2);
DCHECK_EQ(access.header_size % kPointerSize, 0);
return access.header_size / kPointerSize + index;
}
} // namespace
void EscapeAnalysis::ProcessLoadField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
ForwardVirtualState(node);
Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
VirtualState* state = virtual_states_[node->id()];
if (VirtualObject* object = GetVirtualObject(state, from)) {
if (!object->IsTracked()) return;
int offset = OffsetForFieldAccess(node);
if (static_cast<size_t>(offset) >= object->field_count()) {
// We have a load from a field that is not inside the {object}. This
// can only happen with conflicting type feedback and for dead {node}s.
// For now, we just mark the {object} as escaping.
// TODO(turbofan): Consider introducing an Undefined or None operator
// that we can replace this load with, since we know it's dead code.
if (status_analysis_->SetEscaped(from)) {
TRACE(
"Setting #%d (%s) to escaped because load field #%d from "
"offset %d outside of object\n",
from->id(), from->op()->mnemonic(), node->id(), offset);
      }
      return;
    }
    Node* value = object->GetField(offset);
    if (value) {
      value = ResolveReplacement(value);
    }
    // Record that the load has this alias.
    UpdateReplacement(state, node, value);
  } else {
    UpdateReplacement(state, node, nullptr);
  }
}

void EscapeAnalysis::ProcessCheckMaps(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kCheckMaps);
  ForwardVirtualState(node);
  Node* checked = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  if (FLAG_turbo_experimental) {
    VirtualState* state = virtual_states_[node->id()];
    if (VirtualObject* object = GetVirtualObject(state, checked)) {
      if (!object->IsTracked()) {
        if (status_analysis_->SetEscaped(node)) {
          TRACE(
              "Setting #%d (%s) to escaped because checked object #%i is not "
              "tracked\n",
              node->id(), node->op()->mnemonic(), object->id());
        }
        return;
      }
      CheckMapsParameters params = CheckMapsParametersOf(node->op());

      Node* value = object->GetField(HeapObject::kMapOffset / kPointerSize);
      if (value) {
        value = ResolveReplacement(value);
        // TODO(tebbi): We want to extend this beyond constant folding with a
        // CheckMapsValue operator that takes the load-eliminated map value as
        // input.
        if (value->opcode() == IrOpcode::kHeapConstant &&
            params.maps().contains(ZoneHandleSet<Map>(bit_cast<Handle<Map>>(
                OpParameter<Handle<HeapObject>>(value))))) {
          TRACE("CheckMaps #%i seems to be redundant (until now).\n",
                node->id());
          return;
        }
      }
    }
  }
  if (status_analysis_->SetEscaped(node)) {
    TRACE("Setting #%d (%s) to escaped (checking #%i)\n", node->id(),
          node->op()->mnemonic(), checked->id());
  }
}

    case IrOpcode::kTypeGuard: {
      // The type-guard is re-introduced in the final reducer if the types
      // don't match.
      current->SetReplacement(current->ValueInput(0));
      break;
    }
    case IrOpcode::kReferenceEqual: {
      Node* left = current->ValueInput(0);
      Node* right = current->ValueInput(1);
      const VirtualObject* left_object = current->GetVirtualObject(left);
      const VirtualObject* right_object = current->GetVirtualObject(right);
      Node* replacement = nullptr;
      if (left_object && !left_object->HasEscaped()) {
        if (right_object && !right_object->HasEscaped() &&
            left_object->id() == right_object->id()) {
          replacement = jsgraph->TrueConstant();
        } else {
          replacement = jsgraph->FalseConstant();
        }
      } else if (right_object && !right_object->HasEscaped()) {
        replacement = jsgraph->FalseConstant();
      }
      if (replacement) {
        // TODO(tebbi) This is a workaround for uninhabited types. If we
        // replaced a value of uninhabited type with a constant, we would
        // widen the type of the node. This could produce inconsistent
        // types (which might confuse representation selection). We get
        // around this by refusing to constant-fold and escape-analyze
        // if the type is not inhabited.
        if (NodeProperties::GetType(left)->IsInhabited() &&
            NodeProperties::GetType(right)->IsInhabited()) {
          current->SetReplacement(replacement);
        } else {
          current->SetEscaped(left);
          current->SetEscaped(right);
        }
      }
      break;
    }
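The kReferenceEqual folding above can be summarized as a small decision table; a sketch over booleans (illustration only, ignoring the uninhabited-type workaround):

// left_virtual/right_virtual mean "tracked and not escaped"; same_object
// means both sides are the same virtual object. Returns +1 for true,
// 0 for false, -1 for "cannot fold".
int FoldReferenceEqual(bool left_virtual, bool right_virtual,
                       bool same_object) {
  if (left_virtual) {
    // A virtual object has no heap identity yet, so it can only be equal
    // to itself; comparison with anything else folds to false.
    return (right_virtual && same_object) ? 1 : 0;
  }
  if (right_virtual) return 0;  // virtual vs. arbitrary non-virtual: false
  return -1;  // neither side is virtual: leave the comparison alone
}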
    case IrOpcode::kCheckMaps: {
      CheckMapsParameters params = CheckMapsParametersOf(op);
      Node* checked = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(checked);
      Variable map_field;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field)) {
        Node* map = current->Get(map_field);
        if (map) {
          Type* const map_type = NodeProperties::GetType(map);
          if (map_type->IsHeapConstant() &&
              params.maps().contains(ZoneHandleSet<Map>(bit_cast<Handle<Map>>(
                  map_type->AsHeapConstant()->Value())))) {
            current->MarkForDeletion();
            break;
          }
        }
      }
      current->SetEscaped(checked);
      break;
    }

void EscapeAnalysis::ProcessLoadElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, from)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* value = object->GetField(offset);
      if (value) {
        value = ResolveReplacement(value);
      }
      // Record that the load has this alias.
      UpdateReplacement(state, node, value);
    } else {
      UpdateReplacement(state, node, nullptr);
    }
  } else {
    // We have a load from a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(from)) {
      TRACE(
          "Setting #%d (%s) to escaped because load element #%d from non-const "
          "index #%d (%s)\n",
          from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
  }
}
void EscapeAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, to)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) {
      // We have a store to a field that is not inside the {object}. This
      // can only happen with conflicting type feedback and for dead {node}s.
      // For now, we just mark the {object} as escaping.
      // TODO(turbofan): Consider just eliminating the store in the reducer
      // pass, as it's dead code anyways.
      if (status_analysis_->SetEscaped(to)) {
        TRACE(
            "Setting #%d (%s) to escaped because store field #%d to "
            "offset %d outside of object\n",
            to->id(), to->op()->mnemonic(), node->id(), offset);
      }
      return;
    }
    Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
    // TODO(mstarzinger): The following is a workaround to not track some well
    // known raw fields. We only ever store default initial values into these
    // fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
    if (val->opcode() == IrOpcode::kInt32Constant ||
        val->opcode() == IrOpcode::kInt64Constant) {
      DCHECK(FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
      val = slot_not_analyzed_;
    }
    object = CopyForModificationAt(object, state, node);
    if (object->GetField(offset) != val) {
      object->SetField(offset, val);
    }
  }
}

    case IrOpcode::kCheckHeapObject: {
      Node* checked = current->ValueInput(0);
      switch (checked->opcode()) {
        case IrOpcode::kAllocate:
        case IrOpcode::kFinishRegion:
        case IrOpcode::kHeapConstant:
          current->SetReplacement(checked);
          break;
        default:
          current->SetEscaped(checked);
          break;
      }
      break;
    }
    case IrOpcode::kMapGuard: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      if (vobject && !vobject->HasEscaped()) {
        current->MarkForDeletion();
      }
      break;
    }
    case IrOpcode::kStateValues:
    case IrOpcode::kFrameState:
      // These uses are always safe.
      break;
    default: {
      // For unknown nodes, treat all value inputs as escaping.
      int value_input_count = op->ValueInputCount();
      for (int i = 0; i < value_input_count; ++i) {
        Node* input = current->ValueInput(i);
        current->SetEscaped(input);
      }
      if (OperatorProperties::HasContextInput(op)) {
        current->SetEscaped(current->ContextInput());
      }
      break;
    }
  }
}

}  // namespace

void EscapeAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  VirtualState* state = virtual_states_[node->id()];
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
      object = CopyForModificationAt(object, state, node);
      if (object->GetField(offset) != val) {
        object->SetField(offset, val);
      }
    }
  } else {
    // We have a store to a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(to)) {
      TRACE(
          "Setting #%d (%s) to escaped because store element #%d to non-const "
          "index #%d (%s)\n",
          to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      object = CopyForModificationAt(object, state, node);
      if (!object->AllFieldsClear()) {
        object->ClearAllFields();
        TRACE("Cleared all fields of @%d:#%d\n",
              status_analysis_->GetAlias(object->id()), object->id());
      }
    }
  }
}
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
if ((node->opcode() == IrOpcode::kFinishRegion ||
node->opcode() == IrOpcode::kAllocate) &&
IsVirtual(node)) {
if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
ResolveReplacement(node))) {
if (Node* object_state = vobj->GetObjectState()) {
return object_state;
} else {
cache_->fields().clear();
for (size_t i = 0; i < vobj->field_count(); ++i) {
if (Node* field = vobj->GetField(i)) {
cache_->fields().push_back(ResolveReplacement(field));
} else {
return nullptr;
}
}
int input_count = static_cast<int>(cache_->fields().size());
Node* new_object_state =
graph()->NewNode(common()->ObjectState(vobj->id(), input_count),
input_count, &cache_->fields().front());
NodeProperties::SetType(new_object_state, Type::OtherInternal());
vobj->SetObjectState(new_object_state);
TRACE(
"Creating object state #%d for vobj %p (from node #%d) at effect "
"#%d\n",
new_object_state->id(), static_cast<void*>(vobj), node->id(),
effect->id());
// Now fix uses of other objects.
for (size_t i = 0; i < vobj->field_count(); ++i) {
if (Node* field = vobj->GetField(i)) {
if (Node* field_object_state =
GetOrCreateObjectState(effect, field)) {
NodeProperties::ReplaceValueInput(
new_object_state, field_object_state, static_cast<int>(i));
}
}
}
return new_object_state;
      }
    }
  }
  return nullptr;
}
bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
                                               ResolveReplacement(node))) {
      if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
      cycle_detection_.insert(vobj);
      bool cycle_detected = false;
      for (size_t i = 0; i < vobj->field_count(); ++i) {
        if (Node* field = vobj->GetField(i)) {
          if (IsCyclicObjectState(effect, field)) cycle_detected = true;
        }
      }
      cycle_detection_.erase(vobj);
      return cycle_detected;
    }
  }
  return false;
}

void EscapeAnalysis::DebugPrintState(VirtualState* state) {
  PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
  for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
    if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
      PrintF("  Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
             object->field_count());
      for (size_t i = 0; i < object->field_count(); ++i) {
        if (Node* f = object->GetField(i)) {
          PrintF("    Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
        }
      }
    }
  }
}

void EscapeAnalysis::DebugPrint() {
  ZoneVector<VirtualState*> object_states(zone());
  for (NodeId id = 0; id < virtual_states_.size(); id++) {
    if (VirtualState* states = virtual_states_[id]) {
      if (std::find(object_states.begin(), object_states.end(), states) ==
          object_states.end()) {
        object_states.push_back(states);
      }
    }
  }
  for (size_t n = 0; n < object_states.size(); n++) {
    DebugPrintState(object_states[n]);
  }
}

VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
                                                Node* node) {
  if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
  Alias alias = status_analysis_->GetAlias(node->id());
  if (alias >= state->size()) return nullptr;
  return state->VirtualObjectFromAlias(alias);
}

bool EscapeAnalysis::ExistsVirtualAllocate() {
  for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
    Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
    if (alias < EscapeStatusAnalysis::kUntrackable) {
      if (status_analysis_->IsVirtual(static_cast<int>(id))) {
        return true;
      }
    }
  }
  return false;
}

Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }

void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
  const Operator* op = node->op();
  TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());

  EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
  ReduceNode(op, &current, jsgraph());
}

EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, Zone* zone)
    : EffectGraphReducer(
          jsgraph->graph(),
          [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
          zone),
      tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
      jsgraph_(jsgraph) {}

Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
  return tracker_->GetReplacementOf(node);
}

Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
                                                  int field, Node* effect) {
  return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
                                        effect);
}

const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
  return tracker_->virtual_objects_.Get(node);
}

VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
                             int size)
    : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
  DCHECK(size % kPointerSize == 0);
  TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
  int num_fields = size / kPointerSize;
  fields_.reserve(num_fields);
  for (int i = 0; i < num_fields; ++i) {
    fields_.push_back(var_states->NewVariable());
  }
}

#undef TRACE

}  // namespace compiler
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_ESCAPE_ANALYSIS_H_

#include "src/compiler/graph.h"
#include "src/globals.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
class EscapeStatusAnalysis;
namespace impl {
class MergeCache;
class VirtualState;
class VirtualObject;
};  // namespace impl

// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_ESCAPE_ANALYSIS_H_

#include "src/base/functional.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/persistent-map.h"
#include "src/globals.h"

namespace v8 {
namespace internal {
namespace compiler {

class CommonOperatorBuilder;
class VariableTracker;
class EscapeAnalysisTracker;

// {EffectGraphReducer} reduces up to a fixed point. It distinguishes changes to
// the effect output of a node from changes to the value output to reduce the
// number of revisitations.
class EffectGraphReducer {
 public:
  class Reduction {
   public:
bool value_changed() const { return value_changed_; }
void set_value_changed() { value_changed_ = true; }
bool effect_changed() const { return effect_changed_; }
void set_effect_changed() { effect_changed_ = true; }
private:
bool value_changed_ = false;
bool effect_changed_ = false;
};
EffectGraphReducer(Graph* graph,
std::function<void(Node*, Reduction*)> reduce, Zone* zone);
void ReduceGraph() { ReduceFrom(graph_->end()); }
// Mark node for revisitation.
void Revisit(Node* node);
// Add a new root node to start reduction from. This is useful if the reducer
// adds nodes that are not yet reachable, but should already be considered
// part of the graph.
void AddRoot(Node* node) {
DCHECK(state_.Get(node) == State::kUnvisited);
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
bool Complete() { return stack_.empty() && revisit_.empty(); }
private:
struct NodeState {
Node* node;
int input_index;
};
void ReduceFrom(Node* node);
enum class State : uint8_t { kUnvisited = 0, kRevisit, kOnStack, kVisited };
const uint8_t kNumStates = static_cast<uint8_t>(State::kVisited) + 1;
Graph* graph_;
NodeMarker<State> state_;
ZoneStack<Node*> revisit_;
ZoneStack<NodeState> stack_;
std::function<void(Node*, Reduction*)> reduce_;
};
// A variable is an abstract storage location, which is lowered to SSA values
// and phi nodes by {VariableTracker}.
class Variable {
public:
Variable() : id_(kInvalid) {}
bool operator==(Variable other) const { return id_ == other.id_; }
bool operator!=(Variable other) const { return id_ != other.id_; }
bool operator<(Variable other) const { return id_ < other.id_; }
static Variable Invalid() { return Variable(kInvalid); }
friend V8_INLINE size_t hash_value(Variable v) {
return base::hash_value(v.id_);
}
friend std::ostream& operator<<(std::ostream& os, Variable var) {
return os << var.id_;
}
private:
typedef int Id;
explicit Variable(Id id) : id_(id) {}
Id id_;
static const Id kInvalid = -1;
friend class VariableTracker;
};
// An object that can track the nodes in the graph whose current reduction
// depends on the value of the object.
class Dependable : public ZoneObject {
 public:
  explicit Dependable(Zone* zone) : dependants_(zone) {}
  void AddDependency(Node* node) { dependants_.push_back(node); }
  void RevisitDependants(EffectGraphReducer* reducer) {
    for (Node* node : dependants_) {
      reducer->Revisit(node);
    }
    dependants_.clear();
  }

 private:
  ZoneVector<Node*> dependants_;
};

// EscapeObjectAnalysis simulates stores to determine values of loads if
// an object is virtual and eliminated.
class V8_EXPORT_PRIVATE EscapeAnalysis {
 public:
  EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common, Zone* zone);
  ~EscapeAnalysis();

  bool Run();

  Node* GetReplacement(Node* node);
  Node* ResolveReplacement(Node* node);
  bool IsVirtual(Node* node);
  bool IsEscaped(Node* node);
  bool CompareVirtualObjects(Node* left, Node* right);
  Node* GetOrCreateObjectState(Node* effect, Node* node);
  bool IsCyclicObjectState(Node* effect, Node* node);
  bool ExistsVirtualAllocate();
  bool SetReplacement(Node* node, Node* rep);
  bool AllObjectsComplete();

// A virtual object represents an allocation site and tracks the Variables
// associated with its fields as well as its global escape status.
class VirtualObject : public Dependable {
public:
typedef uint32_t Id;
typedef ZoneVector<Variable>::const_iterator const_iterator;
VirtualObject(VariableTracker* var_states, Id id, int size);
Maybe<Variable> FieldAt(int offset) const {
DCHECK(offset % kPointerSize == 0);
CHECK(!HasEscaped());
if (offset >= size()) {
// This can only happen in unreachable code.
return Nothing<Variable>();
}
return Just(fields_.at(offset / kPointerSize));
}
Id id() const { return id_; }
int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
// Escaped might mean that the object escaped to untracked memory or that it
// is used in an operation that requires materialization.
void SetEscaped() { escaped_ = true; }
bool HasEscaped() const { return escaped_; }
const_iterator begin() const { return fields_.begin(); }
const_iterator end() const { return fields_.end(); }
private:
bool escaped_ = false;
Id id_;
ZoneVector<Variable> fields_;
};
class EscapeAnalysisResult {
public:
explicit EscapeAnalysisResult(EscapeAnalysisTracker* tracker)
: tracker_(tracker) {}
const VirtualObject* GetVirtualObject(Node* node);
Node* GetVirtualObjectField(const VirtualObject* vobject, int field,
Node* effect);
Node* GetReplacementOf(Node* node);
private:
EscapeAnalysisTracker* tracker_;
};
class V8_EXPORT_PRIVATE EscapeAnalysis final
: public NON_EXPORTED_BASE(EffectGraphReducer) {
public:
EscapeAnalysis(JSGraph* jsgraph, Zone* zone);
EscapeAnalysisResult analysis_result() {
DCHECK(Complete());
return EscapeAnalysisResult(tracker_);
}
 private:
  void Reduce(Node* node, Reduction* reduction);
  JSGraph* jsgraph() { return jsgraph_; }
  EscapeAnalysisTracker* tracker_;
  JSGraph* jsgraph_;
};

 private:
  void RunObjectAnalysis();
  bool Process(Node* node);
  void ProcessLoadField(Node* node);
  void ProcessStoreField(Node* node);
void ProcessLoadElement(Node* node);
void ProcessStoreElement(Node* node);
void ProcessCheckMaps(Node* node);
void ProcessAllocationUsers(Node* node);
void ProcessAllocation(Node* node);
void ProcessFinishRegion(Node* node);
void ProcessCall(Node* node);
void ProcessStart(Node* node);
bool ProcessEffectPhi(Node* node);
void ForwardVirtualState(Node* node);
impl::VirtualState* CopyForModificationAt(impl::VirtualState* state,
Node* node);
impl::VirtualObject* CopyForModificationAt(impl::VirtualObject* obj,
impl::VirtualState* state,
Node* node);
Node* replacement(Node* node);
bool UpdateReplacement(impl::VirtualState* state, Node* node, Node* rep);
impl::VirtualObject* GetVirtualObject(impl::VirtualState* state, Node* node);
void DebugPrint();
void DebugPrintState(impl::VirtualState* state);
Graph* graph() const;
Zone* zone() const { return zone_; }
CommonOperatorBuilder* common() const { return common_; }
Zone* const zone_;
Node* const slot_not_analyzed_;
CommonOperatorBuilder* const common_;
EscapeStatusAnalysis* status_analysis_;
ZoneVector<impl::VirtualState*> virtual_states_;
ZoneVector<Node*> replacements_;
ZoneSet<impl::VirtualObject*> cycle_detection_;
impl::MergeCache* cache_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysis);
};

}  // namespace compiler
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/new-escape-analysis-reducer.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/type-cache.h"
#include "src/frame-constants.h"
namespace v8 {
namespace internal {
namespace compiler {
#ifdef DEBUG
#define TRACE(...) \
do { \
if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
} while (false)
#else
#define TRACE(...)
#endif // DEBUG
NewEscapeAnalysisReducer::NewEscapeAnalysisReducer(
Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
Zone* zone)
: AdvancedReducer(editor),
jsgraph_(jsgraph),
analysis_result_(analysis_result),
object_id_cache_(zone),
node_cache_(jsgraph->graph(), zone),
arguments_elements_(zone),
zone_(zone) {}
Node* NewEscapeAnalysisReducer::MaybeGuard(Node* original, Node* replacement) {
// We might need to guard the replacement if the type of the {replacement}
// node is not in a sub-type relation to the type of the {original} node.
Type* const replacement_type = NodeProperties::GetType(replacement);
Type* const original_type = NodeProperties::GetType(original);
if (!replacement_type->Is(original_type)) {
Node* const control = NodeProperties::GetControlInput(original);
replacement = jsgraph()->graph()->NewNode(
jsgraph()->common()->TypeGuard(original_type), replacement, control);
NodeProperties::SetType(replacement, original_type);
}
return replacement;
}
namespace {
Node* SkipTypeGuards(Node* node) {
while (node->opcode() == IrOpcode::kTypeGuard) {
node = NodeProperties::GetValueInput(node, 0);
}
return node;
}
} // namespace
Node* NewEscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
VirtualObject::Id id = vobject->id();
if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
if (!object_id_cache_[id]) {
Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
NodeProperties::SetType(node, Type::Object());
object_id_cache_[id] = node;
}
return object_id_cache_[id];
}
Reduction NewEscapeAnalysisReducer::Reduce(Node* node) {
if (Node* replacement = analysis_result().GetReplacementOf(node)) {
DCHECK(node->opcode() != IrOpcode::kAllocate &&
node->opcode() != IrOpcode::kFinishRegion);
DCHECK_NE(replacement, node);
if (replacement != jsgraph()->Dead()) {
replacement = MaybeGuard(node, replacement);
}
RelaxEffectsAndControls(node);
return Replace(replacement);
}
switch (node->opcode()) {
case IrOpcode::kAllocate: {
const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
if (vobject && !vobject->HasEscaped()) {
RelaxEffectsAndControls(node);
}
return NoChange();
}
case IrOpcode::kFinishRegion: {
Node* effect = NodeProperties::GetEffectInput(node, 0);
if (effect->opcode() == IrOpcode::kBeginRegion) {
RelaxEffectsAndControls(effect);
RelaxEffectsAndControls(node);
}
return NoChange();
}
case IrOpcode::kNewUnmappedArgumentsElements:
arguments_elements_.insert(node);
return NoChange();
default: {
// TODO(sigurds): Change this to GetFrameStateInputCount once
// it is working. For now we use EffectInputCount > 0 to determine
// whether a node might have a frame state input.
if (node->op()->EffectInputCount() > 0) {
ReduceFrameStateInputs(node);
}
return NoChange();
}
}
}
// While doing DFS on the FrameState tree, we have to recognize duplicate
// occurrences of virtual objects.
class Deduplicator {
public:
explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
bool SeenBefore(const VirtualObject* vobject) {
VirtualObject::Id id = vobject->id();
if (id >= is_duplicate_.size()) {
is_duplicate_.resize(id + 1);
}
bool is_duplicate = is_duplicate_[id];
is_duplicate_[id] = true;
return is_duplicate;
}
private:
ZoneVector<bool> is_duplicate_;
};
void NewEscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
DCHECK_GE(node->op()->EffectInputCount(), 1);
for (int i = 0; i < node->InputCount(); ++i) {
Node* input = node->InputAt(i);
if (input->opcode() == IrOpcode::kFrameState) {
Deduplicator deduplicator(zone());
if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
node->ReplaceInput(i, ret);
}
}
}
}
Node* NewEscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
Deduplicator* deduplicator) {
if (node->opcode() == IrOpcode::kFrameState) {
NodeHashCache::Constructor new_node(&node_cache_, node);
// This input order is important to match the DFS traversal used in the
// instruction selector. Otherwise, the instruction selector might find a
// duplicate node before the original one.
for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
kFrameStateParametersInput, kFrameStateContextInput,
kFrameStateLocalsInput, kFrameStateStackInput}) {
Node* input = node->InputAt(input_id);
new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
input_id);
}
return new_node.Get();
} else if (node->opcode() == IrOpcode::kStateValues) {
NodeHashCache::Constructor new_node(&node_cache_, node);
for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
Node* input = NodeProperties::GetValueInput(node, i);
new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
i);
}
return new_node.Get();
} else if (const VirtualObject* vobject =
analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
if (vobject->HasEscaped()) return node;
if (deduplicator->SeenBefore(vobject)) {
return ObjectIdNode(vobject);
} else {
std::vector<Node*> inputs;
for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
Node* field =
analysis_result().GetVirtualObjectField(vobject, offset, effect);
CHECK_NOT_NULL(field);
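// Fields that were never written still map to the Dead node (the sentinel
// used for uninitialized memory) and are omitted from the ObjectState inputs.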
if (field != jsgraph()->Dead()) {
inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
}
}
int num_inputs = static_cast<int>(inputs.size());
NodeHashCache::Constructor new_node(
&node_cache_,
jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
num_inputs, &inputs.front(), NodeProperties::GetType(node));
return new_node.Get();
}
} else {
return node;
}
}
void NewEscapeAnalysisReducer::VerifyReplacement() const {
AllNodes all(zone(), jsgraph()->graph());
for (Node* node : all.reachable) {
if (node->opcode() == IrOpcode::kAllocate) {
if (const VirtualObject* vobject =
analysis_result().GetVirtualObject(node)) {
if (!vobject->HasEscaped()) {
V8_Fatal(__FILE__, __LINE__,
"Escape analysis failed to remove node %s#%d\n",
node->op()->mnemonic(), node->id());
}
}
}
}
}
void NewEscapeAnalysisReducer::Finalize() {
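// Replace NewUnmappedArgumentsElements allocations that never escape with
// ArgumentsElementsState and rewrite their loads to read directly from the
// stack frame (a summary of the loop below).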
for (Node* node : arguments_elements_) {
DCHECK(node->opcode() == IrOpcode::kNewUnmappedArgumentsElements);
Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
if (arguments_frame->opcode() != IrOpcode::kArgumentsFrame) continue;
Node* arguments_length = NodeProperties::GetValueInput(node, 1);
if (arguments_length->opcode() != IrOpcode::kArgumentsLength) continue;
Node* arguments_length_state = nullptr;
for (Edge edge : arguments_length->use_edges()) {
Node* use = edge.from();
switch (use->opcode()) {
case IrOpcode::kObjectState:
case IrOpcode::kTypedObjectState:
case IrOpcode::kStateValues:
case IrOpcode::kTypedStateValues:
if (!arguments_length_state) {
arguments_length_state = jsgraph()->graph()->NewNode(
jsgraph()->common()->ArgumentsLengthState(
IsRestLengthOf(arguments_length->op())));
NodeProperties::SetType(arguments_length_state,
Type::OtherInternal());
}
edge.UpdateTo(arguments_length_state);
break;
default:
break;
}
}
bool escaping_use = false;
ZoneVector<Node*> loads(zone());
for (Edge edge : node->use_edges()) {
Node* use = edge.from();
if (!NodeProperties::IsValueEdge(edge)) continue;
if (use->use_edges().empty()) {
// A node without uses is dead, so we don't have to care about it.
continue;
}
switch (use->opcode()) {
case IrOpcode::kStateValues:
case IrOpcode::kTypedStateValues:
case IrOpcode::kObjectState:
case IrOpcode::kTypedObjectState:
break;
case IrOpcode::kLoadElement:
loads.push_back(use);
break;
case IrOpcode::kLoadField:
if (FieldAccessOf(use->op()).offset == FixedArray::kLengthOffset) {
loads.push_back(use);
} else {
escaping_use = true;
}
break;
default:
// If the arguments elements node is used by an unhandled node,
// then we cannot remove this allocation.
escaping_use = true;
break;
}
if (escaping_use) break;
}
if (!escaping_use) {
Node* arguments_elements_state = jsgraph()->graph()->NewNode(
jsgraph()->common()->ArgumentsElementsState(
IsRestLengthOf(arguments_length->op())));
NodeProperties::SetType(arguments_elements_state, Type::OtherInternal());
ReplaceWithValue(node, arguments_elements_state);
ElementAccess stack_access;
stack_access.base_is_tagged = BaseTaggedness::kUntaggedBase;
// Reduce base address by {kPointerSize} such that (length - index)
// resolves to the right position.
stack_access.header_size =
CommonFrameConstants::kFixedFrameSizeAboveFp - kPointerSize;
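// A sketch of the resulting arithmetic: a load at original index {i} gets
// the reversed offset {length - i} below, so the address resolves to
// arguments_frame + kFixedFrameSizeAboveFp + (length - i - 1) * kPointerSize.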
stack_access.type = Type::NonInternal();
stack_access.machine_type = MachineType::AnyTagged();
stack_access.write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
const Operator* load_stack_op =
jsgraph()->simplified()->LoadElement(stack_access);
for (Node* load : loads) {
switch (load->opcode()) {
case IrOpcode::kLoadElement: {
Node* index = NodeProperties::GetValueInput(load, 1);
// {offset} is a reversed index starting from 1 (offset == length - index).
// The base address was adjusted above to allow offsets starting from 1.
Node* offset = jsgraph()->graph()->NewNode(
jsgraph()->simplified()->NumberSubtract(), arguments_length,
index);
NodeProperties::SetType(offset,
TypeCache::Get().kArgumentsLengthType);
NodeProperties::ReplaceValueInput(load, arguments_frame, 0);
NodeProperties::ReplaceValueInput(load, offset, 1);
NodeProperties::ChangeOp(load, load_stack_op);
break;
}
case IrOpcode::kLoadField: {
DCHECK_EQ(FieldAccessOf(load->op()).offset,
FixedArray::kLengthOffset);
Node* length = NodeProperties::GetValueInput(node, 1);
ReplaceWithValue(load, length);
break;
}
default:
UNREACHABLE();
}
}
}
}
}
Node* NodeHashCache::Query(Node* node) {
auto it = cache_.find(node);
if (it != cache_.end()) {
return *it;
} else {
return nullptr;
}
}
NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
const Operator* op, int input_count,
Node** inputs, Type* type)
: node_cache_(cache), from_(nullptr) {
if (node_cache_->temp_nodes_.size() > 0) {
tmp_ = node_cache_->temp_nodes_.back();
node_cache_->temp_nodes_.pop_back();
int tmp_input_count = tmp_->InputCount();
if (input_count <= tmp_input_count) {
tmp_->TrimInputCount(input_count);
}
for (int i = 0; i < input_count; ++i) {
if (i < tmp_input_count) {
tmp_->ReplaceInput(i, inputs[i]);
} else {
tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
}
}
NodeProperties::ChangeOp(tmp_, op);
} else {
tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
}
NodeProperties::SetType(tmp_, type);
}
Node* NodeHashCache::Constructor::Get() {
DCHECK(tmp_ || from_);
Node* node;
if (!tmp_) {
node = node_cache_->Query(from_);
if (!node) node = from_;
} else {
node = node_cache_->Query(tmp_);
if (node) {
node_cache_->temp_nodes_.push_back(tmp_);
} else {
node = tmp_;
node_cache_->Insert(node);
}
}
tmp_ = from_ = nullptr;
return node;
}
Node* NodeHashCache::Constructor::MutableNode() {
DCHECK(tmp_ || from_);
if (!tmp_) {
if (node_cache_->temp_nodes_.empty()) {
tmp_ = node_cache_->graph_->CloneNode(from_);
} else {
tmp_ = node_cache_->temp_nodes_.back();
node_cache_->temp_nodes_.pop_back();
int from_input_count = from_->InputCount();
int tmp_input_count = tmp_->InputCount();
if (from_input_count <= tmp_input_count) {
tmp_->TrimInputCount(from_input_count);
}
for (int i = 0; i < from_input_count; ++i) {
if (i < tmp_input_count) {
tmp_->ReplaceInput(i, from_->InputAt(i));
} else {
tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
}
}
NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
NodeProperties::ChangeOp(tmp_, from_->op());
}
}
return tmp_;
}
#undef TRACE
} // namespace compiler
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#include "src/base/compiler-specific.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/new-escape-analysis.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace compiler {
class Deduplicator;
class JSGraph;
// Performs hash-consing when creating or mutating nodes, to avoid duplicate
// nodes when creating ObjectState, StateValues and FrameState nodes.
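// A usage sketch (hypothetical node names):
//   NodeHashCache::Constructor builder(&cache, old_frame_state);
//   builder.ReplaceInput(new_outer_state, kFrameStateOuterStateInput);
//   Node* canonical = builder.Get();  // Returns a cached copy if one exists.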
class NodeHashCache {
public:
NodeHashCache(Graph* graph, Zone* zone)
: graph_(graph), cache_(zone), temp_nodes_(zone) {}
// Handle to a conceptually new mutable node. Tries to re-use existing nodes
// and to recycle memory if possible.
class Constructor {
public:
// Construct a new node as a clone of [from].
Constructor(NodeHashCache* cache, Node* from)
: node_cache_(cache), from_(from), tmp_(nullptr) {}
// Construct a new node from scratch.
Constructor(NodeHashCache* cache, const Operator* op, int input_count,
Node** inputs, Type* type);
// Modify the new node.
void ReplaceValueInput(Node* input, int i) {
if (!tmp_ && input == NodeProperties::GetValueInput(from_, i)) return;
Node* node = MutableNode();
NodeProperties::ReplaceValueInput(node, input, i);
}
void ReplaceInput(Node* input, int i) {
if (!tmp_ && input == from_->InputAt(i)) return;
Node* node = MutableNode();
node->ReplaceInput(i, input);
}
// Obtain the mutated node or a cached copy. Invalidates the [Constructor].
Node* Get();
private:
Node* MutableNode();
NodeHashCache* node_cache_;
// Original node, copied on write.
Node* from_;
// Temporary node used for mutations, can be recycled if cache is hit.
Node* tmp_;
};
private:
Node* Query(Node* node);
void Insert(Node* node) { cache_.insert(node); }
Graph* graph_;
struct NodeEquals {
bool operator()(Node* a, Node* b) const {
return NodeProperties::Equals(a, b);
}
};
struct NodeHashCode {
size_t operator()(Node* n) const { return NodeProperties::HashCode(n); }
};
ZoneUnorderedSet<Node*, NodeHashCode, NodeEquals> cache_;
// Unused nodes whose memory can be recycled.
ZoneVector<Node*> temp_nodes_;
};
// Modify the graph according to the information computed in the previous phase.
class V8_EXPORT_PRIVATE NewEscapeAnalysisReducer final
: public NON_EXPORTED_BASE(AdvancedReducer) {
public:
NewEscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
EscapeAnalysisResult analysis_result, Zone* zone);
Reduction Reduce(Node* node) override;
const char* reducer_name() const override {
return "NewEscapeAnalysisReducer";
}
void Finalize() override;
// Verifies that all virtual allocation nodes have been dealt with. Run it
// after this reducer has been applied.
void VerifyReplacement() const;
private:
void ReduceFrameStateInputs(Node* node);
Node* ReduceDeoptState(Node* node, Node* effect, Deduplicator* deduplicator);
Node* ObjectIdNode(const VirtualObject* vobject);
Node* MaybeGuard(Node* original, Node* replacement);
JSGraph* jsgraph() const { return jsgraph_; }
EscapeAnalysisResult analysis_result() const { return analysis_result_; }
Zone* zone() const { return zone_; }
JSGraph* const jsgraph_;
EscapeAnalysisResult analysis_result_;
ZoneVector<Node*> object_id_cache_;
NodeHashCache node_cache_;
ZoneSet<Node*> arguments_elements_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(NewEscapeAnalysisReducer);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/new-escape-analysis.h"
#include "src/bootstrapper.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/objects-inl.h"
#ifdef DEBUG
#define TRACE(...) \
do { \
if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
} while (false)
#else
#define TRACE(...)
#endif
namespace v8 {
namespace internal {
namespace compiler {
template <class T>
class Sidetable {
public:
explicit Sidetable(Zone* zone) : map_(zone) {}
T& operator[](const Node* node) {
NodeId id = node->id();
if (id >= map_.size()) {
map_.resize(id + 1);
}
return map_[id];
}
private:
ZoneVector<T> map_;
};
template <class T>
class SparseSidetable {
public:
explicit SparseSidetable(Zone* zone, T def_value = T())
: def_value_(std::move(def_value)), map_(zone) {}
void Set(const Node* node, T value) {
auto iter = map_.find(node->id());
if (iter != map_.end()) {
iter->second = std::move(value);
} else if (value != def_value_) {
map_.insert(iter, std::make_pair(node->id(), std::move(value)));
}
}
const T& Get(const Node* node) const {
auto iter = map_.find(node->id());
return iter != map_.end() ? iter->second : def_value_;
}
private:
T def_value_;
ZoneUnorderedMap<NodeId, T> map_;
};
// Keeps track of the changes to the current node during reduction.
// Encapsulates the current state of the IR graph and the reducer state like
// side-tables. All access to the IR and the reducer state should happen through
// a ReduceScope to ensure that changes and dependencies are tracked and all
// necessary node revisitations happen.
class ReduceScope {
public:
typedef EffectGraphReducer::Reduction Reduction;
explicit ReduceScope(Node* node, Reduction* reduction)
: current_node_(node), reduction_(reduction) {}
protected:
Node* current_node() const { return current_node_; }
Reduction* reduction() { return reduction_; }
private:
Node* current_node_;
Reduction* reduction_;
};
// A VariableTracker object keeps track of the values of variables at all points
// of the effect chain and introduces new phi nodes when necessary.
// Initially and by default, variables are mapped to nullptr, which means that
// the variable allocation point does not dominate the current point on the
// effect chain. We map variables that represent uninitialized memory to the
// Dead node to ensure it is not read.
// Unmapped values are impossible by construction: it is indistinguishable
// whether a PersistentMap does not contain an element or maps it to the
// default value.
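// For example (a sketch): if a variable is assigned {v1} on one branch of a
// diamond and {v2} on the other, the state at the merging EffectPhi maps the
// variable to a freshly created Phi(v1, v2); see MergeInputs below.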
class VariableTracker {
private:
// The state of all variables at one point in the effect chain.
class State {
typedef PersistentMap<Variable, Node*> Map;
public:
explicit State(Zone* zone) : map_(zone) {}
Node* Get(Variable var) const {
CHECK(var != Variable::Invalid());
return map_.Get(var);
}
void Set(Variable var, Node* node) {
CHECK(var != Variable::Invalid());
return map_.Set(var, node);
}
Map::iterator begin() const { return map_.begin(); }
Map::iterator end() const { return map_.end(); }
bool operator!=(const State& other) const { return map_ != other.map_; }
private:
Map map_;
};
public:
VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
Variable NewVariable() { return Variable(next_variable_++); }
Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
Zone* zone() { return zone_; }
class Scope : public ReduceScope {
public:
Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
~Scope();
Node* Get(Variable var) { return current_state_.Get(var); }
void Set(Variable var, Node* node) { current_state_.Set(var, node); }
private:
VariableTracker* states_;
State current_state_;
};
private:
State MergeInputs(Node* effect_phi);
Zone* zone_;
JSGraph* graph_;
SparseSidetable<State> table_;
ZoneVector<Node*> buffer_;
EffectGraphReducer* reducer_;
int next_variable_ = 0;
DISALLOW_COPY_AND_ASSIGN(VariableTracker);
};
// Encapsulates the current state of the escape analysis reducer to preserve
// invariants regarding changes and re-visitation.
class EscapeAnalysisTracker : public ZoneObject {
public:
EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
Zone* zone)
: virtual_objects_(zone),
replacements_(zone),
variable_states_(jsgraph, reducer, zone),
jsgraph_(jsgraph),
zone_(zone) {}
class Scope : public VariableTracker::Scope {
public:
Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
Node* node, Reduction* reduction)
: VariableTracker::Scope(&tracker->variable_states_, node, reduction),
tracker_(tracker),
reducer_(reducer) {}
const VirtualObject* GetVirtualObject(Node* node) {
VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
if (vobject) vobject->AddDependency(current_node());
return vobject;
}
// Create or retrieve a virtual object for the current node.
const VirtualObject* InitVirtualObject(int size) {
DCHECK(current_node()->opcode() == IrOpcode::kAllocate);
VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
if (vobject) {
CHECK(vobject->size() == size);
} else {
vobject = tracker_->NewVirtualObject(size);
}
if (vobject) vobject->AddDependency(current_node());
vobject_ = vobject;
return vobject;
}
void SetVirtualObject(Node* object) {
vobject_ = tracker_->virtual_objects_.Get(object);
}
void SetEscaped(Node* node) {
if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
if (object->HasEscaped()) return;
TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
node->op()->mnemonic(), node->id(),
current_node()->op()->mnemonic(), current_node()->id());
object->SetEscaped();
object->RevisitDependants(reducer_);
}
}
// The inputs of the current node have to be accessed through the scope to
// ensure that they respect the node replacements.
Node* ValueInput(int i) {
return tracker_->ResolveReplacement(
NodeProperties::GetValueInput(current_node(), i));
}
Node* ContextInput() {
return tracker_->ResolveReplacement(
NodeProperties::GetContextInput(current_node()));
}
void SetReplacement(Node* replacement) {
replacement_ = replacement;
vobject_ =
replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
replacement->id());
}
void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
~Scope() {
if (replacement_ != tracker_->replacements_[current_node()] ||
vobject_ != tracker_->virtual_objects_.Get(current_node())) {
reduction()->set_value_changed();
}
tracker_->replacements_[current_node()] = replacement_;
tracker_->virtual_objects_.Set(current_node(), vobject_);
}
private:
EscapeAnalysisTracker* tracker_;
EffectGraphReducer* reducer_;
VirtualObject* vobject_ = nullptr;
Node* replacement_ = nullptr;
};
Node* GetReplacementOf(Node* node) { return replacements_[node]; }
Node* ResolveReplacement(Node* node) {
if (Node* replacement = GetReplacementOf(node)) {
// Replacements cannot have replacements. This is important to ensure
// re-visitation: If a replacement is replaced, then all nodes accessing
// the replacement have to be updated.
DCHECK_NULL(GetReplacementOf(replacement));
return replacement;
}
return node;
}
private:
friend class EscapeAnalysisResult;
static const size_t kMaxTrackedObjects = 100;
VirtualObject* NewVirtualObject(int size) {
if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
return new (zone_)
VirtualObject(&variable_states_, next_object_id_++, size);
}
SparseSidetable<VirtualObject*> virtual_objects_;
Sidetable<Node*> replacements_;
VariableTracker variable_states_;
VirtualObject::Id next_object_id_ = 0;
JSGraph* const jsgraph_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
};
EffectGraphReducer::EffectGraphReducer(
Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
: graph_(graph),
state_(graph, kNumStates),
revisit_(zone),
stack_(zone),
reduce_(reduce) {}
void EffectGraphReducer::ReduceFrom(Node* node) {
// Perform DFS and eagerly trigger revisitation as soon as possible.
// A stack element {node, i} indicates that input i of node should be visited
// next.
DCHECK(stack_.empty());
stack_.push({node, 0});
while (!stack_.empty()) {
Node* current = stack_.top().node;
int& input_index = stack_.top().input_index;
if (input_index < current->InputCount()) {
Node* input = current->InputAt(input_index);
input_index++;
switch (state_.Get(input)) {
case State::kVisited:
// The input is already reduced.
break;
case State::kOnStack:
// The input is on the DFS stack right now, so it will be revisited
// later anyway.
break;
case State::kUnvisited:
case State::kRevisit: {
state_.Set(input, State::kOnStack);
stack_.push({input, 0});
break;
}
}
} else {
stack_.pop();
Reduction reduction;
reduce_(current, &reduction);
for (Edge edge : current->use_edges()) {
// Mark uses for revisitation.
Node* use = edge.from();
if (NodeProperties::IsEffectEdge(edge)) {
if (reduction.effect_changed()) Revisit(use);
} else {
if (reduction.value_changed()) Revisit(use);
}
}
state_.Set(current, State::kVisited);
// Process the revisitation buffer immediately. This improves performance
// of escape analysis. Using a stack for {revisit_} reverses the order in
// which the revisitation happens. This also seems to improve performance.
while (!revisit_.empty()) {
Node* revisit = revisit_.top();
if (state_.Get(revisit) == State::kRevisit) {
state_.Set(revisit, State::kOnStack);
stack_.push({revisit, 0});
}
revisit_.pop();
}
}
}
}
void EffectGraphReducer::Revisit(Node* node) {
if (state_.Get(node) == State::kVisited) {
TRACE(" Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
node->id());
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
}
VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
Zone* zone)
: zone_(zone),
graph_(graph),
table_(zone, State(zone)),
buffer_(zone),
reducer_(reducer) {}
VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
Reduction* reduction)
: ReduceScope(node, reduction),
states_(states),
current_state_(states->zone_) {
switch (node->opcode()) {
case IrOpcode::kEffectPhi:
current_state_ = states_->MergeInputs(node);
break;
default:
int effect_inputs = node->op()->EffectInputCount();
if (effect_inputs == 1) {
current_state_ =
states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
} else {
DCHECK_EQ(0, effect_inputs);
}
}
}
VariableTracker::Scope::~Scope() {
if (!reduction()->effect_changed() &&
states_->table_.Get(current_node()) != current_state_) {
reduction()->set_effect_changed();
}
states_->table_.Set(current_node(), current_state_);
}
VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
// A variable that is mapped to [nullptr] was not assigned a value on every
// execution path to the current effect phi. Relying on the invariant that
// every variable is initialized (at least with a sentinel like the Dead
// node), this means that the variable initialization does not dominate the
// current point. So for loop effect phis, we can keep nullptr for a variable
// as long as the first input of the loop has nullptr for this variable. For
// non-loop effect phis, we can even keep it nullptr as long as any input has
// nullptr.
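// For example (a sketch): a variable initialized before a loop is defined on
// input 0 of the loop effect phi, so its initialization dominates the whole
// loop; a variable assigned only inside the loop body is nullptr on input 0
// and must remain nullptr at the phi.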
DCHECK(effect_phi->opcode() == IrOpcode::kEffectPhi);
int arity = effect_phi->op()->EffectInputCount();
Node* control = NodeProperties::GetControlInput(effect_phi, 0);
TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
bool is_loop = control->opcode() == IrOpcode::kLoop;
buffer_.reserve(arity + 1);
State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
State result = first_input;
for (std::pair<Variable, Node*> var_value : first_input) {
if (Node* value = var_value.second) {
Variable var = var_value.first;
TRACE("var %i:\n", var.id_);
buffer_.clear();
buffer_.push_back(value);
bool identical_inputs = true;
int num_defined_inputs = 1;
TRACE(" input 0: %s#%d\n", value->op()->mnemonic(), value->id());
for (int i = 1; i < arity; ++i) {
Node* next_value =
table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
if (next_value != value) identical_inputs = false;
if (next_value != nullptr) {
num_defined_inputs++;
TRACE(" input %i: %s#%d\n", i, next_value->op()->mnemonic(),
next_value->id());
} else {
TRACE(" input %i: nullptr\n", i);
}
buffer_.push_back(next_value);
}
Node* old_value = table_.Get(effect_phi).Get(var);
if (old_value) {
TRACE(" old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
} else {
TRACE(" old: nullptr\n");
}
// Reuse a previously created phi node if possible.
if (old_value && old_value->opcode() == IrOpcode::kPhi &&
NodeProperties::GetControlInput(old_value, 0) == control) {
// Since a phi node can never dominate its control node,
// [old_value] cannot originate from the inputs. Thus [old_value]
// must have been created by a previous reduction of this [effect_phi].
for (int i = 0; i < arity; ++i) {
NodeProperties::ReplaceValueInput(
old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
// This change cannot affect the rest of the reducer, so there is no
// need to trigger additional revisitations.
}
result.Set(var, old_value);
} else {
if (num_defined_inputs == 1 && is_loop) {
// For loop effect phis, the variable initialization dominates iff it
// dominates the first input.
DCHECK_EQ(2, arity);
DCHECK_EQ(value, buffer_[0]);
result.Set(var, value);
} else if (num_defined_inputs < arity) {
// If the variable is undefined on some input of this non-loop effect
// phi, then its initialization does not dominate this point.
result.Set(var, nullptr);
} else {
DCHECK_EQ(num_defined_inputs, arity);
// We only create a phi if the values are different.
if (identical_inputs) {
result.Set(var, value);
} else {
TRACE("Creating new phi\n");
buffer_.push_back(control);
Node* phi = graph_->graph()->NewNode(
graph_->common()->Phi(MachineRepresentation::kTagged, arity),
arity + 1, &buffer_.front());
// TODO(tebbi): Computing precise types here is tricky, because of
// the necessary revisitations. If we really need this, we should
// probably do it afterwards.
NodeProperties::SetType(phi, Type::Any());
reducer_->AddRoot(phi);
result.Set(var, phi);
}
}
}
#ifdef DEBUG
if (Node* result_node = result.Get(var)) {
TRACE(" result: %s#%d\n", result_node->op()->mnemonic(),
result_node->id());
} else {
TRACE(" result: nullptr\n");
}
#endif
}
}
return result;
}
namespace {
int OffsetOfFieldAccess(const Operator* op) {
DCHECK(op->opcode() == IrOpcode::kLoadField ||
op->opcode() == IrOpcode::kStoreField);
FieldAccess access = FieldAccessOf(op);
return access.offset;
}
Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
DCHECK(op->opcode() == IrOpcode::kLoadElement ||
op->opcode() == IrOpcode::kStoreElement);
Type* index_type = NodeProperties::GetType(index_node);
if (!index_type->Is(Type::Number())) return Nothing<int>();
double max = index_type->Max();
double min = index_type->Min();
int index = static_cast<int>(min);
if (!(index == min && index == max)) return Nothing<int>();
ElementAccess access = ElementAccessOf(op);
DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
kPointerSizeLog2);
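  // E.g. (a sketch, assuming 64-bit tagged elements): header_size 16 with a
  // constant index 2 yields offset 16 + (2 << 3) = 32.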
return Just(access.header_size + (index << ElementSizeLog2Of(
access.machine_type.representation())));
}
void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
JSGraph* jsgraph) {
switch (op->opcode()) {
case IrOpcode::kAllocate: {
NumberMatcher size(current->ValueInput(0));
if (!size.HasValue()) break;
int size_int = static_cast<int>(size.Value());
if (size_int != size.Value()) break;
if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
// Initialize with dead nodes as a sentinel for uninitialized memory.
for (Variable field : *vobject) {
current->Set(field, jsgraph->Dead());
}
}
break;
}
case IrOpcode::kFinishRegion:
current->SetVirtualObject(current->ValueInput(0));
break;
case IrOpcode::kStoreField: {
Node* object = current->ValueInput(0);
Node* value = current->ValueInput(1);
const VirtualObject* vobject = current->GetVirtualObject(object);
Variable var;
if (vobject && !vobject->HasEscaped() &&
vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
current->Set(var, value);
current->MarkForDeletion();
} else {
current->SetEscaped(object);
current->SetEscaped(value);
}
break;
}
case IrOpcode::kStoreElement: {
Node* object = current->ValueInput(0);
Node* index = current->ValueInput(1);
Node* value = current->ValueInput(2);
const VirtualObject* vobject = current->GetVirtualObject(object);
int offset;
Variable var;
if (vobject && !vobject->HasEscaped() &&
OffsetOfElementsAccess(op, index).To(&offset) &&
vobject->FieldAt(offset).To(&var)) {
current->Set(var, value);
current->MarkForDeletion();
} else {
current->SetEscaped(value);
current->SetEscaped(object);
}
break;
}
case IrOpcode::kLoadField: {
Node* object = current->ValueInput(0);
const VirtualObject* vobject = current->GetVirtualObject(object);
Variable var;
if (vobject && !vobject->HasEscaped() &&
vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
current->SetReplacement(current->Get(var));
} else {
// TODO(tebbi): At the moment, we mark objects as escaping if there
// is a load from an invalid location to avoid dead nodes. This is a
// workaround that should be removed once we can handle dead nodes
// everywhere.
current->SetEscaped(object);
}
break;
}
case IrOpcode::kLoadElement: {
Node* object = current->ValueInput(0);
Node* index = current->ValueInput(1);
const VirtualObject* vobject = current->GetVirtualObject(object);
int offset;
Variable var;
if (vobject && !vobject->HasEscaped() &&
OffsetOfElementsAccess(op, index).To(&offset) &&
vobject->FieldAt(offset).To(&var)) {
current->SetReplacement(current->Get(var));
} else {
current->SetEscaped(object);
}
break;
}
case IrOpcode::kTypeGuard: {
// The type-guard is re-introduced in the final reducer if the types
// don't match.
current->SetReplacement(current->ValueInput(0));
break;
}
case IrOpcode::kReferenceEqual: {
Node* left = current->ValueInput(0);
Node* right = current->ValueInput(1);
const VirtualObject* left_object = current->GetVirtualObject(left);
const VirtualObject* right_object = current->GetVirtualObject(right);
Node* replacement = nullptr;
if (left_object && !left_object->HasEscaped()) {
if (right_object && !right_object->HasEscaped() &&
left_object->id() == right_object->id()) {
replacement = jsgraph->TrueConstant();
} else {
replacement = jsgraph->FalseConstant();
}
} else if (right_object && !right_object->HasEscaped()) {
replacement = jsgraph->FalseConstant();
}
if (replacement) {
// TODO(tebbi) This is a workaround for uninhabited types. If we
// replaced a value of uninhabited type with a constant, we would
// widen the type of the node. This could produce inconsistent
// types (which might confuse representation selection). We get
// around this by refusing to constant-fold and escape-analyze
// if the type is not inhabited.
if (NodeProperties::GetType(left)->IsInhabited() &&
NodeProperties::GetType(right)->IsInhabited()) {
current->SetReplacement(replacement);
} else {
current->SetEscaped(left);
current->SetEscaped(right);
}
}
break;
}
case IrOpcode::kCheckMaps: {
CheckMapsParameters params = CheckMapsParametersOf(op);
Node* checked = current->ValueInput(0);
const VirtualObject* vobject = current->GetVirtualObject(checked);
Variable map_field;
if (vobject && !vobject->HasEscaped() &&
vobject->FieldAt(HeapObject::kMapOffset).To(&map_field)) {
Node* map = current->Get(map_field);
if (map) {
Type* const map_type = NodeProperties::GetType(map);
if (map_type->IsHeapConstant() &&
params.maps().contains(ZoneHandleSet<Map>(bit_cast<Handle<Map>>(
map_type->AsHeapConstant()->Value())))) {
current->MarkForDeletion();
break;
}
}
}
current->SetEscaped(checked);
break;
}
case IrOpcode::kCheckHeapObject: {
Node* checked = current->ValueInput(0);
switch (checked->opcode()) {
case IrOpcode::kAllocate:
case IrOpcode::kFinishRegion:
case IrOpcode::kHeapConstant:
current->SetReplacement(checked);
break;
default:
current->SetEscaped(checked);
break;
}
break;
}
case IrOpcode::kMapGuard: {
Node* object = current->ValueInput(0);
const VirtualObject* vobject = current->GetVirtualObject(object);
if (vobject && !vobject->HasEscaped()) {
current->MarkForDeletion();
}
break;
}
case IrOpcode::kStateValues:
case IrOpcode::kFrameState:
// These uses are always safe.
break;
default: {
// For unknown nodes, treat all value inputs as escaping.
int value_input_count = op->ValueInputCount();
for (int i = 0; i < value_input_count; ++i) {
Node* input = current->ValueInput(i);
current->SetEscaped(input);
}
if (OperatorProperties::HasContextInput(op)) {
current->SetEscaped(current->ContextInput());
}
break;
}
}
}
} // namespace
void NewEscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
const Operator* op = node->op();
TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());
EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
ReduceNode(op, &current, jsgraph());
}
NewEscapeAnalysis::NewEscapeAnalysis(JSGraph* jsgraph, Zone* zone)
: EffectGraphReducer(
jsgraph->graph(),
[this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
zone),
tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
jsgraph_(jsgraph) {}
Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
return tracker_->GetReplacementOf(node);
}
Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
int field, Node* effect) {
return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
effect);
}
const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
return tracker_->virtual_objects_.Get(node);
}
VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
int size)
: Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
DCHECK(size % kPointerSize == 0);
TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
int num_fields = size / kPointerSize;
fields_.reserve(num_fields);
for (int i = 0; i < num_fields; ++i) {
fields_.push_back(var_states->NewVariable());
}
}
#undef TRACE
} // namespace compiler
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#include "src/base/functional.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/persistent-map.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace compiler {
class CommonOperatorBuilder;
class VariableTracker;
class EscapeAnalysisTracker;
// {EffectGraphReducer} reduces up to a fixed point. It distinguishes changes to
// the effect output of a node from changes to the value output to reduce the
// number of revisitations.
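// A usage sketch (cf. {NewEscapeAnalysis} below, which builds on this class):
//   EffectGraphReducer reducer(
//       graph, [&](Node* node, Reduction* r) { /* analyze {node} */ }, zone);
//   reducer.ReduceGraph();  // Reduces from graph->end() to a fixed point.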
class EffectGraphReducer {
public:
class Reduction {
public:
bool value_changed() const { return value_changed_; }
void set_value_changed() { value_changed_ = true; }
bool effect_changed() const { return effect_changed_; }
void set_effect_changed() { effect_changed_ = true; }
private:
bool value_changed_ = false;
bool effect_changed_ = false;
};
EffectGraphReducer(Graph* graph,
std::function<void(Node*, Reduction*)> reduce, Zone* zone);
void ReduceGraph() { ReduceFrom(graph_->end()); }
// Mark node for revisitation.
void Revisit(Node* node);
// Add a new root node to start reduction from. This is useful if the reducer
// adds nodes that are not yet reachable, but should already be considered
// part of the graph.
void AddRoot(Node* node) {
DCHECK(state_.Get(node) == State::kUnvisited);
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
bool Complete() { return stack_.empty() && revisit_.empty(); }
private:
struct NodeState {
Node* node;
int input_index;
};
void ReduceFrom(Node* node);
enum class State : uint8_t { kUnvisited = 0, kRevisit, kOnStack, kVisited };
static const uint8_t kNumStates = static_cast<uint8_t>(State::kVisited) + 1;
Graph* graph_;
NodeMarker<State> state_;
ZoneStack<Node*> revisit_;
ZoneStack<NodeState> stack_;
std::function<void(Node*, Reduction*)> reduce_;
};
// A variable is an abstract storage location, which is lowered to SSA values
// and phi nodes by {VariableTracker}.
class Variable {
public:
Variable() : id_(kInvalid) {}
bool operator==(Variable other) const { return id_ == other.id_; }
bool operator!=(Variable other) const { return id_ != other.id_; }
bool operator<(Variable other) const { return id_ < other.id_; }
static Variable Invalid() { return Variable(kInvalid); }
friend V8_INLINE size_t hash_value(Variable v) {
return base::hash_value(v.id_);
}
friend std::ostream& operator<<(std::ostream& os, Variable var) {
return os << var.id_;
}
private:
typedef int Id;
explicit Variable(Id id) : id_(id) {}
Id id_;
static const Id kInvalid = -1;
friend class VariableTracker;
};
// An object that can track the nodes in the graph whose current reduction
// depends on the value of the object.
class Dependable : public ZoneObject {
public:
explicit Dependable(Zone* zone) : dependants_(zone) {}
void AddDependency(Node* node) { dependants_.push_back(node); }
void RevisitDependants(EffectGraphReducer* reducer) {
for (Node* node : dependants_) {
reducer->Revisit(node);
}
dependants_.clear();
}
private:
ZoneVector<Node*> dependants_;
};
// A virtual object represents an allocation site and tracks the Variables
// associated with its fields as well as its global escape status.
class VirtualObject : public Dependable {
public:
typedef uint32_t Id;
typedef ZoneVector<Variable>::const_iterator const_iterator;
VirtualObject(VariableTracker* var_states, Id id, int size);
Maybe<Variable> FieldAt(int offset) const {
DCHECK(offset % kPointerSize == 0);
CHECK(!HasEscaped());
if (offset >= size()) {
// This can only happen in unreachable code.
return Nothing<Variable>();
}
return Just(fields_.at(offset / kPointerSize));
}
Id id() const { return id_; }
int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
// Escaped might mean that the object escaped to untracked memory or that it
// is used in an operation that requires materialization.
void SetEscaped() { escaped_ = true; }
bool HasEscaped() const { return escaped_; }
const_iterator begin() const { return fields_.begin(); }
const_iterator end() const { return fields_.end(); }
private:
bool escaped_ = false;
Id id_;
ZoneVector<Variable> fields_;
};
class EscapeAnalysisResult {
public:
explicit EscapeAnalysisResult(EscapeAnalysisTracker* tracker)
: tracker_(tracker) {}
const VirtualObject* GetVirtualObject(Node* node);
Node* GetVirtualObjectField(const VirtualObject* vobject, int field,
Node* effect);
Node* GetReplacementOf(Node* node);
private:
EscapeAnalysisTracker* tracker_;
};
class V8_EXPORT_PRIVATE NewEscapeAnalysis final
: public NON_EXPORTED_BASE(EffectGraphReducer) {
public:
NewEscapeAnalysis(JSGraph* jsgraph, Zone* zone);
EscapeAnalysisResult analysis_result() {
DCHECK(Complete());
return EscapeAnalysisResult(tracker_);
}
private:
void Reduce(Node* node, Reduction* reduction);
JSGraph* jsgraph() { return jsgraph_; }
EscapeAnalysisTracker* tracker_;
JSGraph* jsgraph_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
...@@ -49,8 +49,6 @@
 #include "src/compiler/machine-operator-reducer.h"
 #include "src/compiler/memory-optimizer.h"
 #include "src/compiler/move-optimizer.h"
-#include "src/compiler/new-escape-analysis-reducer.h"
-#include "src/compiler/new-escape-analysis.h"
 #include "src/compiler/osr.h"
 #include "src/compiler/pipeline-statistics.h"
 #include "src/compiler/redundancy-elimination.h"
...
...@@ -1057,32 +1055,16 @@ struct EscapeAnalysisPhase {
   static const char* phase_name() { return "escape analysis"; }
   void Run(PipelineData* data, Zone* temp_zone) {
-    if (FLAG_turbo_new_escape) {
-      NewEscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
-      escape_analysis.ReduceGraph();
-      JSGraphReducer reducer(data->jsgraph(), temp_zone);
-      NewEscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
-                                              escape_analysis.analysis_result(),
-                                              temp_zone);
-      AddReducer(data, &reducer, &escape_reducer);
-      reducer.ReduceGraph();
-      // TODO(tebbi): Turn this into a debug mode check once we have confidence.
-      escape_reducer.VerifyReplacement();
-    } else {
-      EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
-                                     temp_zone);
-      if (!escape_analysis.Run()) return;
-      JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
-      EscapeAnalysisReducer escape_reducer(&graph_reducer, data->jsgraph(),
-                                           &escape_analysis, temp_zone);
-      AddReducer(data, &graph_reducer, &escape_reducer);
-      graph_reducer.ReduceGraph();
-      if (escape_reducer.compilation_failed()) {
-        data->set_compilation_failed();
-        return;
-      }
-      escape_reducer.VerifyReplacement();
-    }
+    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
+    escape_analysis.ReduceGraph();
+    JSGraphReducer reducer(data->jsgraph(), temp_zone);
+    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
+                                         escape_analysis.analysis_result(),
+                                         temp_zone);
+    AddReducer(data, &reducer, &escape_reducer);
+    reducer.ReduceGraph();
+    // TODO(tebbi): Turn this into a debug mode check once we have confidence.
+    escape_reducer.VerifyReplacement();
   }
 };
...
...@@ -439,8 +439,6 @@ DEFINE_BOOL(turbo_loop_variable, true, "Turbofan loop variable optimization")
 DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
 DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
 DEFINE_BOOL(turbo_escape, true, "enable escape analysis")
-DEFINE_BOOL(turbo_new_escape, true,
-            "enable new implementation of escape analysis")
 DEFINE_BOOL(turbo_instruction_scheduling, false,
             "enable instruction scheduling in TurboFan")
 DEFINE_BOOL(turbo_stress_instruction_scheduling, false,
...
...@@ -805,10 +805,6 @@
       'compiler/memory-optimizer.h',
       'compiler/move-optimizer.cc',
       'compiler/move-optimizer.h',
-      'compiler/new-escape-analysis.cc',
-      'compiler/new-escape-analysis.h',
-      'compiler/new-escape-analysis-reducer.cc',
-      'compiler/new-escape-analysis-reducer.h',
       'compiler/node-aux-data.h',
       'compiler/node-cache.cc',
       'compiler/node-cache.h',
...
...@@ -56,7 +56,6 @@ v8_executable("unittests") {
     "compiler/dead-code-elimination-unittest.cc",
     "compiler/diamond-unittest.cc",
     "compiler/effect-control-linearizer-unittest.cc",
-    "compiler/escape-analysis-unittest.cc",
     "compiler/graph-reducer-unittest.cc",
     "compiler/graph-reducer-unittest.h",
     "compiler/graph-trimmer-unittest.cc",
...
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/escape-analysis.h"
#include "src/bit-vector.h"
#include "src/compiler/escape-analysis-reducer.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/types.h"
#include "src/zone/zone-containers.h"
#include "test/unittests/compiler/graph-unittest.h"
namespace v8 {
namespace internal {
namespace compiler {
class EscapeAnalysisTest : public TypedGraphTest {
public:
EscapeAnalysisTest()
: simplified_(zone()),
jsgraph_(isolate(), graph(), common(), nullptr, nullptr, nullptr),
escape_analysis_(graph(), common(), zone()),
effect_(graph()->start()),
control_(graph()->start()) {}
~EscapeAnalysisTest() {}
EscapeAnalysis* escape_analysis() { return &escape_analysis_; }
protected:
void Analysis() { escape_analysis_.Run(); }
void Transformation() {
GraphReducer graph_reducer(zone(), graph());
EscapeAnalysisReducer escape_reducer(&graph_reducer, &jsgraph_,
&escape_analysis_, zone());
graph_reducer.AddReducer(&escape_reducer);
graph_reducer.ReduceGraph();
}
// --------------------------------- Node Creation Helper ---------------------
Node* BeginRegion(Node* effect = nullptr) {
if (!effect) {
effect = effect_;
}
return effect_ = graph()->NewNode(
common()->BeginRegion(RegionObservability::kObservable), effect);
}
Node* FinishRegion(Node* value, Node* effect = nullptr) {
if (!effect) {
effect = effect_;
}
return effect_ = graph()->NewNode(common()->FinishRegion(), value, effect);
}
Node* Allocate(Node* size, Node* effect = nullptr, Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ = graph()->NewNode(simplified()->Allocate(Type::Any()), size,
effect, control);
}
Node* Constant(int num) {
return graph()->NewNode(common()->NumberConstant(num));
}
Node* Store(const FieldAccess& access, Node* allocation, Node* value,
Node* effect = nullptr, Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ = graph()->NewNode(simplified()->StoreField(access),
allocation, value, effect, control);
}
Node* StoreElement(const ElementAccess& access, Node* allocation, Node* index,
Node* value, Node* effect = nullptr,
Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ =
graph()->NewNode(simplified()->StoreElement(access), allocation,
index, value, effect, control);
}
Node* Load(const FieldAccess& access, Node* from, Node* effect = nullptr,
Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return graph()->NewNode(simplified()->LoadField(access), from, effect,
control);
}
Node* Return(Node* value, Node* effect = nullptr, Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
Node* zero = graph()->NewNode(common()->NumberConstant(0));
return control_ = graph()->NewNode(common()->Return(), zero, value, effect,
control);
}
void EndGraph() {
for (Edge edge : graph()->end()->input_edges()) {
if (NodeProperties::IsControlEdge(edge)) {
edge.UpdateTo(control_);
}
}
}
Node* Branch() {
return control_ =
graph()->NewNode(common()->Branch(), Constant(0), control_);
}
Node* IfTrue() {
return control_ = graph()->NewNode(common()->IfTrue(), control_);
}
Node* IfFalse() { return graph()->NewNode(common()->IfFalse(), control_); }
Node* Merge2(Node* control1, Node* control2) {
return control_ = graph()->NewNode(common()->Merge(2), control1, control2);
}
FieldAccess FieldAccessAtIndex(int offset) {
FieldAccess access = {kTaggedBase, offset,
MaybeHandle<Name>(), MaybeHandle<Map>(),
Type::Any(), MachineType::AnyTagged(),
kFullWriteBarrier};
return access;
}
ElementAccess MakeElementAccess(int header_size) {
ElementAccess access = {kTaggedBase, header_size, Type::Any(),
MachineType::AnyTagged(), kFullWriteBarrier};
return access;
}
// --------------------------------- Assertion Helper -------------------------
void ExpectReplacement(Node* node, Node* rep) {
EXPECT_EQ(rep, escape_analysis()->GetReplacement(node));
}
void ExpectReplacementPhi(Node* node, Node* left, Node* right) {
Node* rep = escape_analysis()->GetReplacement(node);
ASSERT_NE(nullptr, rep);
ASSERT_EQ(IrOpcode::kPhi, rep->opcode());
EXPECT_EQ(left, NodeProperties::GetValueInput(rep, 0));
EXPECT_EQ(right, NodeProperties::GetValueInput(rep, 1));
}
void ExpectVirtual(Node* node) {
EXPECT_TRUE(node->opcode() == IrOpcode::kAllocate ||
node->opcode() == IrOpcode::kFinishRegion);
EXPECT_TRUE(escape_analysis()->IsVirtual(node));
}
void ExpectEscaped(Node* node) {
EXPECT_TRUE(node->opcode() == IrOpcode::kAllocate ||
node->opcode() == IrOpcode::kFinishRegion);
EXPECT_TRUE(escape_analysis()->IsEscaped(node));
}
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
Node* effect() { return effect_; }
Node* control() { return control_; }
private:
SimplifiedOperatorBuilder simplified_;
JSGraph jsgraph_;
EscapeAnalysis escape_analysis_;
Node* effect_;
Node* control_;
};
// -----------------------------------------------------------------------------
// Test cases.
TEST_F(EscapeAnalysisTest, StraightNonEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, StraightNonEscapeNonConstStore) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* index =
graph()->NewNode(common()->Select(MachineRepresentation::kTagged),
object1, object2, control());
StoreElement(MakeElementAccess(0), allocation, index, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, StraightEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(allocation);
EndGraph();
graph()->end()->AppendInput(zone(), load);
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, StoreLoadEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation1 = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation1, object1);
Node* finish1 = FinishRegion(allocation1);
BeginRegion();
Node* allocation2 = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation2, finish1);
Node* finish2 = FinishRegion(allocation2);
Node* load = Load(FieldAccessAtIndex(0), finish2);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectEscaped(allocation1);
ExpectVirtual(allocation2);
ExpectReplacement(load, finish1);
Transformation();
ASSERT_EQ(finish1, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, BranchNonEscape) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, object1, finish, ifFalse);
Node* effect2 =
Store(FieldAccessAtIndex(0), allocation, object2, finish, ifTrue);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, effect2, merge);
Node* load = Load(FieldAccessAtIndex(0), finish, phi, merge);
Node* result = Return(load, phi);
EndGraph();
graph()->end()->AppendInput(zone(), result);
Analysis();
ExpectVirtual(allocation);
ExpectReplacementPhi(load, object1, object2);
Node* replacement_phi = escape_analysis()->GetReplacement(load);
Transformation();
ASSERT_EQ(replacement_phi, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, BranchEscapeOne) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
Node* index = graph()->NewNode(common()->Parameter(0), start());
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, object1, finish, ifFalse);
Node* effect2 = StoreElement(MakeElementAccess(0), allocation, index, object2,
finish, ifTrue);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, effect2, merge);
Node* load = Load(FieldAccessAtIndex(0), finish, phi, merge);
Node* result = Return(load, phi);
EndGraph();
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, BranchEscapeThroughStore) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
FinishRegion(allocation);
BeginRegion();
Node* allocation2 = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object2);
Node* finish2 = FinishRegion(allocation2);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, allocation2, finish2, ifFalse);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, finish2, merge);
Node* load = Load(FieldAccessAtIndex(0), finish2, phi, merge);
Node* result = Return(allocation, phi);
EndGraph();
graph()->end()->AppendInput(zone(), load);
Analysis();
ExpectEscaped(allocation);
ExpectEscaped(allocation2);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, DanglingLoadOrder) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
Node* allocation = Allocate(Constant(kPointerSize));
Node* store1 = Store(FieldAccessAtIndex(0), allocation, object1);
Node* load1 = Load(FieldAccessAtIndex(0), allocation);
Node* store2 = Store(FieldAccessAtIndex(0), allocation, object2);
Node* load2 = Load(FieldAccessAtIndex(0), allocation, store1);
Node* result = Return(load2);
EndGraph();
graph()->end()->AppendInput(zone(), store2);
graph()->end()->AppendInput(zone(), load1);
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load1, object1);
ExpectReplacement(load2, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
}
TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
Node* state_values1 = graph()->NewNode(
common()->StateValues(1, SparseInputMask::Dense()), finish);
Node* state_values2 =
graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* state_values3 =
graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* frame_state = graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
state_values1, state_values2, state_values3, UndefinedConstant(),
graph()->start(), graph()->start());
Node* deopt = graph()->NewNode(
common()->Deoptimize(DeoptimizeKind::kEager, DeoptimizeReason::kNoReason),
frame_state, effect1, ifFalse);
Node* ifTrue = IfTrue();
Node* load = Load(FieldAccessAtIndex(0), finish, effect1, ifTrue);
Node* result = Return(load, effect1, ifTrue);
EndGraph();
graph()->end()->AppendInput(zone(), deopt);
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);
ASSERT_EQ(1, object_state->op()->ValueInputCount());
ASSERT_EQ(object1, NodeProperties::GetValueInput(object_state, 0));
}
TEST_F(EscapeAnalysisTest, DISABLED_DeoptReplacementIdentity) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize * 2));
Store(FieldAccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(kPointerSize), allocation, allocation);
Node* finish = FinishRegion(allocation);
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
Node* state_values1 = graph()->NewNode(
common()->StateValues(1, SparseInputMask::Dense()), finish);
Node* state_values2 = graph()->NewNode(
common()->StateValues(1, SparseInputMask::Dense()), finish);
Node* state_values3 =
graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* frame_state = graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
state_values1, state_values2, state_values3, UndefinedConstant(),
graph()->start(), graph()->start());
Node* deopt = graph()->NewNode(
common()->Deoptimize(DeoptimizeKind::kEager, DeoptimizeReason::kNoReason),
frame_state, effect1, ifFalse);
Node* ifTrue = IfTrue();
Node* load = Load(FieldAccessAtIndex(0), finish, effect1, ifTrue);
Node* result = Return(load, effect1, ifTrue);
EndGraph();
graph()->end()->AppendInput(zone(), deopt);
Analysis();
ExpectVirtual(allocation);
ExpectReplacement(load, object1);
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);
ASSERT_EQ(2, object_state->op()->ValueInputCount());
ASSERT_EQ(object1, NodeProperties::GetValueInput(object_state, 0));
ASSERT_EQ(object_state, NodeProperties::GetValueInput(object_state, 1));
Node* object_state2 = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state, object_state2);
}
} // namespace compiler
} // namespace internal
} // namespace v8
...@@ -49,7 +49,6 @@
     'compiler/dead-code-elimination-unittest.cc',
     'compiler/diamond-unittest.cc',
     'compiler/effect-control-linearizer-unittest.cc',
-    'compiler/escape-analysis-unittest.cc',
     'compiler/graph-reducer-unittest.cc',
     'compiler/graph-reducer-unittest.h',
     'compiler/graph-trimmer-unittest.cc',
...