Commit 4efbeac1 authored by sigurds, committed by Commit bot

[turbofan] Improve escape analysis.

* Treat Select nodes as escaping
* Correctly invalidate virtual field information
  after a store to a non-constant index
* Add a shortcut if all allocations escape
* Add a shortcut if no allocations are discovered
* Only reduce FrameState/StateValues nodes if they
  have virtual allocations as inputs (transitively)
* Fix bug in FrameState/StateValues duplication
* Add a check to the verifier: the first 3 inputs
  of a FrameState must be StateValues

R=mstarzinger@chromium.org
BUG=v8:4586
LOG=n

Review URL: https://codereview.chromium.org/1583213003

Cr-Commit-Position: refs/heads/master@{#33406}
parent ed24dfe8
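
For orientation: the verifier change near the bottom of this CL enforces that the first three value inputs of every FrameState are StateValues (or TypedStateValues) nodes. Below is a minimal sketch of the expected input layout, following the construction pattern used in the unit tests further down; the BailoutId/OutputFrameStateCombine arguments and the receiver/context/closure nodes are illustrative stand-ins, not taken from the patch:

// FrameState carries 6 inputs; the new verifier check constrains the first 3:
//   0: parameters -- StateValues or TypedStateValues
//   1: locals     -- StateValues or TypedStateValues
//   2: stack      -- StateValues or TypedStateValues
//   3: context, 4: closure, 5: outer FrameState (or the start node)
Node* params = graph()->NewNode(common()->StateValues(1), receiver);
Node* locals = graph()->NewNode(common()->StateValues(0));
Node* stack = graph()->NewNode(common()->StateValues(0));
Node* frame_state = graph()->NewNode(
    common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
                         nullptr),
    params, locals, stack, context, closure, graph()->start());
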
......@@ -18,7 +18,8 @@ EscapeAnalysisReducer::EscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
jsgraph_(jsgraph),
escape_analysis_(escape_analysis),
zone_(zone),
visited_(static_cast<int>(jsgraph->graph()->NodeCount()), zone) {}
visited_(static_cast<int>(jsgraph->graph()->NodeCount() * 2), zone),
exists_virtual_allocate_(true) {}
Reduction EscapeAnalysisReducer::Reduce(Node* node) {
......@@ -37,11 +38,41 @@ Reduction EscapeAnalysisReducer::Reduce(Node* node) {
return ReduceReferenceEqual(node);
case IrOpcode::kObjectIsSmi:
return ReduceObjectIsSmi(node);
case IrOpcode::kFrameState:
case IrOpcode::kStateValues: {
if (node->id() >= static_cast<NodeId>(visited_.length()) ||
visited_.Contains(node->id())) {
break;
}
bool needs_visit = false;
for (int i = 0; i < node->InputCount(); i++) {
Node* input = node->InputAt(i);
switch (input->opcode()) {
case IrOpcode::kAllocate:
case IrOpcode::kFinishRegion:
needs_visit = needs_visit || escape_analysis()->IsVirtual(input);
break;
case IrOpcode::kFrameState:
case IrOpcode::kStateValues:
needs_visit =
needs_visit ||
input->id() >= static_cast<NodeId>(visited_.length()) ||
!visited_.Contains(input->id());
break;
default:
break;
}
}
if (!needs_visit) {
visited_.Add(node->id());
}
break;
}
default:
// TODO(sigurds): Change this to GetFrameStateInputCount once
// it is working. For now we use EffectInputCount > 0 to determine
// whether a node might have a frame state input.
if (node->op()->EffectInputCount() > 0) {
if (exists_virtual_allocate_ && node->op()->EffectInputCount() > 0) {
return ReduceFrameStateUses(node);
}
break;
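
Put differently: a FrameState or StateValues node is marked visit-free once none of its Allocate/FinishRegion inputs is virtual and every nested deopt-state input has already been marked, so the marking propagates bottom-up through StateValues trees. A hypothetical standalone predicate spelling out the same condition (not part of the patch, shown only to make the invariant explicit):

// True iff ReduceDeoptState still has work to do for this node.
bool NeedsVisit(Node* node, EscapeAnalysis* analysis, const BitVector& visited) {
  for (Node* input : node->inputs()) {
    switch (input->opcode()) {
      case IrOpcode::kAllocate:
      case IrOpcode::kFinishRegion:
        if (analysis->IsVirtual(input)) return true;  // reaches a virtual allocation
        break;
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
        if (input->id() >= static_cast<NodeId>(visited.length()) ||
            !visited.Contains(input->id())) {
          return true;  // nested state not yet known to be allocation-free
        }
        break;
      default:
        break;
    }
  }
  return false;
}
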
......@@ -174,7 +205,7 @@ Reduction EscapeAnalysisReducer::ReduceFrameStateUses(Node* node) {
for (int i = 0; i < node->InputCount(); ++i) {
Node* input = node->InputAt(i);
if (input->opcode() == IrOpcode::kFrameState) {
if (Node* ret = ReduceFrameState(input, node, false)) {
if (Node* ret = ReduceDeoptState(input, node, false)) {
node->ReplaceInput(i, ret);
changed = true;
}
......@@ -188,76 +219,61 @@ Reduction EscapeAnalysisReducer::ReduceFrameStateUses(Node* node) {
// Returns the clone if it duplicated the node, and null otherwise.
Node* EscapeAnalysisReducer::ReduceFrameState(Node* node, Node* effect,
Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
bool multiple_users) {
DCHECK(node->opcode() == IrOpcode::kFrameState);
DCHECK(node->opcode() == IrOpcode::kFrameState ||
node->opcode() == IrOpcode::kStateValues);
if (node->id() < static_cast<NodeId>(visited_.length()) &&
visited_.Contains(node->id())) {
return nullptr;
}
if (FLAG_trace_turbo_escape) {
PrintF("Reducing FrameState %d\n", node->id());
PrintF("Reducing %s %d\n", node->op()->mnemonic(), node->id());
}
Node* clone = nullptr;
bool node_multiused = node->UseCount() > 1;
bool multiple_users_rec = multiple_users || node_multiused;
for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
Node* input = NodeProperties::GetValueInput(node, i);
Node* ret =
input->opcode() == IrOpcode::kStateValues
? ReduceStateValueInputs(input, effect, node->UseCount() > 1)
: ReduceStateValueInput(node, i, effect, node->UseCount() > 1);
if (ret) {
if (node->UseCount() > 1 || multiple_users) {
if (FLAG_trace_turbo_escape) {
PrintF(" Cloning #%d", node->id());
}
node = clone = jsgraph()->graph()->CloneNode(node);
if (FLAG_trace_turbo_escape) {
PrintF(" to #%d\n", node->id());
if (input->opcode() == IrOpcode::kStateValues) {
if (Node* ret = ReduceDeoptState(input, effect, multiple_users_rec)) {
if (node_multiused || (multiple_users && !clone)) {
if (FLAG_trace_turbo_escape) {
PrintF(" Cloning #%d", node->id());
}
node = clone = jsgraph()->graph()->CloneNode(node);
if (FLAG_trace_turbo_escape) {
PrintF(" to #%d\n", node->id());
}
node_multiused = false;
}
multiple_users = false; // Don't clone anymore.
NodeProperties::ReplaceValueInput(node, ret, i);
}
} else {
if (Node* ret = ReduceStateValueInput(node, i, effect, node_multiused,
clone, multiple_users)) {
DCHECK_NULL(clone);
node_multiused = false; // Don't clone anymore.
node = clone = ret;
}
NodeProperties::ReplaceValueInput(node, ret, i);
}
}
Node* outer_frame_state = NodeProperties::GetFrameStateInput(node, 0);
if (outer_frame_state->opcode() == IrOpcode::kFrameState) {
if (Node* ret =
ReduceFrameState(outer_frame_state, effect, node->UseCount() > 1)) {
if (node->UseCount() > 1 || multiple_users) {
if (FLAG_trace_turbo_escape) {
PrintF(" Cloning #%d", node->id());
}
node = clone = jsgraph()->graph()->CloneNode(node);
if (FLAG_trace_turbo_escape) {
PrintF(" to #%d\n", node->id());
if (node->opcode() == IrOpcode::kFrameState) {
Node* outer_frame_state = NodeProperties::GetFrameStateInput(node, 0);
if (outer_frame_state->opcode() == IrOpcode::kFrameState) {
if (Node* ret =
ReduceDeoptState(outer_frame_state, effect, multiple_users_rec)) {
if (node_multiused || (multiple_users && !clone)) {
if (FLAG_trace_turbo_escape) {
PrintF(" Cloning #%d", node->id());
}
node = clone = jsgraph()->graph()->CloneNode(node);
if (FLAG_trace_turbo_escape) {
PrintF(" to #%d\n", node->id());
}
}
multiple_users = false;
NodeProperties::ReplaceFrameStateInput(node, 0, ret);
}
NodeProperties::ReplaceFrameStateInput(node, 0, ret);
}
}
return clone;
}
// Returns the clone if it duplicated the node, and null otherwise.
Node* EscapeAnalysisReducer::ReduceStateValueInputs(Node* node, Node* effect,
bool multiple_users) {
if (FLAG_trace_turbo_escape) {
PrintF("Reducing StateValue #%d\n", node->id());
}
DCHECK(node->opcode() == IrOpcode::kStateValues);
DCHECK_NOT_NULL(effect);
Node* clone = nullptr;
for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
Node* input = NodeProperties::GetValueInput(node, i);
Node* ret = nullptr;
if (input->opcode() == IrOpcode::kStateValues) {
ret = ReduceStateValueInputs(input, effect, multiple_users);
} else {
ret = ReduceStateValueInput(node, i, effect, multiple_users);
}
if (ret) {
node = ret;
DCHECK_NULL(clone);
clone = ret;
multiple_users = false;
}
}
return clone;
......@@ -267,6 +283,8 @@ Node* EscapeAnalysisReducer::ReduceStateValueInputs(Node* node, Node* effect,
// Returns the clone if it duplicated the node, and null otherwise.
Node* EscapeAnalysisReducer::ReduceStateValueInput(Node* node, int node_index,
Node* effect,
bool node_multiused,
bool already_cloned,
bool multiple_users) {
Node* input = NodeProperties::GetValueInput(node, node_index);
if (FLAG_trace_turbo_escape) {
......@@ -279,7 +297,7 @@ Node* EscapeAnalysisReducer::ReduceStateValueInput(Node* node, int node_index,
if (escape_analysis()->IsVirtual(input)) {
if (Node* object_state =
escape_analysis()->GetOrCreateObjectState(effect, input)) {
if (node->UseCount() > 1 || multiple_users) {
if (node_multiused || (multiple_users && !already_cloned)) {
if (FLAG_trace_turbo_escape) {
PrintF("Cloning #%d", node->id());
}
......@@ -287,6 +305,8 @@ Node* EscapeAnalysisReducer::ReduceStateValueInput(Node* node, int node_index,
if (FLAG_trace_turbo_escape) {
PrintF(" to #%d\n", node->id());
}
node_multiused = false;
already_cloned = true;
}
NodeProperties::ReplaceValueInput(node, object_state, node_index);
if (FLAG_trace_turbo_escape) {
......
......@@ -29,6 +29,9 @@ class EscapeAnalysisReducer final : public AdvancedReducer {
EscapeAnalysis* escape_analysis, Zone* zone);
Reduction Reduce(Node* node) final;
void SetExistsVirtualAllocate(bool exists) {
exists_virtual_allocate_ = exists;
}
private:
Reduction ReduceLoad(Node* node);
......@@ -38,9 +41,9 @@ class EscapeAnalysisReducer final : public AdvancedReducer {
Reduction ReduceReferenceEqual(Node* node);
Reduction ReduceObjectIsSmi(Node* node);
Reduction ReduceFrameStateUses(Node* node);
Node* ReduceFrameState(Node* node, Node* effect, bool multiple_users);
Node* ReduceStateValueInputs(Node* node, Node* effect, bool multiple_users);
Node* ReduceDeoptState(Node* node, Node* effect, bool multiple_users);
Node* ReduceStateValueInput(Node* node, int node_index, Node* effect,
bool node_multiused, bool already_cloned,
bool multiple_users);
JSGraph* jsgraph() const { return jsgraph_; }
......@@ -51,7 +54,10 @@ class EscapeAnalysisReducer final : public AdvancedReducer {
JSGraph* const jsgraph_;
EscapeAnalysis* escape_analysis_;
Zone* const zone_;
// visited_ marks nodes we already processed (allocs, loads, stores)
// and nodes that do not need a visit from ReduceDeoptState etc.
BitVector visited_;
bool exists_virtual_allocate_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisReducer);
};
......
This diff is collapsed.
......@@ -58,6 +58,7 @@ class EscapeStatusAnalysis {
void RevisitUses(Node* node);
void RevisitInputs(Node* node);
bool SetEscaped(Node* node);
bool IsVirtual(NodeId id);
bool HasEntry(Node* node);
void Resize();
size_t size();
......@@ -99,6 +100,7 @@ class EscapeAnalysis {
bool IsEscaped(Node* node);
bool CompareVirtualObjects(Node* left, Node* right);
Node* GetOrCreateObjectState(Node* effect, Node* node);
bool ExistsVirtualAllocate();
private:
void RunObjectAnalysis();
......
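
The body of EscapeAnalysis::ExistsVirtualAllocate is part of the collapsed diff above (presumably src/compiler/escape-analysis.cc) and is not shown here. A plausible sketch of what the query has to answer, given the declarations in this header; the iteration over tracked allocation ids and the status_analysis_ member name are assumptions, not taken from the patch:

// Sketch only: reports whether the analysis left at least one allocation
// virtual, which is what lets the reducer skip deopt-state rewriting when
// there is nothing to materialize.
bool EscapeAnalysis::ExistsVirtualAllocate() {
  for (NodeId id : tracked_allocation_ids_) {  // hypothetical bookkeeping
    if (status_analysis_.IsVirtual(id)) return true;
  }
  return false;
}
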
......@@ -664,6 +664,8 @@ struct EscapeAnalysisPhase {
JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
EscapeAnalysisReducer escape_reducer(&graph_reducer, data->jsgraph(),
&escape_analysis, temp_zone);
escape_reducer.SetExistsVirtualAllocate(
escape_analysis.ExistsVirtualAllocate());
AddReducer(data, &graph_reducer, &escape_reducer);
graph_reducer.ReduceGraph();
}
......
......@@ -428,13 +428,20 @@ void Verifier::Visitor::Check(Node* node) {
}
break;
}
case IrOpcode::kFrameState:
case IrOpcode::kFrameState: {
// TODO(jarin): what are the constraints on these?
CHECK_EQ(5, value_count);
CHECK_EQ(0, control_count);
CHECK_EQ(0, effect_count);
CHECK_EQ(6, input_count);
for (int i = 0; i < 3; ++i) {
CHECK(NodeProperties::GetValueInput(node, i)->opcode() ==
IrOpcode::kStateValues ||
NodeProperties::GetValueInput(node, i)->opcode() ==
IrOpcode::kTypedStateValues);
}
break;
}
case IrOpcode::kStateValues:
case IrOpcode::kObjectState:
case IrOpcode::kTypedStateValues:
......
......@@ -85,6 +85,20 @@ class EscapeAnalysisTest : public GraphTest {
allocation, value, effect, control);
}
Node* StoreElement(const ElementAccess& access, Node* allocation, Node* index,
Node* value, Node* effect = nullptr,
Node* control = nullptr) {
if (!effect) {
effect = effect_;
}
if (!control) {
control = control_;
}
return effect_ =
graph()->NewNode(simplified()->StoreElement(access), allocation,
index, value, effect, control);
}
Node* Load(const FieldAccess& access, Node* from, Node* effect = nullptr,
Node* control = nullptr) {
if (!effect) {
......@@ -131,12 +145,18 @@ class EscapeAnalysisTest : public GraphTest {
return control_ = graph()->NewNode(common()->Merge(2), control1, control2);
}
FieldAccess AccessAtIndex(int offset) {
FieldAccess FieldAccessAtIndex(int offset) {
FieldAccess access = {kTaggedBase, offset, MaybeHandle<Name>(), Type::Any(),
MachineType::AnyTagged()};
return access;
}
ElementAccess MakeElementAccess(int header_size) {
ElementAccess access = {kTaggedBase, header_size, Type::Any(),
MachineType::AnyTagged()};
return access;
}
// ---------------------------------Assertion Helper--------------------------
void ExpectReplacement(Node* node, Node* rep) {
......@@ -166,6 +186,7 @@ class EscapeAnalysisTest : public GraphTest {
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
Node* effect() { return effect_; }
Node* control() { return control_; }
private:
SimplifiedOperatorBuilder simplified_;
......@@ -185,9 +206,9 @@ TEST_F(EscapeAnalysisTest, StraightNonEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(AccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(AccessAtIndex(0), finish);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(load);
EndGraph();
......@@ -202,13 +223,39 @@ TEST_F(EscapeAnalysisTest, StraightNonEscape) {
}
TEST_F(EscapeAnalysisTest, StraightNonEscapeNonConstStore) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(FieldAccessAtIndex(0), allocation, object1);
Node* index =
graph()->NewNode(common()->Select(MachineRepresentation::kTagged),
object1, object2, control());
StoreElement(MakeElementAccess(0), allocation, index, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(load);
EndGraph();
Analysis();
ExpectEscaped(allocation);
ExpectReplacement(load, nullptr);
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 0));
}
TEST_F(EscapeAnalysisTest, StraightEscape) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(AccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* load = Load(AccessAtIndex(0), finish);
Node* load = Load(FieldAccessAtIndex(0), finish);
Node* result = Return(allocation);
EndGraph();
graph()->end()->AppendInput(zone(), load);
......@@ -229,15 +276,15 @@ TEST_F(EscapeAnalysisTest, StoreLoadEscape) {
BeginRegion();
Node* allocation1 = Allocate(Constant(kPointerSize));
Store(AccessAtIndex(0), allocation1, object1);
Store(FieldAccessAtIndex(0), allocation1, object1);
Node* finish1 = FinishRegion(allocation1);
BeginRegion();
Node* allocation2 = Allocate(Constant(kPointerSize));
Store(AccessAtIndex(0), allocation2, finish1);
Store(FieldAccessAtIndex(0), allocation2, finish1);
Node* finish2 = FinishRegion(allocation2);
Node* load = Load(AccessAtIndex(0), finish2);
Node* load = Load(FieldAccessAtIndex(0), finish2);
Node* result = Return(load);
EndGraph();
Analysis();
......@@ -257,16 +304,18 @@ TEST_F(EscapeAnalysisTest, BranchNonEscape) {
Node* object2 = Constant(2);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(AccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Branch();
Node* ifFalse = IfFalse();
Node* ifTrue = IfTrue();
Node* effect1 = Store(AccessAtIndex(0), allocation, object1, finish, ifFalse);
Node* effect2 = Store(AccessAtIndex(0), allocation, object2, finish, ifTrue);
Node* effect1 =
Store(FieldAccessAtIndex(0), allocation, object1, finish, ifFalse);
Node* effect2 =
Store(FieldAccessAtIndex(0), allocation, object2, finish, ifTrue);
Node* merge = Merge2(ifFalse, ifTrue);
Node* phi = graph()->NewNode(common()->EffectPhi(2), effect1, effect2, merge);
Node* load = Load(AccessAtIndex(0), finish, phi, merge);
Node* load = Load(FieldAccessAtIndex(0), finish, phi, merge);
Node* result = Return(load, phi);
EndGraph();
graph()->end()->AppendInput(zone(), result);
......@@ -287,10 +336,10 @@ TEST_F(EscapeAnalysisTest, DanglingLoadOrder) {
Node* object1 = Constant(1);
Node* object2 = Constant(2);
Node* allocation = Allocate(Constant(kPointerSize));
Node* store1 = Store(AccessAtIndex(0), allocation, object1);
Node* load1 = Load(AccessAtIndex(0), allocation);
Node* store2 = Store(AccessAtIndex(0), allocation, object2);
Node* load2 = Load(AccessAtIndex(0), allocation, store1);
Node* store1 = Store(FieldAccessAtIndex(0), allocation, object1);
Node* load1 = Load(FieldAccessAtIndex(0), allocation);
Node* store2 = Store(FieldAccessAtIndex(0), allocation, object2);
Node* load2 = Load(FieldAccessAtIndex(0), allocation, store1);
Node* result = Return(load2);
EndGraph();
graph()->end()->AppendInput(zone(), store2);
......@@ -312,9 +361,9 @@ TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize));
Store(AccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(0), allocation, object1);
Node* finish = FinishRegion(allocation);
Node* effect1 = Store(AccessAtIndex(0), allocation, object1, finish);
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
Node* state_values1 = graph()->NewNode(common()->StateValues(1), finish);
......@@ -328,7 +377,7 @@ TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Node* deopt = graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
frame_state, effect1, ifFalse);
Node* ifTrue = IfTrue();
Node* load = Load(AccessAtIndex(0), finish, effect1, ifTrue);
Node* load = Load(FieldAccessAtIndex(0), finish, effect1, ifTrue);
Node* result = Return(load, effect1, ifTrue);
EndGraph();
graph()->end()->AppendInput(zone(), deopt);
......@@ -351,10 +400,10 @@ TEST_F(EscapeAnalysisTest, DeoptReplacementIdentity) {
Node* object1 = Constant(1);
BeginRegion();
Node* allocation = Allocate(Constant(kPointerSize * 2));
Store(AccessAtIndex(0), allocation, object1);
Store(AccessAtIndex(kPointerSize), allocation, allocation);
Store(FieldAccessAtIndex(0), allocation, object1);
Store(FieldAccessAtIndex(kPointerSize), allocation, allocation);
Node* finish = FinishRegion(allocation);
Node* effect1 = Store(AccessAtIndex(0), allocation, object1, finish);
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
Node* state_values1 = graph()->NewNode(common()->StateValues(1), finish);
......@@ -368,7 +417,7 @@ TEST_F(EscapeAnalysisTest, DeoptReplacementIdentity) {
Node* deopt = graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
frame_state, effect1, ifFalse);
Node* ifTrue = IfTrue();
Node* load = Load(AccessAtIndex(0), finish, effect1, ifTrue);
Node* load = Load(FieldAccessAtIndex(0), finish, effect1, ifTrue);
Node* result = Return(load, effect1, ifTrue);
EndGraph();
graph()->end()->AppendInput(zone(), deopt);
......