Commit 5259af60 authored by sigurds's avatar sigurds Committed by Commit bot

[turbofan] Memory improvements for escape analysis

This CL reduces the memory overhead of escape analysis
by introducing a "copy on demand" strategy for virtual states
and virtual objects.

BUG=v8:4586
LOG=n

Review URL: https://codereview.chromium.org/1606613002

Cr-Commit-Position: refs/heads/master@{#33491}
parent 5eff5420
......@@ -27,11 +27,16 @@ EscapeAnalysisReducer::EscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
jsgraph_(jsgraph),
escape_analysis_(escape_analysis),
zone_(zone),
visited_(static_cast<int>(jsgraph->graph()->NodeCount() * 2), zone),
fully_reduced_(static_cast<int>(jsgraph->graph()->NodeCount() * 2), zone),
exists_virtual_allocate_(true) {}
Reduction EscapeAnalysisReducer::Reduce(Node* node) {
if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
fully_reduced_.Contains(node->id())) {
return NoChange();
}
switch (node->opcode()) {
case IrOpcode::kLoadField:
case IrOpcode::kLoadElement:
......@@ -47,35 +52,38 @@ Reduction EscapeAnalysisReducer::Reduce(Node* node) {
return ReduceReferenceEqual(node);
case IrOpcode::kObjectIsSmi:
return ReduceObjectIsSmi(node);
// FrameStates and Value nodes are preprocessed here,
// and visited via ReduceFrameStateUses from their user nodes.
case IrOpcode::kFrameState:
case IrOpcode::kStateValues: {
if (node->id() >= static_cast<NodeId>(visited_.length()) ||
visited_.Contains(node->id())) {
if (node->id() >= static_cast<NodeId>(fully_reduced_.length()) ||
fully_reduced_.Contains(node->id())) {
break;
}
bool needs_visit = false;
bool depends_on_object_state = false;
for (int i = 0; i < node->InputCount(); i++) {
Node* input = node->InputAt(i);
switch (input->opcode()) {
case IrOpcode::kAllocate:
case IrOpcode::kFinishRegion:
needs_visit = needs_visit || escape_analysis()->IsVirtual(input);
depends_on_object_state =
depends_on_object_state || escape_analysis()->IsVirtual(input);
break;
case IrOpcode::kFrameState:
case IrOpcode::kStateValues:
needs_visit =
needs_visit ||
input->id() >= static_cast<NodeId>(visited_.length()) ||
!visited_.Contains(input->id());
depends_on_object_state =
depends_on_object_state ||
input->id() >= static_cast<NodeId>(fully_reduced_.length()) ||
!fully_reduced_.Contains(input->id());
break;
default:
break;
}
}
if (!needs_visit) {
visited_.Add(node->id());
if (!depends_on_object_state) {
fully_reduced_.Add(node->id());
}
break;
return NoChange();
}
default:
// TODO(sigurds): Change this to GetFrameStateInputCount once
......@@ -93,10 +101,10 @@ Reduction EscapeAnalysisReducer::Reduce(Node* node) {
Reduction EscapeAnalysisReducer::ReduceLoad(Node* node) {
DCHECK(node->opcode() == IrOpcode::kLoadField ||
node->opcode() == IrOpcode::kLoadElement);
if (visited_.Contains(node->id())) return NoChange();
visited_.Add(node->id());
if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
fully_reduced_.Add(node->id());
}
if (Node* rep = escape_analysis()->GetReplacement(node)) {
visited_.Add(node->id());
counters()->turbo_escape_loads_replaced()->Increment();
TRACE("Replaced #%d (%s) with #%d (%s)\n", node->id(),
node->op()->mnemonic(), rep->id(), rep->op()->mnemonic());
......@@ -110,8 +118,9 @@ Reduction EscapeAnalysisReducer::ReduceLoad(Node* node) {
Reduction EscapeAnalysisReducer::ReduceStore(Node* node) {
DCHECK(node->opcode() == IrOpcode::kStoreField ||
node->opcode() == IrOpcode::kStoreElement);
if (visited_.Contains(node->id())) return NoChange();
visited_.Add(node->id());
if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
fully_reduced_.Add(node->id());
}
if (escape_analysis()->IsVirtual(NodeProperties::GetValueInput(node, 0))) {
TRACE("Removed #%d (%s) from effect chain\n", node->id(),
node->op()->mnemonic());
......@@ -124,8 +133,6 @@ Reduction EscapeAnalysisReducer::ReduceStore(Node* node) {
Reduction EscapeAnalysisReducer::ReduceAllocate(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
if (visited_.Contains(node->id())) return NoChange();
visited_.Add(node->id());
if (escape_analysis()->IsVirtual(node)) {
RelaxEffectsAndControls(node);
counters()->turbo_escape_allocs_replaced()->Increment();
......@@ -140,6 +147,9 @@ Reduction EscapeAnalysisReducer::ReduceFinishRegion(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
Node* effect = NodeProperties::GetEffectInput(node, 0);
if (effect->opcode() == IrOpcode::kBeginRegion) {
if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
fully_reduced_.Add(node->id());
}
RelaxEffectsAndControls(effect);
RelaxEffectsAndControls(node);
#ifdef DEBUG
......@@ -177,6 +187,7 @@ Reduction EscapeAnalysisReducer::ReduceReferenceEqual(Node* node) {
// Left-hand side is not a virtual object.
ReplaceWithValue(node, jsgraph()->FalseConstant());
TRACE("Replaced ref eq #%d with false\n", node->id());
return Replace(node);
}
return NoChange();
}
......@@ -195,8 +206,6 @@ Reduction EscapeAnalysisReducer::ReduceObjectIsSmi(Node* node) {
Reduction EscapeAnalysisReducer::ReduceFrameStateUses(Node* node) {
if (visited_.Contains(node->id())) return NoChange();
visited_.Add(node->id());
DCHECK_GE(node->op()->EffectInputCount(), 1);
bool changed = false;
for (int i = 0; i < node->InputCount(); ++i) {
......@@ -220,8 +229,8 @@ Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
bool multiple_users) {
DCHECK(node->opcode() == IrOpcode::kFrameState ||
node->opcode() == IrOpcode::kStateValues);
if (node->id() < static_cast<NodeId>(visited_.length()) &&
visited_.Contains(node->id())) {
if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
fully_reduced_.Contains(node->id())) {
return nullptr;
}
TRACE("Reducing %s %d\n", node->op()->mnemonic(), node->id());
......@@ -263,6 +272,9 @@ Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
}
}
}
if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
fully_reduced_.Add(node->id());
}
return clone;
}
......@@ -274,6 +286,10 @@ Node* EscapeAnalysisReducer::ReduceStateValueInput(Node* node, int node_index,
bool already_cloned,
bool multiple_users) {
Node* input = NodeProperties::GetValueInput(node, node_index);
if (node->id() < static_cast<NodeId>(fully_reduced_.length()) &&
fully_reduced_.Contains(node->id())) {
return nullptr;
}
TRACE("Reducing State Input #%d (%s)\n", input->id(),
input->op()->mnemonic());
Node* clone = nullptr;
......@@ -307,6 +323,36 @@ Counters* EscapeAnalysisReducer::counters() const {
return jsgraph_->isolate()->counters();
}
// Debug-only reducer that checks the result of escape analysis replacement:
// once the EscapeAnalysisReducer has run, no Allocate node may still be
// classified as virtual by the analysis.
class EscapeAnalysisVerifier final : public AdvancedReducer {
 public:
  EscapeAnalysisVerifier(Editor* editor, EscapeAnalysis* escape_analysis)
      : AdvancedReducer(editor), escape_analysis_(escape_analysis) {}

  // Visits a single node; only Allocate nodes are interesting here.
  Reduction Reduce(Node* node) final {
    if (node->opcode() == IrOpcode::kAllocate) {
      // A virtual allocation surviving to this point indicates a bug in the
      // reducer, so fail hard in debug builds.
      CHECK(!escape_analysis_->IsVirtual(node));
    }
    return NoChange();
  }

 private:
  EscapeAnalysis* escape_analysis_;  // Not owned.
};
// Debug-only sanity pass: re-walks the entire graph with an
// EscapeAnalysisVerifier to confirm that no virtual allocation was left
// behind by the reduction. Compiles to a no-op in release builds.
void EscapeAnalysisReducer::VerifyReplacement() const {
#ifdef DEBUG
  GraphReducer reducer(zone(), jsgraph()->graph());
  EscapeAnalysisVerifier checker(&reducer, escape_analysis());
  reducer.AddReducer(&checker);
  reducer.ReduceGraph();
#endif  // DEBUG
}
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -32,6 +32,7 @@ class EscapeAnalysisReducer final : public AdvancedReducer {
void SetExistsVirtualAllocate(bool exists) {
exists_virtual_allocate_ = exists;
}
void VerifyReplacement() const;
private:
Reduction ReduceLoad(Node* node);
......@@ -56,7 +57,7 @@ class EscapeAnalysisReducer final : public AdvancedReducer {
Zone* const zone_;
// fully_reduced_ marks nodes we already processed (allocs, loads, stores)
// and nodes that do not need a visit from ReduceDeoptState etc.
BitVector visited_;
BitVector fully_reduced_;
bool exists_virtual_allocate_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisReducer);
......
This diff is collapsed.
......@@ -22,30 +22,56 @@ class VirtualObject;
// EscapeStatusAnalysis determines for each allocation whether it escapes.
class EscapeStatusAnalysis {
public:
typedef NodeId Alias;
~EscapeStatusAnalysis();
enum EscapeStatusFlag {
enum Status {
kUnknown = 0u,
kTracked = 1u << 0,
kEscaped = 1u << 1,
kOnStack = 1u << 2,
kVisited = 1u << 3,
// A node is dangling, if it is a load of some kind, and does not have
// an effect successor.
kDanglingComputed = 1u << 4,
kDangling = 1u << 5,
// A node is an effect branch point, if it has more than 2 non-dangling
// effect successors.
kBranchPointComputed = 1u << 6,
kBranchPoint = 1u << 7,
};
typedef base::Flags<EscapeStatusFlag, unsigned char> EscapeStatusFlags;
typedef base::Flags<Status, unsigned char> StatusFlags;
void Run();
void RunStatusAnalysis();
bool IsVirtual(Node* node);
bool IsEscaped(Node* node);
bool IsAllocation(Node* node);
void DebugPrint();
friend class EscapeAnalysis;
private:
EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
Zone* zone);
void EnqueueForStatusAnalysis(Node* node);
bool SetEscaped(Node* node);
bool IsEffectBranchPoint(Node* node);
bool IsDanglingEffectNode(Node* node);
void ResizeStatusVector();
size_t GetStatusVectorSize();
bool IsVirtual(NodeId id);
Graph* graph() const { return graph_; }
Zone* zone() const { return zone_; }
void AssignAliases();
Alias GetAlias(NodeId id) const { return aliases_[id]; }
const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
Alias AliasCount() const { return next_free_alias_; }
static const Alias kNotReachable;
static const Alias kUntrackable;
bool IsNotReachable(Node* node);
ZoneVector<Node*>& stack() { return stack_; }
private:
void Process(Node* node);
void ProcessAllocate(Node* node);
void ProcessFinishRegion(Node* node);
......@@ -57,27 +83,27 @@ class EscapeStatusAnalysis {
bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
void RevisitUses(Node* node);
void RevisitInputs(Node* node);
bool SetEscaped(Node* node);
bool IsVirtual(NodeId id);
Alias NextAlias() { return next_free_alias_++; }
bool HasEntry(Node* node);
void Resize();
size_t size();
bool IsAllocationPhi(Node* node);
Graph* graph() const { return graph_; }
Zone* zone() const { return zone_; }
bool IsAllocationPhi(Node* node);
ZoneVector<Node*> stack_;
EscapeAnalysis* object_analysis_;
Graph* const graph_;
Zone* const zone_;
ZoneVector<EscapeStatusFlags> status_;
ZoneDeque<Node*> queue_;
ZoneVector<StatusFlags> status_;
Alias next_free_alias_;
ZoneVector<Node*> status_stack_;
ZoneVector<Alias> aliases_;
DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
};
DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::EscapeStatusFlags)
DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)
// Forward Declaration.
......@@ -88,8 +114,7 @@ class MergeCache;
// an object is virtual and eliminated.
class EscapeAnalysis {
public:
typedef NodeId Alias;
using Alias = EscapeStatusAnalysis::Alias;
EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common, Zone* zone);
~EscapeAnalysis();
......@@ -104,7 +129,6 @@ class EscapeAnalysis {
private:
void RunObjectAnalysis();
void AssignAliases();
bool Process(Node* node);
void ProcessLoadField(Node* node);
void ProcessStoreField(Node* node);
......@@ -120,10 +144,10 @@ class EscapeAnalysis {
VirtualState* states);
void ForwardVirtualState(Node* node);
bool IsEffectBranchPoint(Node* node);
bool IsDanglingEffectNode(Node* node);
int OffsetFromAccess(Node* node);
VirtualState* CopyForModificationAt(VirtualState* state, Node* node);
VirtualObject* CopyForModificationAt(VirtualObject* obj, VirtualState* state,
Node* node);
VirtualObject* GetVirtualObject(Node* at, NodeId id);
VirtualObject* ResolveVirtualObject(VirtualState* state, Node* node);
Node* GetReplacementIfSame(ZoneVector<VirtualObject*>& objs);
......@@ -142,24 +166,27 @@ class EscapeAnalysis {
void DebugPrintState(VirtualState* state);
void DebugPrintObject(VirtualObject* state, Alias id);
Alias NextAlias() { return next_free_alias_++; }
Alias AliasCount() const { return next_free_alias_; }
Graph* graph() const { return graph_; }
Graph* graph() const { return status_analysis_.graph(); }
Zone* zone() const { return status_analysis_.zone(); }
CommonOperatorBuilder* common() const { return common_; }
Zone* zone() const { return zone_; }
ZoneVector<Node*>& stack() { return status_analysis_.stack(); }
bool IsEffectBranchPoint(Node* node) {
return status_analysis_.IsEffectBranchPoint(node);
}
bool IsDanglingEffectNode(Node* node) {
return status_analysis_.IsDanglingEffectNode(node);
}
bool IsNotReachable(Node* node) {
return status_analysis_.IsNotReachable(node);
}
Alias GetAlias(NodeId id) const { return status_analysis_.GetAlias(id); }
Alias AliasCount() const { return status_analysis_.AliasCount(); }
static const Alias kNotReachable;
static const Alias kUntrackable;
Graph* const graph_;
EscapeStatusAnalysis status_analysis_;
CommonOperatorBuilder* const common_;
Zone* const zone_;
ZoneVector<VirtualState*> virtual_states_;
ZoneVector<Node*> replacements_;
EscapeStatusAnalysis escape_status_;
MergeCache* cache_;
ZoneVector<Alias> aliases_;
Alias next_free_alias_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysis);
};
......
......@@ -668,6 +668,7 @@ struct EscapeAnalysisPhase {
escape_analysis.ExistsVirtualAllocate());
AddReducer(data, &graph_reducer, &escape_reducer);
graph_reducer.ReduceGraph();
escape_reducer.VerifyReplacement();
}
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment