Commit 3f28ca94 authored by Ross McIlroy, committed by V8 LUCI CQ

[compiler] Simplify and optimize Scheduler::PrepareUses.

Simplifies the traversal of nodes in Scheduler::PrepareUses to
avoid having to carefully order stack traversal for pre/post
ordering visits. Instead, simply pre-visit a node when pushing
it onto the stack, then post-visit it when popping it from the
stack and visiting its inputs. This preserves the required
invariants while reducing visit overhead.

In addition, move the coupled-control-edge check out of
Increment/DecrementUnscheduledUseCount so that the coupled
control edge only needs to be computed once per node, rather
than once for every input of the node. Also remove unnecessary
recursion from these functions.
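
The hoisted form boils down to the following standalone sketch with
made-up types (not the V8 API; in the real code the skip index comes
from Scheduler::GetCoupledControlEdge):

  #include <optional>
  #include <vector>

  struct Node;
  struct Edge {
    Node* to;
    int index;
  };
  struct Node {
    bool coupled = false;         // Stand-in for placement == kCoupled.
    int first_control_index = 0;  // Stand-in for FirstControlIndex().
    std::vector<Edge> inputs;
    int unscheduled_count = 0;
  };

  std::optional<int> GetCoupledControlEdge(Node* node) {
    if (node->coupled) return node->first_control_index;
    return std::nullopt;
  }

  void DecrementUses(Node* node) {
    // Computed once per node rather than once per input edge.
    std::optional<int> coupled_control_edge = GetCoupledControlEdge(node);
    for (Edge const& edge : node->inputs) {
      if (edge.index != coupled_control_edge) {
        --edge.to->unscheduled_count;  // Plain loop body; no recursion.
      }
    }
  }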

All told, these optimizations reduce the PrepareUses overhead
by 40-50%.

BUG=v8:9684

Change-Id: I934523a732892a1f66d7e77f8d04e200169080f1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2863602
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74373}
parent db89ea81
@@ -105,15 +105,8 @@ Scheduler::Placement Scheduler::InitializePlacement(Node* node) {
       data->placement_ = (p == kFixed ? kFixed : kCoupled);
       break;
     }
-#define DEFINE_CONTROL_CASE(V) case IrOpcode::k##V:
-      CONTROL_OP_LIST(DEFINE_CONTROL_CASE)
-#undef DEFINE_CONTROL_CASE
-      {
-        // Control nodes that were not control-reachable from end may float.
-        data->placement_ = kSchedulable;
-        break;
-      }
     default:
+      // Control nodes that were not control-reachable from end may float.
       data->placement_ = kSchedulable;
       break;
   }
@@ -172,31 +165,32 @@ void Scheduler::UpdatePlacement(Node* node, Placement placement) {
   // Reduce the use count of the node's inputs to potentially make them
   // schedulable. If all the uses of a node have been scheduled, then the node
   // itself can be scheduled.
+  base::Optional<int> coupled_control_edge = GetCoupledControlEdge(node);
   for (Edge const edge : node->input_edges()) {
-    DecrementUnscheduledUseCount(edge.to(), edge.index(), edge.from());
+    DCHECK_EQ(node, edge.from());
+    if (edge.index() != coupled_control_edge) {
+      DecrementUnscheduledUseCount(edge.to(), node);
+    }
   }
   data->placement_ = placement;
 }
 
-bool Scheduler::IsCoupledControlEdge(Node* node, int index) {
-  return GetPlacement(node) == kCoupled &&
-         NodeProperties::FirstControlIndex(node) == index;
+base::Optional<int> Scheduler::GetCoupledControlEdge(Node* node) {
+  if (GetPlacement(node) == kCoupled) {
+    return NodeProperties::FirstControlIndex(node);
+  }
+  return {};
 }
 
-void Scheduler::IncrementUnscheduledUseCount(Node* node, int index,
-                                             Node* from) {
-  // Make sure that control edges from coupled nodes are not counted.
-  if (IsCoupledControlEdge(from, index)) return;
-
+void Scheduler::IncrementUnscheduledUseCount(Node* node, Node* from) {
   // Tracking use counts for fixed nodes is useless.
   if (GetPlacement(node) == kFixed) return;
 
   // Use count for coupled nodes is summed up on their control.
   if (GetPlacement(node) == kCoupled) {
-    Node* control = NodeProperties::GetControlInput(node);
-    return IncrementUnscheduledUseCount(control, index, from);
+    node = NodeProperties::GetControlInput(node);
+    DCHECK_NE(GetPlacement(node), Placement::kFixed);
+    DCHECK_NE(GetPlacement(node), Placement::kCoupled);
   }
 
   ++(GetData(node)->unscheduled_count_);
@@ -207,19 +201,15 @@ void Scheduler::IncrementUnscheduledUseCount(Node* node, int index,
   }
 }
 
-void Scheduler::DecrementUnscheduledUseCount(Node* node, int index,
-                                             Node* from) {
-  // Make sure that control edges from coupled nodes are not counted.
-  if (IsCoupledControlEdge(from, index)) return;
-
+void Scheduler::DecrementUnscheduledUseCount(Node* node, Node* from) {
   // Tracking use counts for fixed nodes is useless.
   if (GetPlacement(node) == kFixed) return;
 
   // Use count for coupled nodes is summed up on their control.
   if (GetPlacement(node) == kCoupled) {
-    Node* control = NodeProperties::GetControlInput(node);
-    return DecrementUnscheduledUseCount(control, index, from);
+    node = NodeProperties::GetControlInput(node);
+    DCHECK_NE(GetPlacement(node), Placement::kFixed);
+    DCHECK_NE(GetPlacement(node), Placement::kCoupled);
   }
 
   DCHECK_LT(0, GetData(node)->unscheduled_count_);
@@ -235,7 +225,6 @@ void Scheduler::DecrementUnscheduledUseCount(Node* node, int index,
   }
 }
 
-
 // -----------------------------------------------------------------------------
 // Phase 1: Build control-flow graph.
@@ -1221,10 +1210,26 @@ void Scheduler::GenerateDominatorTree() {
 
 class PrepareUsesVisitor {
  public:
-  explicit PrepareUsesVisitor(Scheduler* scheduler)
-      : scheduler_(scheduler), schedule_(scheduler->schedule_) {}
+  explicit PrepareUsesVisitor(Scheduler* scheduler, Graph* graph, Zone* zone)
+      : scheduler_(scheduler),
+        schedule_(scheduler->schedule_),
+        graph_(graph),
+        visited_(graph_->NodeCount(), false, zone),
+        stack_(zone) {}
 
-  void Pre(Node* node) {
+  void Run() {
+    InitializePlacement(graph_->end());
+    while (!stack_.empty()) {
+      Node* node = stack_.top();
+      stack_.pop();
+      VisitInputs(node);
+    }
+  }
+
+ private:
+  void InitializePlacement(Node* node) {
+    TRACE("Pre #%d:%s\n", node->id(), node->op()->mnemonic());
+    DCHECK(!Visited(node));
     if (scheduler_->InitializePlacement(node) == Scheduler::kFixed) {
       // Fixed nodes are always roots for schedule late.
       scheduler_->schedule_root_nodes_.push_back(node);
@@ -1241,21 +1246,37 @@ class PrepareUsesVisitor {
       schedule_->AddNode(block, node);
     }
+    stack_.push(node);
+    visited_[node->id()] = true;
   }
 
-  void PostEdge(Node* from, int index, Node* to) {
-    // If the edge is from an unscheduled node, then tally it in the use count
-    // for all of its inputs. The same criterion will be used in ScheduleLate
-    // for decrementing use counts.
-    if (!schedule_->IsScheduled(from)) {
-      DCHECK_NE(Scheduler::kFixed, scheduler_->GetPlacement(from));
-      scheduler_->IncrementUnscheduledUseCount(to, index, from);
+  void VisitInputs(Node* node) {
+    DCHECK_NE(scheduler_->GetPlacement(node), Scheduler::kUnknown);
+    bool is_scheduled = schedule_->IsScheduled(node);
+    base::Optional<int> coupled_control_edge =
+        scheduler_->GetCoupledControlEdge(node);
+    for (auto edge : node->input_edges()) {
+      Node* to = edge.to();
+      DCHECK_EQ(node, edge.from());
+      if (!Visited(to)) {
+        InitializePlacement(to);
+      }
+      TRACE("PostEdge #%d:%s->#%d:%s\n", node->id(), node->op()->mnemonic(),
+            to->id(), to->op()->mnemonic());
+      DCHECK_NE(scheduler_->GetPlacement(to), Scheduler::kUnknown);
+      if (!is_scheduled && edge.index() != coupled_control_edge) {
+        scheduler_->IncrementUnscheduledUseCount(to, node);
+      }
     }
   }
 
- private:
+  bool Visited(Node* node) { return visited_[node->id()]; }
+
   Scheduler* scheduler_;
   Schedule* schedule_;
+  Graph* graph_;
+  BoolVector visited_;
+  ZoneStack<Node*> stack_;
 };
@@ -1264,28 +1285,8 @@ void Scheduler::PrepareUses() {
 
   // Count the uses of every node, which is used to ensure that all of a
   // node's uses are scheduled before the node itself.
-  PrepareUsesVisitor prepare_uses(this);
-
-  // TODO(turbofan): simplify the careful pre/post ordering here.
-  BoolVector visited(graph_->NodeCount(), false, zone_);
-  ZoneStack<Node::InputEdges::iterator> stack(zone_);
-  Node* node = graph_->end();
-  prepare_uses.Pre(node);
-  visited[node->id()] = true;
-  stack.push(node->input_edges().begin());
-  while (!stack.empty()) {
-    tick_counter_->TickAndMaybeEnterSafepoint();
-    Edge edge = *stack.top();
-    Node* node = edge.to();
-    if (visited[node->id()]) {
-      prepare_uses.PostEdge(edge.from(), edge.index(), edge.to());
-      if (++stack.top() == edge.from()->input_edges().end()) stack.pop();
-    } else {
-      prepare_uses.Pre(node);
-      visited[node->id()] = true;
-      if (node->InputCount() > 0) stack.push(node->input_edges().begin());
-    }
-  }
+  PrepareUsesVisitor prepare_uses(this, graph_, zone_);
+  prepare_uses.Run();
 }
@@ -1718,9 +1719,13 @@ class ScheduleLateNodeVisitor {
 
   Node* CloneNode(Node* node) {
     int const input_count = node->InputCount();
+    base::Optional<int> coupled_control_edge =
+        scheduler_->GetCoupledControlEdge(node);
     for (int index = 0; index < input_count; ++index) {
-      Node* const input = node->InputAt(index);
-      scheduler_->IncrementUnscheduledUseCount(input, index, node);
+      if (index != coupled_control_edge) {
+        Node* const input = node->InputAt(index);
+        scheduler_->IncrementUnscheduledUseCount(input, node);
+      }
     }
     Node* const copy = scheduler_->graph_->CloneNode(node);
     TRACE(("clone #%d:%s -> #%d\n"), node->id(), node->op()->mnemonic(),
......
@@ -103,9 +103,10 @@ class V8_EXPORT_PRIVATE Scheduler {
   void UpdatePlacement(Node* node, Placement placement);
   bool IsLive(Node* node);
 
-  inline bool IsCoupledControlEdge(Node* node, int index);
-  void IncrementUnscheduledUseCount(Node* node, int index, Node* from);
-  void DecrementUnscheduledUseCount(Node* node, int index, Node* from);
+  // If the node is coupled, returns the coupled control edge index.
+  inline base::Optional<int> GetCoupledControlEdge(Node* node);
+  void IncrementUnscheduledUseCount(Node* node, Node* from);
+  void DecrementUnscheduledUseCount(Node* node, Node* from);
 
   static void PropagateImmediateDominators(BasicBlock* block);
......