Commit 3f28ca94 authored by Ross McIlroy, committed by V8 LUCI CQ

[compiler] Simplify and optimize Scheduler::PrepareUses.

Simplifies the traversal of nodes in Scheduler::PrepareUses to
avoid having to carefully order stack traversal for pre/post
ordering visits. Instead, simply pre-visit a node when pushing
it onto the stack, then post-visit the node when popping it from
the stack and visiting its inputs. This maintains the same
invariants while reducing visit overhead.
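
For illustration, the new traversal boils down to the following
pattern. This is a minimal, self-contained sketch with a
hypothetical Node type and PreVisit/PostVisitEdge stand-ins, not
the actual V8 scheduler API:

#include <cstdio>
#include <stack>
#include <vector>

struct Node {
  int id;
  std::vector<Node*> inputs;
};

void PreVisit(Node* n) { std::printf("pre  #%d\n", n->id); }
void PostVisitEdge(Node* from, Node* to) {
  std::printf("edge #%d -> #%d\n", from->id, to->id);
}

// Pre-visit each node exactly once when it is pushed. When a node is
// popped, process its input edges, pre-visiting any input not yet seen.
// Every node is thus pre-visited before any edge to it is processed --
// the same invariant the old iterator-on-stack traversal maintained.
void Traverse(Node* root, int node_count) {
  std::vector<bool> visited(node_count, false);
  std::stack<Node*> stack;
  PreVisit(root);
  visited[root->id] = true;
  stack.push(root);
  while (!stack.empty()) {
    Node* node = stack.top();
    stack.pop();
    for (Node* input : node->inputs) {
      if (!visited[input->id]) {
        PreVisit(input);
        visited[input->id] = true;
        stack.push(input);
      }
      PostVisitEdge(node, input);
    }
  }
}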

In addition, move the check for coupled control edges out of
Increment/DecrementUnscheduledUseCount so that the coupled
control edge calculation only needs to be done once per node,
rather than once for every input of the node. Also remove
unnecessary recursion from these functions.
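
In sketch form, the hoisting looks like this (hypothetical
stand-in types, not the actual V8 classes): the coupled control
edge index is computed once per node, and the per-input loop
merely compares indices instead of re-deriving the answer for
every edge:

#include <optional>
#include <vector>

struct Node {
  std::vector<Node*> inputs;
  bool coupled = false;      // stand-in for the kCoupled placement
  int unscheduled_count = 0;
};

// Computed once per node rather than once per input edge.
std::optional<int> GetCoupledControlEdge(Node* node) {
  if (node->coupled) return 0;  // assume input 0 is the control input
  return std::nullopt;
}

void DecrementUnscheduledUseCount(Node* input) {
  --input->unscheduled_count;
}

void ReduceUseCounts(Node* node) {
  std::optional<int> coupled_control_edge = GetCoupledControlEdge(node);
  for (int i = 0; i < static_cast<int>(node->inputs.size()); ++i) {
    if (i != coupled_control_edge) {  // skip the coupled control edge
      DecrementUnscheduledUseCount(node->inputs[i]);
    }
  }
}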

All told, these optimizations reduce the PrepareUses overhead
by 40-50%.

BUG=v8:9684

Change-Id: I934523a732892a1f66d7e77f8d04e200169080f1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2863602
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74373}
parent db89ea81
--- a/src/compiler/scheduler.cc
+++ b/src/compiler/scheduler.cc
@@ -105,15 +105,8 @@ Scheduler::Placement Scheduler::InitializePlacement(Node* node) {
       data->placement_ = (p == kFixed ? kFixed : kCoupled);
       break;
     }
-#define DEFINE_CONTROL_CASE(V) case IrOpcode::k##V:
-      CONTROL_OP_LIST(DEFINE_CONTROL_CASE)
-#undef DEFINE_CONTROL_CASE
-    {
-      // Control nodes that were not control-reachable from end may float.
-      data->placement_ = kSchedulable;
-      break;
-    }
     default:
+      // Control nodes that were not control-reachable from end may float.
       data->placement_ = kSchedulable;
       break;
   }
@@ -172,31 +165,32 @@ void Scheduler::UpdatePlacement(Node* node, Placement placement) {
   // Reduce the use count of the node's inputs to potentially make them
   // schedulable. If all the uses of a node have been scheduled, then the node
   // itself can be scheduled.
+  base::Optional<int> coupled_control_edge = GetCoupledControlEdge(node);
   for (Edge const edge : node->input_edges()) {
-    DecrementUnscheduledUseCount(edge.to(), edge.index(), edge.from());
+    DCHECK_EQ(node, edge.from());
+    if (edge.index() != coupled_control_edge) {
+      DecrementUnscheduledUseCount(edge.to(), node);
+    }
   }
   data->placement_ = placement;
 }
 
-bool Scheduler::IsCoupledControlEdge(Node* node, int index) {
-  return GetPlacement(node) == kCoupled &&
-         NodeProperties::FirstControlIndex(node) == index;
+base::Optional<int> Scheduler::GetCoupledControlEdge(Node* node) {
+  if (GetPlacement(node) == kCoupled) {
+    return NodeProperties::FirstControlIndex(node);
+  }
+  return {};
 }
 
-void Scheduler::IncrementUnscheduledUseCount(Node* node, int index,
-                                             Node* from) {
-  // Make sure that control edges from coupled nodes are not counted.
-  if (IsCoupledControlEdge(from, index)) return;
-
+void Scheduler::IncrementUnscheduledUseCount(Node* node, Node* from) {
   // Tracking use counts for fixed nodes is useless.
   if (GetPlacement(node) == kFixed) return;
 
   // Use count for coupled nodes is summed up on their control.
   if (GetPlacement(node) == kCoupled) {
-    Node* control = NodeProperties::GetControlInput(node);
-    return IncrementUnscheduledUseCount(control, index, from);
+    node = NodeProperties::GetControlInput(node);
+    DCHECK_NE(GetPlacement(node), Placement::kFixed);
+    DCHECK_NE(GetPlacement(node), Placement::kCoupled);
   }
 
   ++(GetData(node)->unscheduled_count_);
@@ -207,19 +201,15 @@ void Scheduler::IncrementUnscheduledUseCount(Node* node, int index,
   }
 }
 
-void Scheduler::DecrementUnscheduledUseCount(Node* node, int index,
-                                             Node* from) {
-  // Make sure that control edges from coupled nodes are not counted.
-  if (IsCoupledControlEdge(from, index)) return;
-
+void Scheduler::DecrementUnscheduledUseCount(Node* node, Node* from) {
   // Tracking use counts for fixed nodes is useless.
   if (GetPlacement(node) == kFixed) return;
 
   // Use count for coupled nodes is summed up on their control.
   if (GetPlacement(node) == kCoupled) {
-    Node* control = NodeProperties::GetControlInput(node);
-    return DecrementUnscheduledUseCount(control, index, from);
+    node = NodeProperties::GetControlInput(node);
+    DCHECK_NE(GetPlacement(node), Placement::kFixed);
+    DCHECK_NE(GetPlacement(node), Placement::kCoupled);
   }
 
   DCHECK_LT(0, GetData(node)->unscheduled_count_);
@@ -235,7 +225,6 @@ void Scheduler::DecrementUnscheduledUseCount(Node* node, int index,
   }
 }
 
-
 // -----------------------------------------------------------------------------
 // Phase 1: Build control-flow graph.
@@ -1221,10 +1210,26 @@ void Scheduler::GenerateDominatorTree() {
 
 class PrepareUsesVisitor {
  public:
-  explicit PrepareUsesVisitor(Scheduler* scheduler)
-      : scheduler_(scheduler), schedule_(scheduler->schedule_) {}
+  explicit PrepareUsesVisitor(Scheduler* scheduler, Graph* graph, Zone* zone)
+      : scheduler_(scheduler),
+        schedule_(scheduler->schedule_),
+        graph_(graph),
+        visited_(graph_->NodeCount(), false, zone),
+        stack_(zone) {}
 
-  void Pre(Node* node) {
+  void Run() {
+    InitializePlacement(graph_->end());
+    while (!stack_.empty()) {
+      Node* node = stack_.top();
+      stack_.pop();
+      VisitInputs(node);
+    }
+  }
+
+ private:
+  void InitializePlacement(Node* node) {
     TRACE("Pre #%d:%s\n", node->id(), node->op()->mnemonic());
+    DCHECK(!Visited(node));
     if (scheduler_->InitializePlacement(node) == Scheduler::kFixed) {
       // Fixed nodes are always roots for schedule late.
       scheduler_->schedule_root_nodes_.push_back(node);
@@ -1241,21 +1246,37 @@ class PrepareUsesVisitor {
         schedule_->AddNode(block, node);
       }
     }
+    stack_.push(node);
+    visited_[node->id()] = true;
   }
 
-  void PostEdge(Node* from, int index, Node* to) {
-    // If the edge is from an unscheduled node, then tally it in the use count
-    // for all of its inputs. The same criterion will be used in ScheduleLate
-    // for decrementing use counts.
-    if (!schedule_->IsScheduled(from)) {
-      DCHECK_NE(Scheduler::kFixed, scheduler_->GetPlacement(from));
-      scheduler_->IncrementUnscheduledUseCount(to, index, from);
+  void VisitInputs(Node* node) {
+    DCHECK_NE(scheduler_->GetPlacement(node), Scheduler::kUnknown);
+    bool is_scheduled = schedule_->IsScheduled(node);
+    base::Optional<int> coupled_control_edge =
+        scheduler_->GetCoupledControlEdge(node);
+    for (auto edge : node->input_edges()) {
+      Node* to = edge.to();
+      DCHECK_EQ(node, edge.from());
+      if (!Visited(to)) {
+        InitializePlacement(to);
+      }
+      TRACE("PostEdge #%d:%s->#%d:%s\n", node->id(), node->op()->mnemonic(),
+            to->id(), to->op()->mnemonic());
+      DCHECK_NE(scheduler_->GetPlacement(to), Scheduler::kUnknown);
+      if (!is_scheduled && edge.index() != coupled_control_edge) {
+        scheduler_->IncrementUnscheduledUseCount(to, node);
+      }
     }
   }
 
- private:
+  bool Visited(Node* node) { return visited_[node->id()]; }
+
   Scheduler* scheduler_;
   Schedule* schedule_;
+  Graph* graph_;
+  BoolVector visited_;
+  ZoneStack<Node*> stack_;
 };
@@ -1264,28 +1285,8 @@ void Scheduler::PrepareUses() {
 
   // Count the uses of every node, which is used to ensure that all of a
   // node's uses are scheduled before the node itself.
-  PrepareUsesVisitor prepare_uses(this);
-
-  // TODO(turbofan): simplify the careful pre/post ordering here.
-  BoolVector visited(graph_->NodeCount(), false, zone_);
-  ZoneStack<Node::InputEdges::iterator> stack(zone_);
-  Node* node = graph_->end();
-  prepare_uses.Pre(node);
-  visited[node->id()] = true;
-  stack.push(node->input_edges().begin());
-  while (!stack.empty()) {
-    tick_counter_->TickAndMaybeEnterSafepoint();
-    Edge edge = *stack.top();
-    Node* node = edge.to();
-    if (visited[node->id()]) {
-      prepare_uses.PostEdge(edge.from(), edge.index(), edge.to());
-      if (++stack.top() == edge.from()->input_edges().end()) stack.pop();
-    } else {
-      prepare_uses.Pre(node);
-      visited[node->id()] = true;
-      if (node->InputCount() > 0) stack.push(node->input_edges().begin());
-    }
-  }
+  PrepareUsesVisitor prepare_uses(this, graph_, zone_);
+  prepare_uses.Run();
 }
@@ -1718,9 +1719,13 @@ class ScheduleLateNodeVisitor {
   Node* CloneNode(Node* node) {
     int const input_count = node->InputCount();
+    base::Optional<int> coupled_control_edge =
+        scheduler_->GetCoupledControlEdge(node);
     for (int index = 0; index < input_count; ++index) {
-      Node* const input = node->InputAt(index);
-      scheduler_->IncrementUnscheduledUseCount(input, index, node);
+      if (index != coupled_control_edge) {
+        Node* const input = node->InputAt(index);
+        scheduler_->IncrementUnscheduledUseCount(input, node);
+      }
     }
     Node* const copy = scheduler_->graph_->CloneNode(node);
     TRACE(("clone #%d:%s -> #%d\n"), node->id(), node->op()->mnemonic(),
......
--- a/src/compiler/scheduler.h
+++ b/src/compiler/scheduler.h
@@ -103,9 +103,10 @@ class V8_EXPORT_PRIVATE Scheduler {
   void UpdatePlacement(Node* node, Placement placement);
   bool IsLive(Node* node);
 
-  inline bool IsCoupledControlEdge(Node* node, int index);
-  void IncrementUnscheduledUseCount(Node* node, int index, Node* from);
-  void DecrementUnscheduledUseCount(Node* node, int index, Node* from);
+  // If the node is coupled, returns the coupled control edge index.
+  inline base::Optional<int> GetCoupledControlEdge(Node* node);
+  void IncrementUnscheduledUseCount(Node* node, Node* from);
+  void DecrementUnscheduledUseCount(Node* node, Node* from);
 
   static void PropagateImmediateDominators(BasicBlock* block);
......