Commit 849a226d authored by titzer@chromium.org


Move some methods that are only related to scheduling from OperatorProperties into Scheduler. These methods now take a Node* parameter, since decisions about floating control need to distinguish not just operators but individual nodes.

R=bmeurer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/474983003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23160 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 65ae6e92
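
The shape of the change, condensed from the hunks below (member lists abbreviated; surrounding declarations elided): the scheduling predicates stop being static queries keyed on an Operator* and become Scheduler member functions keyed on a Node*.

// Before: static predicates in OperatorProperties, answerable from the
// operator alone.
class OperatorProperties {
 public:
  static inline bool CanBeScheduled(Operator* op);
  static inline bool HasFixedSchedulePosition(Operator* op);
  static inline bool IsScheduleRoot(Operator* op);
};

// After: the same predicates live on Scheduler and take a Node*, so a
// later change can base the answer on per-node state (e.g. floating
// control), not just the opcode.
class Scheduler {
 public:
  bool IsBasicBlockBegin(Node* node);
  bool CanBeScheduled(Node* node);
  bool HasFixedSchedulePosition(Node* node);
  bool IsScheduleRoot(Node* node);
};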
@@ -123,21 +123,6 @@ inline bool OperatorProperties::IsBasicBlockBegin(Operator* op) {
opcode == IrOpcode::kIfFalse;
}
inline bool OperatorProperties::CanBeScheduled(Operator* op) { return true; }
inline bool OperatorProperties::HasFixedSchedulePosition(Operator* op) {
IrOpcode::Value opcode = static_cast<IrOpcode::Value>(op->opcode());
return (IrOpcode::IsControlOpcode(opcode)) ||
opcode == IrOpcode::kParameter || opcode == IrOpcode::kEffectPhi ||
opcode == IrOpcode::kPhi;
}
inline bool OperatorProperties::IsScheduleRoot(Operator* op) {
uint8_t opcode = op->opcode();
return opcode == IrOpcode::kEnd || opcode == IrOpcode::kEffectPhi ||
opcode == IrOpcode::kPhi;
}
inline bool OperatorProperties::CanLazilyDeoptimize(Operator* op) {
// TODO(jarin) This function allows turning on lazy deoptimization
// incrementally. It will change as we turn on lazy deopt for
......
@@ -35,11 +35,6 @@ class OperatorProperties {
static inline int GetControlOutputCount(Operator* op);
static inline bool IsBasicBlockBegin(Operator* op);
static inline bool CanBeScheduled(Operator* op);
static inline bool HasFixedSchedulePosition(Operator* op);
static inline bool IsScheduleRoot(Operator* op);
static inline bool CanLazilyDeoptimize(Operator* op);
};
}
......
@@ -53,6 +53,29 @@ Schedule* Scheduler::ComputeSchedule(Graph* graph) {
}
bool Scheduler::IsBasicBlockBegin(Node* node) {
return OperatorProperties::IsBasicBlockBegin(node->op());
}
bool Scheduler::CanBeScheduled(Node* node) { return true; }
bool Scheduler::HasFixedSchedulePosition(Node* node) {
IrOpcode::Value opcode = node->opcode();
return (IrOpcode::IsControlOpcode(opcode)) ||
opcode == IrOpcode::kParameter || opcode == IrOpcode::kEffectPhi ||
opcode == IrOpcode::kPhi;
}
bool Scheduler::IsScheduleRoot(Node* node) {
IrOpcode::Value opcode = node->opcode();
return opcode == IrOpcode::kEnd || opcode == IrOpcode::kEffectPhi ||
opcode == IrOpcode::kPhi;
}
class CreateBlockVisitor : public NullNodeVisitor {
public:
explicit CreateBlockVisitor(Scheduler* scheduler) : scheduler_(scheduler) {}
@@ -150,7 +173,7 @@ void Scheduler::AddPredecessorsForLoopsAndMerges() {
// For all of the merge's control inputs, add a goto at the end to the
// merge's basic block.
for (InputIter j = (*i)->inputs().begin(); j != (*i)->inputs().end(); ++j) {
if (OperatorProperties::IsBasicBlockBegin((*i)->op())) {
if (IsBasicBlockBegin((*i))) {
BasicBlock* predecessor_block = schedule_->block(*j);
if ((*j)->opcode() != IrOpcode::kReturn &&
(*j)->opcode() != IrOpcode::kDeoptimize) {
@@ -368,7 +391,7 @@ class ScheduleEarlyNodeVisitor : public NullNodeVisitor {
int max_rpo = 0;
// Otherwise, the minimum rpo for the node is the max of all of the inputs.
if (!IsFixedNode(node)) {
DCHECK(!OperatorProperties::IsBasicBlockBegin(node->op()));
DCHECK(!scheduler_->IsBasicBlockBegin(node));
for (InputIter i = node->inputs().begin(); i != node->inputs().end();
++i) {
int control_rpo = scheduler_->schedule_early_rpo_index_[(*i)->id()];
@@ -387,9 +410,9 @@ class ScheduleEarlyNodeVisitor : public NullNodeVisitor {
return GenericGraphVisit::CONTINUE;
}
static bool IsFixedNode(Node* node) {
return OperatorProperties::HasFixedSchedulePosition(node->op()) ||
!OperatorProperties::CanBeScheduled(node->op());
bool IsFixedNode(Node* node) {
return scheduler_->HasFixedSchedulePosition(node) ||
!scheduler_->CanBeScheduled(node);
}
// TODO(mstarzinger): Dirty hack to unblock others, schedule early should be
@@ -431,7 +454,7 @@ class PrepareUsesVisitor : public NullNodeVisitor {
// right place; it's a convenient place during the preparation of use counts
// to schedule them.
if (!schedule_->IsScheduled(node) &&
OperatorProperties::HasFixedSchedulePosition(node->op())) {
scheduler_->HasFixedSchedulePosition(node)) {
if (FLAG_trace_turbo_scheduler) {
PrintF("Fixed position node %d is unscheduled, scheduling now\n",
node->id());
@@ -445,7 +468,7 @@ class PrepareUsesVisitor : public NullNodeVisitor {
schedule_->AddNode(block, node);
}
if (OperatorProperties::IsScheduleRoot(node->op())) {
if (scheduler_->IsScheduleRoot(node)) {
scheduler_->schedule_root_nodes_.push_back(node);
}
@@ -456,9 +479,8 @@ class PrepareUsesVisitor : public NullNodeVisitor {
// If the edge is from an unscheduled node, then tally it in the use count
// for all of its inputs. The same criterion will be used in ScheduleLate
// for decrementing use counts.
if (!schedule_->IsScheduled(from) &&
OperatorProperties::CanBeScheduled(from->op())) {
DCHECK(!OperatorProperties::HasFixedSchedulePosition(from->op()));
if (!schedule_->IsScheduled(from) && scheduler_->CanBeScheduled(from)) {
DCHECK(!scheduler_->HasFixedSchedulePosition(from));
++scheduler_->unscheduled_uses_[to->id()];
if (FLAG_trace_turbo_scheduler) {
PrintF("Incrementing uses of node %d from %d to %d\n", to->id(),
@@ -491,11 +513,10 @@ class ScheduleLateNodeVisitor : public NullNodeVisitor {
GenericGraphVisit::Control Pre(Node* node) {
// Don't schedule nodes that cannot be scheduled or are already scheduled.
if (!OperatorProperties::CanBeScheduled(node->op()) ||
schedule_->IsScheduled(node)) {
if (!scheduler_->CanBeScheduled(node) || schedule_->IsScheduled(node)) {
return GenericGraphVisit::CONTINUE;
}
DCHECK(!OperatorProperties::HasFixedSchedulePosition(node->op()));
DCHECK(!scheduler_->HasFixedSchedulePosition(node));
// If all the uses of a node have been scheduled, then the node itself can
// be scheduled.
@@ -562,7 +583,7 @@ class ScheduleLateNodeVisitor : public NullNodeVisitor {
BasicBlock* GetBlockForUse(Node::Edge edge) {
Node* use = edge.from();
IrOpcode::Value opcode = use->opcode();
// If the use is a phi, forward through the the phi to the basic block
// If the use is a phi, forward through the phi to the basic block
// corresponding to the phi's input.
if (opcode == IrOpcode::kPhi || opcode == IrOpcode::kEffectPhi) {
int index = edge.index();
@@ -582,11 +603,6 @@ class ScheduleLateNodeVisitor : public NullNodeVisitor {
return result;
}
bool IsNodeEligible(Node* node) {
bool eligible = scheduler_->unscheduled_uses_[node->id()] == 0;
return eligible;
}
void ScheduleNode(BasicBlock* block, Node* node) {
schedule_->PlanNode(block, node);
scheduler_->scheduled_nodes_[block->id()].push_back(node);
......
@@ -44,6 +44,11 @@ class Scheduler {
Scheduler(Zone* zone, Graph* graph, Schedule* schedule);
bool IsBasicBlockBegin(Node* node);
bool CanBeScheduled(Node* node);
bool HasFixedSchedulePosition(Node* node);
bool IsScheduleRoot(Node* node);
int GetRPONumber(BasicBlock* block) {
DCHECK(block->rpo_number_ >= 0 &&
block->rpo_number_ < static_cast<int>(schedule_->rpo_order_.size()));
......
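
Call sites in the visitor classes update accordingly; a condensed before/after of the pattern seen in the hunks above (the visitors already hold a scheduler_ pointer to the owning Scheduler):

// Before: a static, operator-keyed query.
if (OperatorProperties::IsScheduleRoot(node->op())) {
  scheduler_->schedule_root_nodes_.push_back(node);
}

// After: the query goes through the owning Scheduler and is keyed on
// the node itself.
if (scheduler_->IsScheduleRoot(node)) {
  scheduler_->schedule_root_nodes_.push_back(node);
}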