Commit 0c0042cc authored by Ross McIlroy, committed by Commit Bot

[TurboProp] Add the ability for GraphAssembler to update basic blocks.

Adds the ability for the GraphAssembler to operate on, and maintain, a
scheduled graph. This will be used by TurboProp to maintain the initial
schedule created before effect-control-linearization, by updating this schedule
during the effect-control linearization, select and memory lowering stages rather than doing a
later reschedule.

In order to do this, an internal BlockUpdater is added to GraphAssembler,
which is enabled by passing the schedule to the GraphAssembler. The
GraphAssembler is modified to call into the block updater when nodes are added
and updates the schedule with new basic blocks when new control flow is introduced.

BUG=v8:9684

Change-Id: I6d428ad21d869c472bb20f43cc8caf44722f090a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1841355
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64519}
parent 9c8f8fad
......@@ -41,7 +41,7 @@ class EffectControlLinearizer {
mask_array_index_(mask_array_index),
source_positions_(source_positions),
node_origins_(node_origins),
graph_assembler_(js_graph, nullptr, nullptr, temp_zone),
graph_assembler_(js_graph, temp_zone),
frame_state_zapper_(nullptr) {}
void Run();
......@@ -253,7 +253,6 @@ class EffectControlLinearizer {
Node* SmiShiftBitsConstant();
void TransitionElementsTo(Node* node, Node* array, ElementsKind from,
ElementsKind to);
void ConnectUnreachableToEnd(Node* effect, Node* control);
Factory* factory() const { return isolate()->factory(); }
Isolate* isolate() const { return jsgraph()->isolate(); }
......@@ -558,6 +557,8 @@ void EffectControlLinearizer::Run() {
for (BasicBlock* block : *(schedule()->rpo_order())) {
size_t instr = 0;
gasm()->Reset(block);
// The control node should be the first.
Node* control = block->NodeAt(instr);
DCHECK(NodeProperties::IsControl(control));
......@@ -740,6 +741,8 @@ void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state,
source_positions_->GetSourcePosition(node));
NodeOriginTable::Scope origin_scope(node_origins_, "process node", node);
gasm()->InitializeEffectControl(*effect, *control);
// If the node needs to be wired into the effect/control chain, do this
// here. Pass current frame state for lowering to eager deoptimization.
if (TryWireInStateEffect(node, *frame_state, effect, control)) {
......@@ -821,10 +824,12 @@ void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state,
*control = node;
}
gasm()->AddNode(node);
// Break the effect chain on {Unreachable} and reconnect to the graph end.
// Mark the following code for deletion by connecting to the {Dead} node.
if (node->opcode() == IrOpcode::kUnreachable) {
ConnectUnreachableToEnd(*effect, *control);
gasm()->ConnectUnreachableToEnd();
*effect = *control = jsgraph()->Dead();
}
}
......@@ -833,7 +838,6 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
Node* frame_state,
Node** effect,
Node** control) {
gasm()->Reset(*effect, *control);
Node* result = nullptr;
switch (node->opcode()) {
case IrOpcode::kChangeBitToTagged:
......@@ -1327,19 +1331,12 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
node->op()->mnemonic());
}
*effect = gasm()->ExtractCurrentEffect();
*control = gasm()->ExtractCurrentControl();
*effect = gasm()->current_effect();
*control = gasm()->current_control();
NodeProperties::ReplaceUses(node, result, *effect, *control);
return true;
}
void EffectControlLinearizer::ConnectUnreachableToEnd(Node* effect,
Node* control) {
DCHECK_EQ(effect->opcode(), IrOpcode::kUnreachable);
Node* throw_node = graph()->NewNode(common()->Throw(), effect, control);
NodeProperties::MergeControlToEnd(graph(), common(), throw_node);
}
#define __ gasm()->
Node* EffectControlLinearizer::LowerChangeFloat64ToTagged(Node* node) {
......
......@@ -6,90 +6,407 @@
#include "src/codegen/code-factory.h"
#include "src/compiler/linkage.h"
#include "src/compiler/schedule.h"
namespace v8 {
namespace internal {
namespace compiler {
GraphAssembler::GraphAssembler(JSGraph* jsgraph, Node* effect, Node* control,
Zone* zone)
// Keeps an existing Schedule up to date while the GraphAssembler mutates the
// graph. It tracks the basic block currently being lowered and lazily switches
// from a cheap "unchanged" mode (walking the block's original node list) to a
// "changed" mode in which the block's tail, control and successors are rebuilt.
class GraphAssembler::BasicBlockUpdater {
 public:
  BasicBlockUpdater(Schedule* schedule, Graph* graph, Zone* temp_zone);

  // Schedules {node} into the current (or the given) basic block.
  Node* AddNode(Node* node);
  Node* AddNode(Node* node, BasicBlock* to);
  // Schedules a pure node, cloning it first if it may already be (or become)
  // scheduled in another block.
  Node* AddClonedNode(Node* node);

  // Block creation: a fresh block, or a continuation block that inherits the
  // current block's deferred hint.
  BasicBlock* NewBasicBlock(bool deferred);
  BasicBlock* SplitBasicBlock();

  // Control-flow construction mirroring the GraphAssembler operations.
  void AddBind(BasicBlock* block);
  void AddBranch(Node* branch, BasicBlock* tblock, BasicBlock* fblock);
  void AddGoto(BasicBlock* to);
  void AddGoto(BasicBlock* from, BasicBlock* to);
  void AddThrow(Node* node);

  // Begins / finishes lowering of one original block.
  void StartBlock(BasicBlock* block);
  BasicBlock* Finalize(BasicBlock* original);

  BasicBlock* original_block() { return original_block_; }
  BasicBlock::Control original_control() { return original_control_; }
  Node* original_control_input() { return original_control_input_; }

 private:
  // kUnchanged: the block still matches the original schedule.
  // kChanged: the block's tail has been detached and is being rebuilt.
  enum State { kUnchanged, kChanged };

  Zone* temp_zone() { return temp_zone_; }

  bool MightBeScheduled(Node* node);
  void UpdateSuccessors(BasicBlock* block);
  void SetBlockDeferredFromPredecessors();
  void CopyForChange();

  Zone* temp_zone_;

  // Current basic block we are scheduling.
  BasicBlock* current_block_;

  // The original block that we are lowering.
  BasicBlock* original_block_;

  // Position in the current block, only applicable in the 'unchanged' state.
  BasicBlock::iterator node_it_;
  BasicBlock::iterator end_it_;

  Schedule* schedule_;
  Graph* graph_;

  // The nodes in the original block if we are in 'changed' state. Retained to
  // avoid invalidating iterators that are iterating over the original nodes of
  // the block.
  NodeVector saved_nodes_;

  // The original control, control input and successors, to enable recovery of
  // them when we finalize the block.
  struct SuccessorInfo {
    BasicBlock* block;
    size_t index;  // Predecessor slot in {block} that pointed at the original.
  };
  ZoneVector<SuccessorInfo> saved_successors_;
  BasicBlock::Control original_control_;
  Node* original_control_input_;
  bool original_deferred_;

  // Graph node count at construction time; nodes with smaller ids existed
  // before lowering started (see MightBeScheduled).
  size_t original_node_count_;

  State state_;
};
// Captures the graph's node count at construction so that pre-existing
// (possibly scheduled) nodes can later be told apart from new ones.
GraphAssembler::BasicBlockUpdater::BasicBlockUpdater(Schedule* schedule,
                                                     Graph* graph,
                                                     Zone* temp_zone)
    : temp_zone_(temp_zone),
      current_block_(nullptr),
      original_block_(nullptr),
      schedule_(schedule),
      graph_(graph),
      // Saved nodes/successors are allocated in the schedule's zone.
      saved_nodes_(schedule->zone()),
      saved_successors_(schedule->zone()),
      original_control_(BasicBlock::kNone),
      original_control_input_(nullptr),
      original_deferred_(false),
      original_node_count_(graph->NodeCount()),
      state_(kUnchanged) {}
// Schedules {node} into the block currently being assembled.
Node* GraphAssembler::BasicBlockUpdater::AddNode(Node* node) {
  return AddNode(node, current_block_);
}
// Schedules {node} into {to}. While in the kUnchanged state, a node that
// matches the next node of the original schedule is simply skipped over;
// any divergence switches to the kChanged state via CopyForChange().
Node* GraphAssembler::BasicBlockUpdater::AddNode(Node* node, BasicBlock* to) {
  if (state_ == kUnchanged) {
    DCHECK_EQ(to, original_block());

    // Fast path: {node} is exactly the next already-scheduled node.
    if (node_it_ != end_it_ && *node_it_ == node) {
      node_it_++;
      return node;
    }

    CopyForChange();
  }

  // Add the node to the basic block.
  DCHECK(!schedule_->IsScheduled(node));
  schedule_->AddNode(to, node);
  return node;
}
// Schedules the pure node {node} into the current block, cloning it when it
// is (or might later end up) scheduled in a different block.
Node* GraphAssembler::BasicBlockUpdater::AddClonedNode(Node* node) {
  DCHECK(node->op()->HasProperty(Operator::kPure));
  if (schedule_->IsScheduled(node) &&
      schedule_->block(node) == current_block_) {
    // Node is already scheduled for the current block, don't add it again.
    return node;
  } else if (!schedule_->IsScheduled(node) && !MightBeScheduled(node)) {
    // Node is not scheduled yet, so we can add it directly.
    return AddNode(node);
  } else {
    // TODO(9684): Potentially add some per-block caching so we can avoid
    // cloning if we've already cloned for this block.
    return AddNode(graph_->CloneNode(node));
  }
}
// Conservative id-based check against the node count recorded at
// construction time.
bool GraphAssembler::BasicBlockUpdater::MightBeScheduled(Node* node) {
  // Return true if node was part of the original schedule and might currently
  // be re-added to the schedule after a CopyForChange.
  return node->id() < original_node_count_;
}
// Switches from kUnchanged to kChanged: detaches the not-yet-visited tail of
// the original block (nodes, control and successors) from the schedule,
// saving enough information for Finalize()/UpdateSuccessors() to reattach it.
void GraphAssembler::BasicBlockUpdater::CopyForChange() {
  DCHECK_EQ(kUnchanged, state_);

  // Save successors, remembering which predecessor slot of each successor
  // points back at the original block.
  DCHECK(saved_successors_.empty());
  for (BasicBlock* successor : original_block()->successors()) {
    for (size_t i = 0; i < successor->PredecessorCount(); i++) {
      if (successor->PredecessorAt(i) == original_block()) {
        saved_successors_.push_back({successor, i});
        break;
      }
    }
  }
  DCHECK_EQ(saved_successors_.size(), original_block()->SuccessorCount());

  // Save control.
  original_control_ = original_block()->control();
  original_control_input_ = original_block()->control_input();

  // Save original nodes (to allow them to continue to be iterated by the user
  // of graph assembler).
  original_block()->nodes()->swap(saved_nodes_);
  DCHECK(original_block()->nodes()->empty());

  // Re-insert the already-processed nodes at the front of the block.
  original_block()->InsertNodes(original_block()->begin(), saved_nodes_.begin(),
                                node_it_);

  // Remove the tail from the schedule.
  for (; node_it_ != end_it_; node_it_++) {
    schedule_->SetBlockForNode(nullptr, *node_it_);
  }

  // Reset the control; a kGoto has no control input node to unschedule.
  if (original_block()->control() != BasicBlock::kGoto) {
    schedule_->SetBlockForNode(nullptr, original_block()->control_input());
  }
  original_block()->set_control_input(nullptr);
  original_block()->set_control(BasicBlock::kNone);
  original_block()->ClearSuccessors();

  state_ = kChanged;
  // Invalidate the iterators; they must not be used in the kChanged state.
  end_it_ = {};
  node_it_ = {};
}
// Creates a fresh basic block; blocks created while lowering a deferred
// original block are always marked deferred as well.
BasicBlock* GraphAssembler::BasicBlockUpdater::NewBasicBlock(bool deferred) {
  BasicBlock* result = schedule_->NewBasicBlock();
  bool mark_deferred = deferred || original_deferred_;
  result->set_deferred(mark_deferred);
  return result;
}
// Creates a continuation block that inherits the current block's deferred
// hint.
BasicBlock* GraphAssembler::BasicBlockUpdater::SplitBasicBlock() {
  return NewBasicBlock(current_block_->deferred());
}
// Makes {to} the current block (called when the assembler binds a label) and
// recomputes its deferred hint from its predecessors.
void GraphAssembler::BasicBlockUpdater::AddBind(BasicBlock* to) {
  DCHECK_NOT_NULL(to);
  current_block_ = to;
  // Basic block should only have the control node, if any.
  DCHECK_LE(current_block_->NodeCount(), 1);
  SetBlockDeferredFromPredecessors();
}
// Marks the current block deferred when every one of its predecessors is
// deferred. A block that is already deferred keeps its hint unchanged.
void GraphAssembler::BasicBlockUpdater::SetBlockDeferredFromPredecessors() {
  if (current_block_->deferred()) return;
  for (BasicBlock* pred : current_block_->predecessors()) {
    // A single non-deferred predecessor keeps the block non-deferred.
    if (!pred->deferred()) return;
  }
  current_block_->set_deferred(true);
}
// Terminates the current block with a branch to {tblock}/{fblock}. A branch
// always alters control flow, so the kChanged state is forced first.
void GraphAssembler::BasicBlockUpdater::AddBranch(Node* node,
                                                  BasicBlock* tblock,
                                                  BasicBlock* fblock) {
  if (state_ == kUnchanged) {
    DCHECK_EQ(current_block_, original_block());
    CopyForChange();
  }

  DCHECK_EQ(state_, kChanged);
  schedule_->AddBranch(current_block_, node, tblock, fblock);
  // No block is current until the next AddBind().
  current_block_ = nullptr;
}
// Terminates the current block with a goto to {to}.
void GraphAssembler::BasicBlockUpdater::AddGoto(BasicBlock* to) {
  DCHECK_NOT_NULL(current_block_);
  AddGoto(current_block_, to);
}
// Terminates {from} with a goto to {to}, inserting a trampoline block when a
// non-deferred block would otherwise jump straight into a deferred one.
void GraphAssembler::BasicBlockUpdater::AddGoto(BasicBlock* from,
                                                BasicBlock* to) {
  if (state_ == kUnchanged) {
    CopyForChange();
  }

  if (to->deferred() && !from->deferred()) {
    // Add a new block with the correct deferred hint to avoid merges into the
    // target block with different deferred hints.
    // TODO(9684): Only split the current basic block if the label's target
    // block has multiple merges.
    BasicBlock* new_block = NewBasicBlock(to->deferred());
    schedule_->AddGoto(from, new_block);
    from = new_block;
  }

  schedule_->AddGoto(from, to);
  current_block_ = nullptr;
}
// Terminates the current block with a Throw of {node} and records the throw
// as the block's final control so Finalize() keeps it wired to end().
void GraphAssembler::BasicBlockUpdater::AddThrow(Node* node) {
  if (state_ == kUnchanged) {
    CopyForChange();
  }
  schedule_->AddThrow(current_block_, node);

  // Clear original successors and update the original control and control
  // input to the throw, since this block is now connected directly to end().
  saved_successors_.clear();
  original_control_input_ = node;
  original_control_ = BasicBlock::kThrow;
}
// Reattaches the successors, control and control input saved by
// CopyForChange() to {block}, the final block of the lowered code.
void GraphAssembler::BasicBlockUpdater::UpdateSuccessors(BasicBlock* block) {
  for (SuccessorInfo succ : saved_successors_) {
    // Redirect the successor's predecessor slot that used to point at the
    // original block.
    (succ.block->predecessors())[succ.index] = block;
    block->AddSuccessor(succ.block);
  }
  saved_successors_.clear();
  block->set_control(original_control_);
  block->set_control_input(original_control_input_);
  if (original_control_input_ != nullptr) {
    schedule_->SetBlockForNode(block, original_control_input_);
  } else {
    // Only a goto has no control input node.
    DCHECK_EQ(BasicBlock::kGoto, original_control_);
  }
}
// Begins lowering of {block}: records it as both the current and the original
// block and starts iterating its scheduled nodes in the kUnchanged state.
void GraphAssembler::BasicBlockUpdater::StartBlock(BasicBlock* block) {
  DCHECK_NULL(current_block_);
  DCHECK_NULL(original_block_);
  DCHECK(saved_nodes_.empty());
  // NOTE(review): RPO info is reset here, presumably because newly inserted
  // blocks invalidate the existing RPO numbering — confirm against callers.
  block->ResetRPOInfo();
  current_block_ = block;
  original_block_ = block;
  original_deferred_ = block->deferred();
  node_it_ = block->begin();
  end_it_ = block->end();
  state_ = kUnchanged;
}
// Finishes lowering of {original}. Returns the block that now ends the
// lowered code: the original block when unchanged, otherwise the last block
// that was current.
BasicBlock* GraphAssembler::BasicBlockUpdater::Finalize(BasicBlock* original) {
  DCHECK_EQ(original, original_block());
  BasicBlock* block = current_block_;
  if (state_ == kChanged) {
    // Reattach the saved successors/control to the final block.
    UpdateSuccessors(block);
  } else {
    DCHECK_EQ(block, original_block());
    if (node_it_ != end_it_) {
      // We have not got to the end of the node list, we need to trim.
      block->TrimNodes(node_it_);
    }
  }
  // Clear the per-block state, ready for the next StartBlock().
  original_control_ = BasicBlock::kNone;
  saved_nodes_.clear();
  original_deferred_ = false;
  original_control_input_ = nullptr;
  original_block_ = nullptr;
  current_block_ = nullptr;
  return block;
}
// Constructs a GraphAssembler for an unscheduled graph: no block updater, so
// all schedule-maintenance entry points become no-ops.
GraphAssembler::GraphAssembler(JSGraph* jsgraph, Zone* zone)
    : temp_zone_(zone),
      jsgraph_(jsgraph),
      current_effect_(nullptr),
      current_control_(nullptr),
      block_updater_(nullptr) {}
GraphAssembler::GraphAssembler(JSGraph* jsgraph, Schedule* schedule, Zone* zone)
: temp_zone_(zone),
jsgraph_(jsgraph),
current_effect_(effect),
current_control_(control) {}
current_effect_(nullptr),
current_control_(nullptr),
block_updater_(new BasicBlockUpdater(schedule, jsgraph->graph(), zone)) {}
// Defined out-of-line; assumed necessary so the header can hold an owning
// pointer to the locally defined BasicBlockUpdater — confirm in the header.
GraphAssembler::~GraphAssembler() = default;
Node* GraphAssembler::IntPtrConstant(intptr_t value) {
return jsgraph()->IntPtrConstant(value);
return AddClonedNode(jsgraph()->IntPtrConstant(value));
}
Node* GraphAssembler::Int32Constant(int32_t value) {
return jsgraph()->Int32Constant(value);
return AddClonedNode(jsgraph()->Int32Constant(value));
}
Node* GraphAssembler::Int64Constant(int64_t value) {
return jsgraph()->Int64Constant(value);
return AddClonedNode(jsgraph()->Int64Constant(value));
}
Node* GraphAssembler::UniqueIntPtrConstant(intptr_t value) {
return graph()->NewNode(
machine()->Is64() ? common()->Int64Constant(value)
: common()->Int32Constant(static_cast<int32_t>(value)));
return AddNode(graph()->NewNode(
machine()->Is64()
? common()->Int64Constant(value)
: common()->Int32Constant(static_cast<int32_t>(value))));
}
Node* GraphAssembler::SmiConstant(int32_t value) {
return jsgraph()->SmiConstant(value);
return AddClonedNode(jsgraph()->SmiConstant(value));
}
Node* GraphAssembler::Uint32Constant(int32_t value) {
return jsgraph()->Uint32Constant(value);
Node* GraphAssembler::Uint32Constant(uint32_t value) {
return AddClonedNode(jsgraph()->Uint32Constant(value));
}
Node* GraphAssembler::Float64Constant(double value) {
return jsgraph()->Float64Constant(value);
return AddClonedNode(jsgraph()->Float64Constant(value));
}
Node* GraphAssembler::HeapConstant(Handle<HeapObject> object) {
return jsgraph()->HeapConstant(object);
return AddClonedNode(jsgraph()->HeapConstant(object));
}
Node* GraphAssembler::NumberConstant(double value) {
return jsgraph()->Constant(value);
return AddClonedNode(jsgraph()->Constant(value));
}
Node* GraphAssembler::ExternalConstant(ExternalReference ref) {
return jsgraph()->ExternalConstant(ref);
return AddClonedNode(jsgraph()->ExternalConstant(ref));
}
Node* GraphAssembler::CEntryStubConstant(int result_size) {
return jsgraph()->CEntryStubConstant(result_size);
return AddClonedNode(jsgraph()->CEntryStubConstant(result_size));
}
Node* GraphAssembler::LoadFramePointer() {
return graph()->NewNode(machine()->LoadFramePointer());
return AddNode(graph()->NewNode(machine()->LoadFramePointer()));
}
#define SINGLETON_CONST_DEF(Name) \
Node* GraphAssembler::Name() { return jsgraph()->Name(); }
Node* GraphAssembler::Name() { return AddClonedNode(jsgraph()->Name()); }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_DEF)
#undef SINGLETON_CONST_DEF
#define PURE_UNOP_DEF(Name) \
Node* GraphAssembler::Name(Node* input) { \
return graph()->NewNode(machine()->Name(), input); \
return AddNode(graph()->NewNode(machine()->Name(), input)); \
}
PURE_ASSEMBLER_MACH_UNOP_LIST(PURE_UNOP_DEF)
#undef PURE_UNOP_DEF
#define PURE_BINOP_DEF(Name) \
Node* GraphAssembler::Name(Node* left, Node* right) { \
return graph()->NewNode(machine()->Name(), left, right); \
return AddNode(graph()->NewNode(machine()->Name(), left, right)); \
}
PURE_ASSEMBLER_MACH_BINOP_LIST(PURE_BINOP_DEF)
#undef PURE_BINOP_DEF
#define CHECKED_BINOP_DEF(Name) \
Node* GraphAssembler::Name(Node* left, Node* right) { \
return graph()->NewNode(machine()->Name(), left, right, current_control_); \
return AddNode( \
graph()->NewNode(machine()->Name(), left, right, current_control_)); \
}
CHECKED_ASSEMBLER_MACH_BINOP_LIST(CHECKED_BINOP_DEF)
#undef CHECKED_BINOP_DEF
......@@ -108,37 +425,39 @@ Node* GraphAssembler::TaggedEqual(Node* left, Node* right) {
Node* GraphAssembler::Float64RoundDown(Node* value) {
CHECK(machine()->Float64RoundDown().IsSupported());
return graph()->NewNode(machine()->Float64RoundDown().op(), value);
return AddNode(graph()->NewNode(machine()->Float64RoundDown().op(), value));
}
Node* GraphAssembler::Float64RoundTruncate(Node* value) {
CHECK(machine()->Float64RoundTruncate().IsSupported());
return graph()->NewNode(machine()->Float64RoundTruncate().op(), value);
return AddNode(
graph()->NewNode(machine()->Float64RoundTruncate().op(), value));
}
Node* GraphAssembler::Projection(int index, Node* value) {
return graph()->NewNode(common()->Projection(index), value, current_control_);
return AddNode(
graph()->NewNode(common()->Projection(index), value, current_control_));
}
Node* GraphAssembler::Allocate(AllocationType allocation, Node* size) {
return current_control_ = current_effect_ = graph()->NewNode(
simplified()->AllocateRaw(Type::Any(), allocation), size,
current_effect_, current_control_);
return AddNode(
graph()->NewNode(simplified()->AllocateRaw(Type::Any(), allocation), size,
current_effect_, current_control_));
}
Node* GraphAssembler::LoadField(FieldAccess const& access, Node* object) {
Node* value = current_effect_ =
graph()->NewNode(simplified()->LoadField(access), object, current_effect_,
current_control_);
Node* value =
AddNode(graph()->NewNode(simplified()->LoadField(access), object,
current_effect_, current_control_));
return InsertDecompressionIfNeeded(access.machine_type.representation(),
value);
}
Node* GraphAssembler::LoadElement(ElementAccess const& access, Node* object,
Node* index) {
Node* value = current_effect_ =
graph()->NewNode(simplified()->LoadElement(access), object, index,
current_effect_, current_control_);
Node* value =
AddNode(graph()->NewNode(simplified()->LoadElement(access), object, index,
current_effect_, current_control_));
return InsertDecompressionIfNeeded(access.machine_type.representation(),
value);
}
......@@ -147,41 +466,39 @@ Node* GraphAssembler::StoreField(FieldAccess const& access, Node* object,
Node* value) {
value =
InsertCompressionIfNeeded(access.machine_type.representation(), value);
return current_effect_ =
graph()->NewNode(simplified()->StoreField(access), object, value,
current_effect_, current_control_);
return AddNode(graph()->NewNode(simplified()->StoreField(access), object,
value, current_effect_, current_control_));
}
Node* GraphAssembler::StoreElement(ElementAccess const& access, Node* object,
Node* index, Node* value) {
value =
InsertCompressionIfNeeded(access.machine_type.representation(), value);
return current_effect_ =
graph()->NewNode(simplified()->StoreElement(access), object, index,
value, current_effect_, current_control_);
return AddNode(graph()->NewNode(simplified()->StoreElement(access), object,
index, value, current_effect_,
current_control_));
}
Node* GraphAssembler::DebugBreak() {
return current_effect_ = graph()->NewNode(machine()->DebugBreak(),
current_effect_, current_control_);
return AddNode(graph()->NewNode(machine()->DebugBreak(), current_effect_,
current_control_));
}
Node* GraphAssembler::Unreachable() {
return current_effect_ = graph()->NewNode(common()->Unreachable(),
current_effect_, current_control_);
return AddNode(graph()->NewNode(common()->Unreachable(), current_effect_,
current_control_));
}
Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, Node* offset,
Node* value) {
value = InsertCompressionIfNeeded(rep.representation(), value);
return current_effect_ =
graph()->NewNode(machine()->Store(rep), object, offset, value,
current_effect_, current_control_);
return AddNode(graph()->NewNode(machine()->Store(rep), object, offset, value,
current_effect_, current_control_));
}
Node* GraphAssembler::Load(MachineType type, Node* object, Node* offset) {
Node* value = current_effect_ = graph()->NewNode(
machine()->Load(type), object, offset, current_effect_, current_control_);
Node* value = AddNode(graph()->NewNode(machine()->Load(type), object, offset,
current_effect_, current_control_));
return InsertDecompressionIfNeeded(type.representation(), value);
}
......@@ -192,8 +509,8 @@ Node* GraphAssembler::StoreUnaligned(MachineRepresentation rep, Node* object,
machine()->UnalignedStoreSupported(rep))
? machine()->Store(StoreRepresentation(rep, kNoWriteBarrier))
: machine()->UnalignedStore(rep);
return current_effect_ = graph()->NewNode(op, object, offset, value,
current_effect_, current_control_);
return AddNode(graph()->NewNode(op, object, offset, value, current_effect_,
current_control_));
}
Node* GraphAssembler::LoadUnaligned(MachineType type, Node* object,
......@@ -203,69 +520,63 @@ Node* GraphAssembler::LoadUnaligned(MachineType type, Node* object,
machine()->UnalignedLoadSupported(type.representation()))
? machine()->Load(type)
: machine()->UnalignedLoad(type);
return current_effect_ = graph()->NewNode(op, object, offset, current_effect_,
current_control_);
return AddNode(
graph()->NewNode(op, object, offset, current_effect_, current_control_));
}
Node* GraphAssembler::Retain(Node* buffer) {
return current_effect_ =
graph()->NewNode(common()->Retain(), buffer, current_effect_);
return AddNode(graph()->NewNode(common()->Retain(), buffer, current_effect_));
}
Node* GraphAssembler::UnsafePointerAdd(Node* base, Node* external) {
return current_effect_ =
graph()->NewNode(machine()->UnsafePointerAdd(), base, external,
current_effect_, current_control_);
return AddNode(graph()->NewNode(machine()->UnsafePointerAdd(), base, external,
current_effect_, current_control_));
}
Node* GraphAssembler::ToNumber(Node* value) {
return current_effect_ =
graph()->NewNode(ToNumberOperator(), ToNumberBuiltinConstant(),
value, NoContextConstant(), current_effect_);
return AddNode(graph()->NewNode(ToNumberOperator(), ToNumberBuiltinConstant(),
value, NoContextConstant(), current_effect_));
}
Node* GraphAssembler::BitcastWordToTagged(Node* value) {
return current_effect_ =
graph()->NewNode(machine()->BitcastWordToTagged(), value,
current_effect_, current_control_);
return AddNode(graph()->NewNode(machine()->BitcastWordToTagged(), value,
current_effect_, current_control_));
}
Node* GraphAssembler::BitcastTaggedToWord(Node* value) {
return current_effect_ =
graph()->NewNode(machine()->BitcastTaggedToWord(), value,
current_effect_, current_control_);
return AddNode(graph()->NewNode(machine()->BitcastTaggedToWord(), value,
current_effect_, current_control_));
}
Node* GraphAssembler::BitcastTaggedToWordForTagAndSmiBits(Node* value) {
return current_effect_ =
graph()->NewNode(machine()->BitcastTaggedToWordForTagAndSmiBits(),
value, current_effect_, current_control_);
return AddNode(
graph()->NewNode(machine()->BitcastTaggedToWordForTagAndSmiBits(), value,
current_effect_, current_control_));
}
Node* GraphAssembler::Word32PoisonOnSpeculation(Node* value) {
return current_effect_ =
graph()->NewNode(machine()->Word32PoisonOnSpeculation(), value,
current_effect_, current_control_);
return AddNode(graph()->NewNode(machine()->Word32PoisonOnSpeculation(), value,
current_effect_, current_control_));
}
Node* GraphAssembler::DeoptimizeIf(DeoptimizeReason reason,
FeedbackSource const& feedback,
Node* condition, Node* frame_state,
IsSafetyCheck is_safety_check) {
return current_control_ = current_effect_ = graph()->NewNode(
return AddNode(graph()->NewNode(
common()->DeoptimizeIf(DeoptimizeKind::kEager, reason, feedback,
is_safety_check),
condition, frame_state, current_effect_, current_control_);
condition, frame_state, current_effect_, current_control_));
}
Node* GraphAssembler::DeoptimizeIfNot(DeoptimizeReason reason,
FeedbackSource const& feedback,
Node* condition, Node* frame_state,
IsSafetyCheck is_safety_check) {
return current_control_ = current_effect_ = graph()->NewNode(
common()->DeoptimizeUnless(DeoptimizeKind::kEager, reason,
feedback, is_safety_check),
condition, frame_state, current_effect_, current_control_);
return AddNode(graph()->NewNode(
common()->DeoptimizeUnless(DeoptimizeKind::kEager, reason, feedback,
is_safety_check),
condition, frame_state, current_effect_, current_control_));
}
void GraphAssembler::Branch(Node* condition, GraphAssemblerLabel<0u>* if_true,
......@@ -281,27 +592,119 @@ void GraphAssembler::Branch(Node* condition, GraphAssemblerLabel<0u>* if_true,
Node* branch = graph()->NewNode(common()->Branch(hint, is_safety_check),
condition, current_control_);
current_control_ = graph()->NewNode(common()->IfTrue(), branch);
Node* if_true_control = current_control_ =
graph()->NewNode(common()->IfTrue(), branch);
MergeState(if_true);
current_control_ = graph()->NewNode(common()->IfFalse(), branch);
Node* if_false_control = current_control_ =
graph()->NewNode(common()->IfFalse(), branch);
MergeState(if_false);
if (block_updater_) {
// TODO(9684): Only split the current basic block if the label's target
// block has multiple merges.
BasicBlock* if_true_target = block_updater_->SplitBasicBlock();
BasicBlock* if_false_target = block_updater_->SplitBasicBlock();
block_updater_->AddBranch(branch, if_true_target, if_false_target);
block_updater_->AddNode(if_true_control, if_true_target);
block_updater_->AddGoto(if_true_target, if_true->basic_block());
block_updater_->AddNode(if_false_control, if_false_target);
block_updater_->AddGoto(if_false_target, if_false->basic_block());
}
current_control_ = nullptr;
current_effect_ = nullptr;
}
// Extractors (should be only used when destructing the assembler.
Node* GraphAssembler::ExtractCurrentControl() {
Node* result = current_control_;
current_control_ = nullptr;
return result;
// Informs the schedule updater, if any, that {block} has been bound.
void GraphAssembler::BindBasicBlock(BasicBlock* block) {
  if (block_updater_ == nullptr) return;
  block_updater_->AddBind(block);
}
Node* GraphAssembler::ExtractCurrentEffect() {
Node* result = current_effect_;
current_effect_ = nullptr;
return result;
// Creates a new basic block when a schedule is being maintained; returns
// nullptr otherwise, since there is no schedule to add a block to.
BasicBlock* GraphAssembler::NewBasicBlock(bool deferred) {
  return block_updater_ ? block_updater_->NewBasicBlock(deferred) : nullptr;
}
// Records, in the maintained schedule (if any), an unconditional jump from
// the current block to {block}.
void GraphAssembler::GotoBasicBlock(BasicBlock* block) {
  if (block_updater_ == nullptr) return;
  block_updater_->AddGoto(block);
}
// Updates the schedule for a conditional jump: splits off a block for the
// taken edge ({goto_if}) of {branch} which jumps on to {block}, and binds a
// fresh fallthrough block for the other edge.
void GraphAssembler::GotoIfBasicBlock(BasicBlock* block, Node* branch,
                                      IrOpcode::Value goto_if) {
  if (block_updater_) {
    // TODO(9684): Only split the current basic block for the goto_target
    // if block has multiple merges.
    BasicBlock* goto_target = block_updater_->SplitBasicBlock();
    BasicBlock* fallthrough_target = block_updater_->SplitBasicBlock();

    if (goto_if == IrOpcode::kIfTrue) {
      block_updater_->AddBranch(branch, goto_target, fallthrough_target);
    } else {
      DCHECK_EQ(goto_if, IrOpcode::kIfFalse);
      block_updater_->AddBranch(branch, fallthrough_target, goto_target);
    }

    // Schedule the current control node into the taken-edge block before the
    // goto to {block}.
    block_updater_->AddNode(current_control_, goto_target);
    block_updater_->AddGoto(goto_target, block);

    block_updater_->AddBind(fallthrough_target);
  }
}
// Completes schedule maintenance for {block}; returns the block that now ends
// the lowered code ({block} itself when no schedule is maintained).
BasicBlock* GraphAssembler::FinalizeCurrentBlock(BasicBlock* block) {
  return block_updater_ ? block_updater_->Finalize(block) : block;
}
// Connects the current {Unreachable} effect to the graph end via a new Throw
// node, mirroring the change into the schedule when one is maintained.
void GraphAssembler::ConnectUnreachableToEnd() {
  DCHECK_EQ(current_effect_->opcode(), IrOpcode::kUnreachable);
  Node* throw_node =
      graph()->NewNode(common()->Throw(), current_effect_, current_control_);
  NodeProperties::MergeControlToEnd(graph(), common(), throw_node);
  if (block_updater_) {
    block_updater_->AddThrow(throw_node);
  }
}
// Advances the current effect and/or control to {node}, depending on which
// outputs the node's operator produces.
void GraphAssembler::UpdateEffectControlWith(Node* node) {
  const Operator* op = node->op();
  if (op->EffectOutputCount() > 0) {
    current_effect_ = node;
  }
  if (op->ControlOutputCount() > 0) {
    current_control_ = node;
  }
}
// Adds a pure node to the effect/control chain, delegating to the block
// updater (which may substitute a clone) when a schedule is maintained.
Node* GraphAssembler::AddClonedNode(Node* node) {
  DCHECK(node->op()->HasProperty(Operator::kPure));
  if (block_updater_) {
    // The updater may return a clone; use its result from here on.
    node = block_updater_->AddClonedNode(node);
  }
  UpdateEffectControlWith(node);
  return node;
}
// Adds {node} to the schedule (when maintained) and updates the current
// effect/control chain with it.
Node* GraphAssembler::AddNode(Node* node) {
  if (block_updater_) {
    block_updater_->AddNode(node);
  }
  // Terminate nodes do not become the current effect/control.
  if (node->opcode() == IrOpcode::kTerminate) {
    return node;
  }
  UpdateEffectControlWith(node);
  return node;
}
Node* GraphAssembler::InsertDecompressionIfNeeded(MachineRepresentation rep,
......@@ -309,15 +712,16 @@ Node* GraphAssembler::InsertDecompressionIfNeeded(MachineRepresentation rep,
if (COMPRESS_POINTERS_BOOL) {
switch (rep) {
case MachineRepresentation::kCompressedPointer:
value = graph()->NewNode(
machine()->ChangeCompressedPointerToTaggedPointer(), value);
value = AddNode(graph()->NewNode(
machine()->ChangeCompressedPointerToTaggedPointer(), value));
break;
case MachineRepresentation::kCompressedSigned:
value = graph()->NewNode(
machine()->ChangeCompressedSignedToTaggedSigned(), value);
value = AddNode(graph()->NewNode(
machine()->ChangeCompressedSignedToTaggedSigned(), value));
break;
case MachineRepresentation::kCompressed:
value = graph()->NewNode(machine()->ChangeCompressedToTagged(), value);
value = AddNode(
graph()->NewNode(machine()->ChangeCompressedToTagged(), value));
break;
default:
break;
......@@ -331,15 +735,16 @@ Node* GraphAssembler::InsertCompressionIfNeeded(MachineRepresentation rep,
if (COMPRESS_POINTERS_BOOL) {
switch (rep) {
case MachineRepresentation::kCompressedPointer:
value = graph()->NewNode(
machine()->ChangeTaggedPointerToCompressedPointer(), value);
value = AddNode(graph()->NewNode(
machine()->ChangeTaggedPointerToCompressedPointer(), value));
break;
case MachineRepresentation::kCompressedSigned:
value = graph()->NewNode(
machine()->ChangeTaggedSignedToCompressedSigned(), value);
value = AddNode(graph()->NewNode(
machine()->ChangeTaggedSignedToCompressedSigned(), value));
break;
case MachineRepresentation::kCompressed:
value = graph()->NewNode(machine()->ChangeTaggedToCompressed(), value);
value = AddNode(
graph()->NewNode(machine()->ChangeTaggedToCompressed(), value));
break;
default:
break;
......@@ -348,7 +753,15 @@ Node* GraphAssembler::InsertCompressionIfNeeded(MachineRepresentation rep,
return value;
}
void GraphAssembler::Reset(Node* effect, Node* control) {
// Clears the effect/control chain and, when maintaining a schedule, starts
// lowering of {block}.
void GraphAssembler::Reset(BasicBlock* block) {
  current_effect_ = nullptr;
  current_control_ = nullptr;
  if (block_updater_) {
    block_updater_->StartBlock(block);
  }
}
// Sets the starting effect and control for subsequently assembled nodes.
void GraphAssembler::InitializeEffectControl(Node* effect, Node* control) {
  current_effect_ = effect;
  current_control_ = control;
}
......
......@@ -18,6 +18,9 @@ class Graph;
namespace compiler {
class Schedule;
class BasicBlock;
#define PURE_ASSEMBLER_MACH_UNOP_LIST(V) \
V(ChangeInt32ToInt64) \
V(ChangeInt32ToFloat64) \
......@@ -125,8 +128,9 @@ class GraphAssemblerLabel {
Node* PhiAt(size_t index);
template <typename... Reps>
explicit GraphAssemblerLabel(GraphAssemblerLabelType type, Reps... reps)
: type_(type) {
explicit GraphAssemblerLabel(GraphAssemblerLabelType type,
BasicBlock* basic_block, Reps... reps)
: type_(type), basic_block_(basic_block) {
STATIC_ASSERT(VarCount == sizeof...(reps));
MachineRepresentation reps_array[] = {MachineRepresentation::kNone,
reps...};
......@@ -149,9 +153,11 @@ class GraphAssemblerLabel {
return type_ == GraphAssemblerLabelType::kDeferred;
}
bool IsLoop() const { return type_ == GraphAssemblerLabelType::kLoop; }
BasicBlock* basic_block() { return basic_block_; }
bool is_bound_ = false;
GraphAssemblerLabelType const type_;
BasicBlock* basic_block_;
size_t merged_count_ = 0;
Node* effect_;
Node* control_;
......@@ -161,38 +167,46 @@ class GraphAssemblerLabel {
class GraphAssembler {
public:
GraphAssembler(JSGraph* jsgraph, Node* effect, Node* control, Zone* zone);
// Constructs a GraphAssembler that operates on an unscheduled graph.
GraphAssembler(JSGraph* jsgraph, Zone* zone);
// Constructs a GraphAssembler that operates on a scheduled graph, updating
// the schedule in the process.
GraphAssembler(JSGraph* jsgraph, Schedule* schedule, Zone* zone);
~GraphAssembler();
void Reset(Node* effect, Node* control);
void Reset(BasicBlock* block);
void InitializeEffectControl(Node* effect, Node* control);
// Create label.
template <typename... Reps>
static GraphAssemblerLabel<sizeof...(Reps)> MakeLabelFor(
GraphAssemblerLabel<sizeof...(Reps)> MakeLabelFor(
GraphAssemblerLabelType type, Reps... reps) {
return GraphAssemblerLabel<sizeof...(Reps)>(type, reps...);
return GraphAssemblerLabel<sizeof...(Reps)>(
type, NewBasicBlock(type == GraphAssemblerLabelType::kDeferred),
reps...);
}
// Convenience wrapper for creating non-deferred labels.
template <typename... Reps>
static GraphAssemblerLabel<sizeof...(Reps)> MakeLabel(Reps... reps) {
GraphAssemblerLabel<sizeof...(Reps)> MakeLabel(Reps... reps) {
return MakeLabelFor(GraphAssemblerLabelType::kNonDeferred, reps...);
}
// Convenience wrapper for creating loop labels.
template <typename... Reps>
static GraphAssemblerLabel<sizeof...(Reps)> MakeLoopLabel(Reps... reps) {
GraphAssemblerLabel<sizeof...(Reps)> MakeLoopLabel(Reps... reps) {
return MakeLabelFor(GraphAssemblerLabelType::kLoop, reps...);
}
// Convenience wrapper for creating deferred labels.
template <typename... Reps>
static GraphAssemblerLabel<sizeof...(Reps)> MakeDeferredLabel(Reps... reps) {
GraphAssemblerLabel<sizeof...(Reps)> MakeDeferredLabel(Reps... reps) {
return MakeLabelFor(GraphAssemblerLabelType::kDeferred, reps...);
}
// Value creation.
Node* IntPtrConstant(intptr_t value);
Node* Uint32Constant(int32_t value);
Node* Uint32Constant(uint32_t value);
Node* Int32Constant(int32_t value);
Node* Int64Constant(int64_t value);
Node* UniqueIntPtrConstant(intptr_t value);
......@@ -288,19 +302,42 @@ class GraphAssembler {
void GotoIfNot(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label,
Vars...);
// Extractors (should be only used when destructing/resetting the assembler).
Node* ExtractCurrentControl();
Node* ExtractCurrentEffect();
// Updates current effect and control based on outputs of {node}.
void UpdateEffectControlWith(Node* node);
// Adds {node} to the current position and updates assembler's current effect
// and control.
Node* AddNode(Node* node);
// Finalizes the {block} being processed by the assembler, returning the
// finalized block (which may be different from the original block).
BasicBlock* FinalizeCurrentBlock(BasicBlock* block);
void ConnectUnreachableToEnd();
Node* current_control() { return current_control_; }
Node* current_effect() { return current_effect_; }
private:
class BasicBlockUpdater;
// Adds a decompression node if pointer compression is enabled and the
// representation loaded is a compressed one. To be used after loads.
Node* InsertDecompressionIfNeeded(MachineRepresentation rep, Node* value);
// Adds a compression node if pointer compression is enabled and the
// representation to be stored is a compressed one. To be used before stores.
Node* InsertCompressionIfNeeded(MachineRepresentation rep, Node* value);
// Control flow helpers.
template <typename... Vars>
void MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, Vars... vars);
BasicBlock* NewBasicBlock(bool deferred);
void BindBasicBlock(BasicBlock* block);
void GotoBasicBlock(BasicBlock* block);
void GotoIfBasicBlock(BasicBlock* block, Node* branch,
IrOpcode::Value goto_if);
Node* AddClonedNode(Node* node);
Operator const* ToNumberOperator();
......@@ -319,6 +356,7 @@ class GraphAssembler {
JSGraph* jsgraph_;
Node* current_effect_;
Node* current_control_;
std::unique_ptr<BasicBlockUpdater> block_updater_;
};
template <size_t VarCount>
......@@ -412,8 +450,22 @@ void GraphAssembler::Bind(GraphAssemblerLabel<VarCount>* label) {
current_control_ = label->control_;
current_effect_ = label->effect_;
BindBasicBlock(label->basic_block());
label->SetBound();
if (label->merged_count_ > 1 || label->IsLoop()) {
AddNode(label->control_);
AddNode(label->effect_);
for (size_t i = 0; i < VarCount; i++) {
AddNode(label->bindings_[i]);
}
} else {
// If the basic block does not have a control node, insert a dummy
// Merge node, so that other passes have a control node to start from.
current_control_ =
AddNode(graph()->NewNode(common()->Merge(1), current_control_));
}
}
template <typename... Vars>
......@@ -422,6 +474,8 @@ void GraphAssembler::Goto(GraphAssemblerLabel<sizeof...(Vars)>* label,
DCHECK_NOT_NULL(current_control_);
DCHECK_NOT_NULL(current_effect_);
MergeState(label, vars...);
GotoBasicBlock(label->basic_block());
current_control_ = nullptr;
current_effect_ = nullptr;
}
......@@ -438,7 +492,8 @@ void GraphAssembler::GotoIf(Node* condition,
current_control_ = graph()->NewNode(common()->IfTrue(), branch);
MergeState(label, vars...);
current_control_ = graph()->NewNode(common()->IfFalse(), branch);
GotoIfBasicBlock(label->basic_block(), branch, IrOpcode::kIfTrue);
current_control_ = AddNode(graph()->NewNode(common()->IfFalse(), branch));
}
template <typename... Vars>
......@@ -452,7 +507,8 @@ void GraphAssembler::GotoIfNot(Node* condition,
current_control_ = graph()->NewNode(common()->IfFalse(), branch);
MergeState(label, vars...);
current_control_ = graph()->NewNode(common()->IfTrue(), branch);
GotoIfBasicBlock(label->basic_block(), branch, IrOpcode::kIfFalse);
current_control_ = AddNode(graph()->NewNode(common()->IfTrue(), branch));
}
template <typename... Args>
......@@ -471,7 +527,7 @@ Node* GraphAssembler::Call(const Operator* op, Args... args) {
Node* call = graph()->NewNode(op, size, args_array);
DCHECK_EQ(0, op->ControlOutputCount());
current_effect_ = call;
return call;
return AddNode(call);
}
} // namespace compiler
......
......@@ -50,7 +50,7 @@ MemoryLowering::MemoryLowering(JSGraph* jsgraph, Zone* zone,
const char* function_debug_name)
: jsgraph_(jsgraph),
zone_(zone),
graph_assembler_(jsgraph, nullptr, nullptr, zone),
graph_assembler_(jsgraph, zone),
allocation_folding_(allocation_folding),
poisoning_level_(poisoning_level),
write_barrier_assert_failed_(callback),
......@@ -96,7 +96,7 @@ Reduction MemoryLowering::ReduceAllocateRaw(
Node* effect = node->InputAt(1);
Node* control = node->InputAt(2);
gasm()->Reset(effect, control);
gasm()->InitializeEffectControl(effect, control);
Node* allocate_builtin;
if (allocation_type == AllocationType::kYoung) {
......@@ -162,8 +162,8 @@ Reduction MemoryLowering::ReduceAllocateRaw(
// Compute the effective inner allocated address.
value = __ BitcastWordToTagged(
__ IntAdd(state->top(), __ IntPtrConstant(kHeapObjectTag)));
effect = __ ExtractCurrentEffect();
control = __ ExtractCurrentControl();
effect = gasm()->current_effect();
control = gasm()->current_control();
// Extend the allocation {group}.
group->Add(value);
......@@ -216,8 +216,8 @@ Reduction MemoryLowering::ReduceAllocateRaw(
// Compute the initial object address.
value = __ BitcastWordToTagged(
__ IntAdd(done.PhiAt(0), __ IntPtrConstant(kHeapObjectTag)));
effect = __ ExtractCurrentEffect();
control = __ ExtractCurrentControl();
effect = gasm()->current_effect();
control = gasm()->current_control();
// Start a new allocation group.
AllocationGroup* group =
......@@ -264,8 +264,8 @@ Reduction MemoryLowering::ReduceAllocateRaw(
__ Bind(&done);
value = done.PhiAt(0);
effect = __ ExtractCurrentEffect();
control = __ ExtractCurrentControl();
effect = gasm()->current_effect();
control = gasm()->current_control();
if (state_ptr) {
// Create an unfoldable allocation group.
......
......@@ -75,6 +75,19 @@ void BasicBlock::set_loop_header(BasicBlock* loop_header) {
loop_header_ = loop_header;
}
// Shortens this block's node list so that it ends at {new_end}; every node
// from {new_end} up to the current end is removed.
void BasicBlock::TrimNodes(iterator new_end) {
  nodes_.erase(new_end, end());
}
// Clears all reverse-post-order / dominator-tree bookkeeping on this block,
// returning it to the unscheduled state so a later pass can recompute the
// RPO numbering and loop structure from scratch.
void BasicBlock::ResetRPOInfo() {
  // Numeric markers use -1 as the "not yet computed" sentinel.
  rpo_number_ = -1;
  loop_number_ = -1;
  dominator_depth_ = -1;
  loop_depth_ = 0;
  // Drop all links into the old dominator tree and loop nesting.
  dominator_ = nullptr;
  rpo_next_ = nullptr;
  loop_header_ = nullptr;
  loop_end_ = nullptr;
}
// static
BasicBlock* BasicBlock::GetCommonDominator(BasicBlock* b1, BasicBlock* b2) {
while (b1 != b2) {
......
......@@ -114,6 +114,11 @@ class V8_EXPORT_PRIVATE BasicBlock final
nodes_.insert(insertion_point, insertion_start, insertion_end);
}
// Trim basic block to end at {new_end}.
void TrimNodes(iterator new_end);
void ResetRPOInfo();
// Accessors.
Control control() const { return control_; }
void set_control(Control control);
......@@ -148,6 +153,8 @@ class V8_EXPORT_PRIVATE BasicBlock final
int32_t rpo_number() const { return rpo_number_; }
void set_rpo_number(int32_t rpo_number);
NodeVector* nodes() { return &nodes_; }
// Loop membership helpers.
inline bool IsLoopHeader() const { return loop_end_ != nullptr; }
bool LoopContains(BasicBlock* block) const;
......@@ -268,6 +275,7 @@ class V8_EXPORT_PRIVATE Schedule final : public NON_EXPORTED_BASE(ZoneObject) {
Zone* zone() const { return zone_; }
private:
friend class GraphAssembler;
friend class Scheduler;
friend class BasicBlockInstrumentor;
friend class RawMachineAssembler;
......
......@@ -15,8 +15,7 @@ namespace internal {
namespace compiler {
SelectLowering::SelectLowering(JSGraph* jsgraph, Zone* zone)
: graph_assembler_(jsgraph, nullptr, nullptr, zone),
start_(jsgraph->graph()->start()) {}
: graph_assembler_(jsgraph, zone), start_(jsgraph->graph()->start()) {}
// Out-of-line defaulted destructor.
SelectLowering::~SelectLowering() = default;
......@@ -34,7 +33,7 @@ Node* SelectLowering::LowerSelect(Node* node) {
Node* vtrue = node->InputAt(1);
Node* vfalse = node->InputAt(2);
gasm()->Reset(start(), start());
gasm()->InitializeEffectControl(start(), start());
auto done = __ MakeLabel(p.representation());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment