Commit 818fa541 authored by Leszek Swirski, committed by V8 LUCI CQ

[maglev] Clean-up SetAccumulator

Bring back raw SetAccumulator, replacing the separate
SetAccumulatorToNewNode/SetAccumulatorToExistingNode helpers.
SetAccumulator (and StoreRegister) are now expected to only ever be
called on new Nodes, with DCHECKs that track which nodes are new
guaranteeing this.

Bug: v8:7700
Change-Id: I5657fa85dc05445bc3d6956ebcd5541ec1cedfad
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3579362
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Victor Gomes <victorgomes@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79908}
parent 53bdb1fb
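
For illustration, a minimal, hypothetical sketch of the bookkeeping this change introduces: a standalone toy class, not the real MaglevGraphBuilder, that reuses the names from the diff below (new_nodes_, AddNewNode, StoreRegister, SetAccumulator, MoveNodeBetweenRegisters). In the real code the set and the checks exist only under #ifdef DEBUG and use DCHECKs rather than assert.

#include <cassert>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <vector>

// Stand-ins for Maglev's ValueNode and interpreter registers; the real types
// live under src/maglev/ and src/interpreter/.
struct ValueNode {};
using RegisterIndex = int;
constexpr RegisterIndex kVirtualAccumulator = -1;

class GraphBuilderSketch {
 public:
  // Mirrors AddNewNode<...>: every freshly created node is recorded in
  // new_nodes_ so the stores below can assert on it.
  ValueNode* AddNewNode() {
    nodes_.push_back(std::make_unique<ValueNode>());
    ValueNode* node = nodes_.back().get();
    new_nodes_.insert(node);
    return node;
  }

  // Mirrors StoreRegister: only nodes created during the current bytecode
  // visit may be stored into a register directly.
  void StoreRegister(RegisterIndex target, ValueNode* value) {
    assert(new_nodes_.count(value) == 1);  // DCHECK_NE(0, ...) in the CL.
    frame_[target] = value;
  }

  // Mirrors SetAccumulator: an accumulator store is just a store to the
  // virtual accumulator register.
  void SetAccumulator(ValueNode* node) {
    StoreRegister(kVirtualAccumulator, node);
  }

  // Mirrors MoveNodeBetweenRegisters: existing nodes are moved, never
  // re-stored, so the assertion is the opposite of StoreRegister's.
  void MoveNodeBetweenRegisters(RegisterIndex src, RegisterIndex dst) {
    ValueNode* node = frame_.at(src);
    assert(new_nodes_.count(node) == 0);
    frame_[dst] = node;
  }

  // Mirrors the new_nodes_.clear() added to the bytecode dispatch loop:
  // each bytecode visit starts with an empty "new nodes" set.
  void StartVisit() { new_nodes_.clear(); }

 private:
  std::vector<std::unique_ptr<ValueNode>> nodes_;
  std::unordered_map<RegisterIndex, ValueNode*> frame_;
  std::unordered_set<ValueNode*> new_nodes_;
};

int main() {
  GraphBuilderSketch builder;

  builder.StartVisit();                          // e.g. VisitLdaZero
  builder.SetAccumulator(builder.AddNewNode());  // OK: node is new

  builder.StartVisit();                          // e.g. VisitStar
  builder.MoveNodeBetweenRegisters(kVirtualAccumulator, /*dst=*/0);  // OK: existing node
  // Storing that same node through SetAccumulator would now trip the assert,
  // because it is no longer in new_nodes_.
  return 0;
}

The split lets StoreRegister assume its value is new (so a lazy-deopt result location can be attached exactly once), while MoveNodeBetweenRegisters handles already-placed nodes and asserts the opposite.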
@@ -73,8 +73,6 @@ MaglevGraphBuilder::MaglevGraphBuilder(MaglevCompilationUnit* compilation_unit)
   interpreter::Register new_target_or_generator_register =
       bytecode().incoming_new_target_or_generator_register();
 
-  const compiler::BytecodeLivenessState* liveness =
-      bytecode_analysis().GetInLivenessFor(0);
   int register_index = 0;
   // TODO(leszeks): Don't emit if not needed.
   ValueNode* undefined_value =
@@ -82,19 +80,16 @@ MaglevGraphBuilder::MaglevGraphBuilder(MaglevCompilationUnit* compilation_unit)
   if (new_target_or_generator_register.is_valid()) {
     int new_target_index = new_target_or_generator_register.index();
     for (; register_index < new_target_index; register_index++) {
-      StoreRegister(interpreter::Register(register_index), undefined_value,
-                    liveness);
+      StoreRegister(interpreter::Register(register_index), undefined_value);
     }
     StoreRegister(
         new_target_or_generator_register,
         // TODO(leszeks): Expose in Graph.
-        AddNewNode<RegisterInput>({}, kJavaScriptCallNewTargetRegister),
-        liveness);
+        AddNewNode<RegisterInput>({}, kJavaScriptCallNewTargetRegister));
     register_index++;
   }
   for (; register_index < register_count(); register_index++) {
-    StoreRegister(interpreter::Register(register_index), undefined_value,
-                  liveness);
+    StoreRegister(interpreter::Register(register_index), undefined_value);
   }
 
   BasicBlock* first_block = CreateBlock<Jump>({}, &jump_targets_[0]);
@@ -134,8 +129,8 @@ template <Operation kOperation>
 void MaglevGraphBuilder::BuildGenericUnaryOperationNode() {
   FeedbackSlot slot_index = GetSlotOperand(0);
   ValueNode* value = GetAccumulator();
-  SetAccumulatorToNewNode<GenericNodeForOperation<kOperation>>(
-      {value}, compiler::FeedbackSource{feedback(), slot_index});
+  SetAccumulator(AddNewNode<GenericNodeForOperation<kOperation>>(
+      {value}, compiler::FeedbackSource{feedback(), slot_index}));
 }
 
 template <Operation kOperation>
@@ -143,8 +138,8 @@ void MaglevGraphBuilder::BuildGenericBinaryOperationNode() {
   ValueNode* left = LoadRegister(0);
   ValueNode* right = GetAccumulator();
   FeedbackSlot slot_index = GetSlotOperand(1);
-  SetAccumulatorToNewNode<GenericNodeForOperation<kOperation>>(
-      {left, right}, compiler::FeedbackSource{feedback(), slot_index});
+  SetAccumulator(AddNewNode<GenericNodeForOperation<kOperation>>(
+      {left, right}, compiler::FeedbackSource{feedback(), slot_index}));
 }
 
 template <Operation kOperation>
@@ -153,8 +148,8 @@ void MaglevGraphBuilder::BuildGenericBinarySmiOperationNode() {
   Smi constant = Smi::FromInt(iterator_.GetImmediateOperand(0));
   ValueNode* right = AddNewNode<SmiConstant>({}, constant);
   FeedbackSlot slot_index = GetSlotOperand(1);
-  SetAccumulatorToNewNode<GenericNodeForOperation<kOperation>>(
-      {left, right}, compiler::FeedbackSource{feedback(), slot_index});
+  SetAccumulator(AddNewNode<GenericNodeForOperation<kOperation>>(
      {left, right}, compiler::FeedbackSource{feedback(), slot_index}));
 }
 
 template <Operation kOperation>
@@ -177,7 +172,7 @@ void MaglevGraphBuilder::VisitBinaryOperation() {
 
         if (kOperation == Operation::kAdd) {
           ValueNode* result = AddNewNode<Int32AddWithOverflow>({left, right});
-          SetAccumulatorToNewNode<CheckedSmiTag>({result});
+          SetAccumulator(AddNewNode<CheckedSmiTag>({result}));
           return;
         }
       }
@@ -202,14 +197,13 @@ void MaglevGraphBuilder::VisitBinarySmiOperation() {
 
       if (kOperation == Operation::kAdd) {
        if (constant == Smi::zero()) {
-          // For addition of zero, that passed the Smi check, we can simply
-          // return the accumulator.
-          SetAccumulatorToExistingNode(GetAccumulator());
+          // For addition of zero, when the accumulator passed the Smi check,
+          // it already has the right value, so we can just return.
          return;
        }
        ValueNode* right = AddNewNode<SmiConstant>({}, constant);
        ValueNode* result = AddNewNode<Int32AddWithOverflow>({left, right});
-        SetAccumulatorToNewNode<CheckedSmiTag>({result});
+        SetAccumulator(AddNewNode<CheckedSmiTag>({result}));
        return;
      }
    }
@@ -221,33 +215,34 @@ void MaglevGraphBuilder::VisitBinarySmiOperation() {
 }
 
 void MaglevGraphBuilder::VisitLdar() {
-  SetAccumulatorToExistingNode(LoadRegister(0));
+  MoveNodeBetweenRegisters(iterator_.GetRegisterOperand(0),
+                           interpreter::Register::virtual_accumulator());
 }
 void MaglevGraphBuilder::VisitLdaZero() {
-  SetAccumulatorToNewNode<SmiConstant>({}, Smi::zero());
+  SetAccumulator(AddNewNode<SmiConstant>({}, Smi::zero()));
 }
 void MaglevGraphBuilder::VisitLdaSmi() {
   Smi constant = Smi::FromInt(iterator_.GetImmediateOperand(0));
-  SetAccumulatorToNewNode<SmiConstant>({}, constant);
+  SetAccumulator(AddNewNode<SmiConstant>({}, constant));
 }
 void MaglevGraphBuilder::VisitLdaUndefined() {
-  SetAccumulatorToNewNode<RootConstant>({}, RootIndex::kUndefinedValue);
+  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kUndefinedValue));
 }
 void MaglevGraphBuilder::VisitLdaNull() {
-  SetAccumulatorToNewNode<RootConstant>({}, RootIndex::kNullValue);
+  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kNullValue));
 }
 void MaglevGraphBuilder::VisitLdaTheHole() {
-  SetAccumulatorToNewNode<RootConstant>({}, RootIndex::kTheHoleValue);
+  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kTheHoleValue));
 }
 void MaglevGraphBuilder::VisitLdaTrue() {
-  SetAccumulatorToNewNode<RootConstant>({}, RootIndex::kTrueValue);
+  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kTrueValue));
 }
 void MaglevGraphBuilder::VisitLdaFalse() {
-  SetAccumulatorToNewNode<RootConstant>({}, RootIndex::kFalseValue);
+  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kFalseValue));
 }
 void MaglevGraphBuilder::VisitLdaConstant() {
-  SetAccumulatorToConstant(GetRefOperand<HeapObject>(0));
+  SetAccumulator(GetConstant(GetRefOperand<HeapObject>(0)));
 }
 MAGLEV_UNIMPLEMENTED_BYTECODE(LdaContextSlot)
 MAGLEV_UNIMPLEMENTED_BYTECODE(LdaImmutableContextSlot)
@@ -257,26 +252,33 @@ void MaglevGraphBuilder::VisitLdaCurrentContextSlot() {
 
   // TODO(leszeks): Passing a LoadHandler to LoadField here is a bit of
   // a hack, maybe we should have a LoadRawOffset or similar.
-  SetAccumulatorToNewNode<LoadField>(
+  SetAccumulator(AddNewNode<LoadField>(
       {context}, LoadHandler::LoadField(
                      isolate(), FieldIndex::ForInObjectOffset(
                                     Context::OffsetOfElementAt(slot_index),
                                     FieldIndex::kTagged))
-          ->value());
+          ->value()));
 }
 void MaglevGraphBuilder::VisitLdaImmutableCurrentContextSlot() {
   // TODO(leszeks): Consider context specialising.
   VisitLdaCurrentContextSlot();
 }
 void MaglevGraphBuilder::VisitStar() {
-  StoreRegister(
-      iterator_.GetRegisterOperand(0), GetAccumulator(),
-      bytecode_analysis().GetOutLivenessFor(iterator_.current_offset()));
+  MoveNodeBetweenRegisters(interpreter::Register::virtual_accumulator(),
+                           iterator_.GetRegisterOperand(0));
 }
+#define SHORT_STAR_VISITOR(Name, ...) \
+  void MaglevGraphBuilder::Visit##Name() { \
+    MoveNodeBetweenRegisters( \
+        interpreter::Register::virtual_accumulator(), \
+        interpreter::Register::FromShortStar(interpreter::Bytecode::k##Name)); \
+  }
+SHORT_STAR_BYTECODE_LIST(SHORT_STAR_VISITOR)
+#undef SHORT_STAR_VISITOR
+
 void MaglevGraphBuilder::VisitMov() {
-  StoreRegister(
-      iterator_.GetRegisterOperand(1), LoadRegister(0),
-      bytecode_analysis().GetOutLivenessFor(iterator_.current_offset()));
+  MoveNodeBetweenRegisters(iterator_.GetRegisterOperand(0),
+                           iterator_.GetRegisterOperand(1));
 }
 MAGLEV_UNIMPLEMENTED_BYTECODE(PushContext)
 MAGLEV_UNIMPLEMENTED_BYTECODE(PopContext)
@@ -306,7 +308,7 @@ void MaglevGraphBuilder::BuildPropertyCellAccess(
   DCHECK_EQ(PropertyKind::kData, property_details.kind());
 
   if (!property_details.IsConfigurable() && property_details.IsReadOnly()) {
-    SetAccumulatorToConstant(property_cell_value);
+    SetAccumulator(GetConstant(property_cell_value));
     return;
   }
 
@@ -321,7 +323,7 @@ void MaglevGraphBuilder::BuildPropertyCellAccess(
   // Load from constant/undefined global property can be constant-folded.
   if (property_cell_type == PropertyCellType::kConstant ||
       property_cell_type == PropertyCellType::kUndefined) {
-    SetAccumulatorToConstant(property_cell_value);
+    SetAccumulator(GetConstant(property_cell_value));
     return;
   }
 
@@ -329,12 +331,12 @@ void MaglevGraphBuilder::BuildPropertyCellAccess(
       AddNewNode<Constant>({}, property_cell.AsHeapObject());
   // TODO(leszeks): Padding a LoadHandler to LoadField here is a bit of
   // a hack, maybe we should have a LoadRawOffset or similar.
-  SetAccumulatorToNewNode<LoadField>(
+  SetAccumulator(AddNewNode<LoadField>(
      {property_cell_node},
      LoadHandler::LoadField(
          isolate(), FieldIndex::ForInObjectOffset(PropertyCell::kValueOffset,
                                                   FieldIndex::kTagged))
-          ->value());
+          ->value()));
 }
 
 void MaglevGraphBuilder::VisitLdaGlobal() {
@@ -362,7 +364,7 @@ void MaglevGraphBuilder::VisitLdaGlobal() {
    // TODO(leszeks): Handle the IsScriptContextSlot case.
 
    ValueNode* context = GetContext();
-    SetAccumulatorToNewNode<LoadGlobal>({context}, name);
+    SetAccumulator(AddNewNode<LoadGlobal>({context}, name));
   }
 }
 MAGLEV_UNIMPLEMENTED_BYTECODE(LdaGlobalInsideTypeof)
@@ -397,7 +399,7 @@ void MaglevGraphBuilder::VisitGetNamedProperty() {
          !LoadHandler::IsWasmStructBits::decode(handler)) {
        AddNewNode<CheckMaps>({object},
                              MakeRef(broker(), map_and_handler.first));
-        SetAccumulatorToNewNode<LoadField>({object}, handler);
+        SetAccumulator(AddNewNode<LoadField>({object}, handler));
        return;
      }
    }
@@ -405,9 +407,9 @@ void MaglevGraphBuilder::VisitGetNamedProperty() {
 
   ValueNode* context = GetContext();
   compiler::NameRef name = GetRefOperand<Name>(1);
-  SetAccumulatorToNewNode<LoadNamedGeneric>(
-      {context, object}, name,
-      compiler::FeedbackSource{feedback(), slot_index});
+  SetAccumulator(AddNewNode<LoadNamedGeneric>(
+      {context, object}, name,
+      compiler::FeedbackSource{feedback(), slot_index}));
 }
 MAGLEV_UNIMPLEMENTED_BYTECODE(GetNamedPropertyFromSuper)
@@ -571,7 +573,7 @@ void MaglevGraphBuilder::BuildCallFromRegisterList(
    call->set_arg(arg_index++, current_interpreter_frame_.get(args[i]));
  }
 
-  SetAccumulatorToNewNode(call);
+  SetAccumulator(call);
 }
 
 void MaglevGraphBuilder::BuildCallFromRegisters(
@@ -601,7 +603,7 @@ void MaglevGraphBuilder::BuildCallFromRegisters(
    call->set_arg(arg_index++, LoadRegister(i + 1));
  }
 
-  SetAccumulatorToNewNode(call);
+  SetAccumulator(call);
 }
 
 void MaglevGraphBuilder::VisitCallAnyReceiver() {
@@ -745,8 +747,6 @@ void MaglevGraphBuilder::MergeIntoFrameState(BasicBlock* predecessor,
 
 void MaglevGraphBuilder::BuildBranchIfTrue(ValueNode* node, int true_target,
                                            int false_target) {
-  // TODO(verwaest): Materialize true/false in the respective environments.
-  if (GetOutLiveness()->AccumulatorIsLive()) SetAccumulatorToExistingNode(node);
   BasicBlock* block = FinishBlock<BranchIfTrue>(next_offset(), {node},
                                                 &jump_targets_[true_target],
                                                 &jump_targets_[false_target]);
@@ -755,8 +755,6 @@ void MaglevGraphBuilder::BuildBranchIfTrue(ValueNode* node, int true_target,
 void MaglevGraphBuilder::BuildBranchIfToBooleanTrue(ValueNode* node,
                                                     int true_target,
                                                     int false_target) {
-  // TODO(verwaest): Materialize true/false in the respective environments.
-  if (GetOutLiveness()->AccumulatorIsLive()) SetAccumulatorToExistingNode(node);
   BasicBlock* block = FinishBlock<BranchIfToBooleanTrue>(
       next_offset(), {node}, &jump_targets_[true_target],
       &jump_targets_[false_target]);
@@ -807,15 +805,6 @@ MAGLEV_UNIMPLEMENTED_BYTECODE(GetIterator)
 MAGLEV_UNIMPLEMENTED_BYTECODE(Debugger)
 MAGLEV_UNIMPLEMENTED_BYTECODE(IncBlockCounter)
 MAGLEV_UNIMPLEMENTED_BYTECODE(Abort)
-#define SHORT_STAR_VISITOR(Name, ...) \
-  void MaglevGraphBuilder::Visit##Name() { \
-    StoreRegister( \
-        interpreter::Register::FromShortStar(interpreter::Bytecode::k##Name), \
-        GetAccumulator(), \
-        bytecode_analysis().GetOutLivenessFor(iterator_.current_offset())); \
-  }
-SHORT_STAR_BYTECODE_LIST(SHORT_STAR_VISITOR)
-#undef SHORT_STAR_VISITOR
 
 void MaglevGraphBuilder::VisitWide() { UNREACHABLE(); }
 void MaglevGraphBuilder::VisitExtraWide() { UNREACHABLE(); }
......
@@ -113,6 +113,10 @@ class MaglevGraphBuilder {
       StartNewBlock(offset);
     }
     DCHECK_NOT_NULL(current_block_);
+#ifdef DEBUG
+    // Clear new nodes for the next VisitFoo
+    new_nodes_.clear();
+#endif
     switch (iterator_.current_bytecode()) {
 #define BYTECODE_CASE(name, ...) \
   case interpreter::Bytecode::k##name: \
@@ -134,6 +138,9 @@ class MaglevGraphBuilder {
     }
     current_block_->nodes().Add(node);
     if (has_graph_labeller()) graph_labeller()->RegisterNode(node);
+#ifdef DEBUG
+    new_nodes_.insert(node);
+#endif
     return node;
   }
 
@@ -180,39 +187,30 @@ class MaglevGraphBuilder {
                                     operand_index, isolate())));
   }
 
-  // For cases where we're setting the accumulator to a previously created node
-  // (e.g. moving an interpreter register to the accumulator).
-  // TODO(leszeks): Somehow DCHECK that this isn't a new node.
-  void SetAccumulatorToExistingNode(ValueNode* node) {
-    current_interpreter_frame_.set_accumulator(node);
-  }
-
-  template <typename NodeT>
-  void SetAccumulatorToNewNode(NodeT* node) {
-    DCHECK_EQ(NodeT::kProperties.can_lazy_deopt(),
-              node->properties().can_lazy_deopt());
-    if constexpr (NodeT::kProperties.can_lazy_deopt()) {
-      DCHECK(!node->lazy_deopt_info()->result_location.is_valid());
-      node->lazy_deopt_info()->result_location =
-          interpreter::Register::virtual_accumulator();
-    }
-    SetAccumulatorToExistingNode(node);
-  }
-
-  template <typename NodeT, typename... Args>
-  void SetAccumulatorToNewNode(std::initializer_list<ValueNode*> inputs,
-                               Args&&... args) {
-    NodeT* node = AddNewNode<NodeT>(inputs, args...);
-    SetAccumulatorToNewNode(node);
-  }
-
-  void SetAccumulatorToConstant(const compiler::ObjectRef& ref) {
-    if (ref.IsSmi()) {
-      return SetAccumulatorToNewNode<SmiConstant>({},
-                                                  Smi::FromInt(ref.AsSmi()));
-    }
-    // TODO(leszeks): Detect roots and use RootConstant.
-    SetAccumulatorToNewNode<Constant>({}, ref.AsHeapObject());
-  }
+  ValueNode* GetConstant(const compiler::ObjectRef& ref) {
+    if (ref.IsSmi()) {
+      return AddNewNode<SmiConstant>({}, Smi::FromInt(ref.AsSmi()));
+    }
+    // TODO(leszeks): Detect roots and use RootConstant.
+    return AddNewNode<Constant>({}, ref.AsHeapObject());
+  }
+
+  // Move an existing ValueNode between two registers. You can pass
+  // virtual_accumulator as the src or dst to move in or out of the accumulator.
+  void MoveNodeBetweenRegisters(interpreter::Register src,
+                                interpreter::Register dst) {
+    // We shouldn't be moving newly created nodes between registers.
+    DCHECK_EQ(0, new_nodes_.count(current_interpreter_frame_.get(src)));
+    DCHECK_NOT_NULL(current_interpreter_frame_.get(src));
+    current_interpreter_frame_.set(dst, current_interpreter_frame_.get(src));
+  }
+
+  template <typename NodeT>
+  void SetAccumulator(NodeT* node) {
+    // Accumulator stores are equivalent to stores to the virtual accumulator
+    // register.
+    StoreRegister(interpreter::Register::virtual_accumulator(), node);
+  }
 
   ValueNode* GetAccumulator() const {
@@ -224,11 +222,13 @@ class MaglevGraphBuilder {
     return current_interpreter_frame_.get(source);
   }
 
-  void StoreRegister(interpreter::Register target, ValueNode* value,
-                     const compiler::BytecodeLivenessState* liveness) {
-    if (target.index() >= 0 && !liveness->RegisterIsLive(target.index())) {
-      return;
-    }
+  template <typename NodeT>
+  void StoreRegister(interpreter::Register target, NodeT* value) {
+    // We should only set register values to nodes that were newly created in
+    // this Visit. Existing nodes should be moved between registers with
+    // MoveNodeBetweenRegisters.
+    DCHECK_NE(0, new_nodes_.count(value));
+    MarkAsLazyDeoptResult(value, target);
     current_interpreter_frame_.set(target, value);
   }
 
@@ -249,6 +249,18 @@ class MaglevGraphBuilder {
         *compilation_unit_, GetOutLiveness(), current_interpreter_frame_));
   }
 
+  template <typename NodeT>
+  void MarkAsLazyDeoptResult(NodeT* value,
+                             interpreter::Register result_location) {
+    DCHECK_EQ(NodeT::kProperties.can_lazy_deopt(),
+              value->properties().can_lazy_deopt());
+    if constexpr (NodeT::kProperties.can_lazy_deopt()) {
+      DCHECK(result_location.is_valid());
+      DCHECK(!value->lazy_deopt_info()->result_location.is_valid());
+      value->lazy_deopt_info()->result_location = result_location;
+    }
+  }
+
   void MarkPossibleSideEffect() {
     // If there was a potential side effect, invalidate the previous checkpoint.
     latest_checkpointed_state_.reset();
@@ -424,6 +436,10 @@ class MaglevGraphBuilder {
   // TODO(v8:7700): Clean up after all bytecodes are supported.
   bool found_unsupported_bytecode_ = false;
   bool this_field_will_be_unused_once_all_bytecodes_are_supported_;
+
+#ifdef DEBUG
+  std::unordered_set<Node*> new_nodes_;
+#endif
 };
 
 }  // namespace maglev
......