Commit 900e1e5f authored by Leszek Swirski, committed by V8 LUCI CQ

[maglev] Split off CheckMapsWithMigration

Make the normal CheckMaps non-calling, and add a new
CheckMapsWithMigration which still does the deferred call. Eventually
we'll want to also not mark this as calling, but keeping the two
separate is cleaner anyway.

Bug: v8:7700
Change-Id: Ideb2fcef147ab45d4a10bbdde9a85a55fbd56947
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3740725
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81499}
parent 078f3fb4
......@@ -684,6 +684,15 @@ MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupContextSlotInsideTypeof)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupGlobalSlotInsideTypeof)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaLookupSlot)
void MaglevGraphBuilder::BuildMapCheck(ValueNode* object,
                                       const compiler::MapRef& map) {
  // Emit a map check for {object}. The common case (not a migration target)
  // gets the plain CheckMaps node; maps that are migration targets get the
  // CheckMapsWithMigration variant, which attempts an instance migration
  // before deopting on a map mismatch.
  if (!map.is_migration_target()) {
    AddNewNode<CheckMaps>({object}, map);
    return;
  }
  AddNewNode<CheckMapsWithMigration>({object}, map);
}
bool MaglevGraphBuilder::TryBuildMonomorphicLoad(ValueNode* object,
const compiler::MapRef& map,
MaybeObjectHandle handler) {
......@@ -708,7 +717,7 @@ bool MaglevGraphBuilder::TryBuildMonomorphicLoadFromSmiHandler(
if (kind != LoadHandler::Kind::kField) return false;
if (LoadHandler::IsWasmStructBits::decode(handler)) return false;
AddNewNode<CheckMaps>({object}, map);
BuildMapCheck(object, map);
ValueNode* load_source;
if (LoadHandler::IsInobjectBits::decode(handler)) {
......@@ -754,7 +763,7 @@ bool MaglevGraphBuilder::TryBuildMonomorphicLoadFromLoadHandler(
if (lookup_on_lookup_start_object) return false;
if (kind != LoadHandler::Kind::kConstantFromPrototype) return false;
AddNewNode<CheckMaps>({object}, map);
BuildMapCheck(object, map);
Object validity_cell = handler.validity_cell(local_isolate_);
if (validity_cell.IsCell(local_isolate_)) {
......@@ -876,7 +885,7 @@ void MaglevGraphBuilder::VisitSetNamedProperty() {
StoreHandler::RepresentationBits::decode(smi_handler);
if (kind == StoreHandler::Kind::kField &&
representation == Representation::kTagged) {
AddNewNode<CheckMaps>({object}, named_feedback.maps()[0]);
BuildMapCheck(object, named_feedback.maps()[0]);
ValueNode* value = GetAccumulatorTagged();
AddNewNode<StoreField>({object, value}, smi_handler);
return;
......@@ -928,7 +937,7 @@ void MaglevGraphBuilder::VisitDefineNamedOwnProperty() {
StoreHandler::RepresentationBits::decode(smi_handler);
if (kind == StoreHandler::Kind::kField &&
representation == Representation::kTagged) {
AddNewNode<CheckMaps>({object}, named_feedback.maps()[0]);
BuildMapCheck(object, named_feedback.maps()[0]);
ValueNode* value = GetAccumulatorTagged();
AddNewNode<StoreField>({object, value}, smi_handler);
return;
......
......@@ -622,6 +622,8 @@ class MaglevGraphBuilder {
bool TryBuildPropertyCellAccess(
const compiler::GlobalAccessFeedback& global_access_feedback);
void BuildMapCheck(ValueNode* object, const compiler::MapRef& map);
bool TryBuildMonomorphicLoad(ValueNode* object, const compiler::MapRef& map,
MaybeObjectHandle handler);
bool TryBuildMonomorphicLoadFromSmiHandler(ValueNode* object,
......
......@@ -86,8 +86,9 @@ class MaglevGraphVerifier {
case Opcode::kLoadDoubleField:
case Opcode::kLoadGlobal:
case Opcode::kLoadTaggedField:
// TODO(victorgomes): Can we check that the input is actually a map?
// TODO(victorgomes): Can we check that the input is actually a receiver?
case Opcode::kCheckMaps:
case Opcode::kCheckMapsWithMigration:
// TODO(victorgomes): Can we check that the input is Boolean?
case Opcode::kBranchIfToBooleanTrue:
case Opcode::kBranchIfTrue:
......
......@@ -627,50 +627,68 @@ void CheckMaps::GenerateCode(MaglevCodeGenState* code_gen_state,
__ LoadMap(map_tmp, object);
__ Cmp(map_tmp, map().object());
// TODO(leszeks): Encode as a bit on CheckMaps.
if (map().is_migration_target()) {
JumpToDeferredIf(
not_equal, code_gen_state,
[](MaglevCodeGenState* code_gen_state, Label* return_label,
Register object, CheckMaps* node, EagerDeoptInfo* deopt_info,
Register map_tmp) {
RegisterEagerDeopt(code_gen_state, deopt_info);
// If the map is not deprecated, deopt straight away.
__ movl(kScratchRegister,
FieldOperand(map_tmp, Map::kBitField3Offset));
__ testl(kScratchRegister,
Immediate(Map::Bits3::IsDeprecatedBit::kMask));
__ j(zero, &deopt_info->deopt_entry_label);
// Otherwise, try migrating the object. If the migration returns Smi
// zero, then it failed and we should deopt.
__ Push(object);
__ Move(kContextRegister,
code_gen_state->broker()->target_native_context().object());
// TODO(verwaest): We're calling so we need to spill around it.
__ CallRuntime(Runtime::kTryMigrateInstance);
__ cmpl(kReturnRegister0, Immediate(0));
__ j(equal, &deopt_info->deopt_entry_label);
// The migrated object is returned on success, retry the map check.
__ Move(object, kReturnRegister0);
__ LoadMap(map_tmp, object);
__ Cmp(map_tmp, node->map().object());
__ j(equal, return_label);
__ jmp(&deopt_info->deopt_entry_label);
},
object, this, eager_deopt_info(), map_tmp);
} else {
EmitEagerDeoptIf(not_equal, code_gen_state, this);
}
EmitEagerDeoptIf(not_equal, code_gen_state, this);
}
void CheckMaps::PrintParams(std::ostream& os,
                            MaglevGraphLabeller* graph_labeller) const {
  // Graph-dump output: the expected map, wrapped in parentheses.
  const auto expected_map = map().object();
  os << "(" << *expected_map << ")";
}
void CheckMapsWithMigration::AllocateVreg(
    MaglevVregAllocationState* vreg_state) {
  // One temporary is requested; GenerateCode uses it to hold the object's
  // loaded map during the comparison.
  set_temporaries_needed(1);
  // The checked object itself must be allocated to a register.
  UseRegister(actual_map_input());
}
// Emits a map check that, on mismatch, falls into deferred code which tries
// to migrate the object (Runtime::kTryMigrateInstance) before deopting.
// Fast path: deopt if the input is a Smi, then compare the object's map
// against the expected map and continue on equality.
void CheckMapsWithMigration::GenerateCode(MaglevCodeGenState* code_gen_state,
                                          const ProcessingState& state) {
  Register object = ToRegister(actual_map_input());
  // Smis have no map; bail out to the deopt entry immediately.
  Condition is_smi = __ CheckSmi(object);
  EmitEagerDeoptIf(is_smi, code_gen_state, this);
  // Use the single requested temporary (see AllocateVreg) for the map.
  RegList temps = temporaries();
  Register map_tmp = temps.PopFirst();
  __ LoadMap(map_tmp, object);
  __ Cmp(map_tmp, map().object());
  // On mismatch, jump to deferred (out-of-line) code that attempts migration;
  // equality falls through to the next node.
  JumpToDeferredIf(
      not_equal, code_gen_state,
      [](MaglevCodeGenState* code_gen_state, Label* return_label,
         Register object, CheckMapsWithMigration* node,
         EagerDeoptInfo* deopt_info, Register map_tmp) {
        RegisterEagerDeopt(code_gen_state, deopt_info);
        // If the map is not deprecated, deopt straight away.
        __ movl(kScratchRegister, FieldOperand(map_tmp, Map::kBitField3Offset));
        __ testl(kScratchRegister,
                 Immediate(Map::Bits3::IsDeprecatedBit::kMask));
        __ j(zero, &deopt_info->deopt_entry_label);
        // Otherwise, try migrating the object. If the migration returns Smi
        // zero, then it failed and we should deopt.
        __ Push(object);
        __ Move(kContextRegister,
                code_gen_state->broker()->target_native_context().object());
        // TODO(verwaest): We're calling so we need to spill around it.
        __ CallRuntime(Runtime::kTryMigrateInstance);
        __ cmpl(kReturnRegister0, Immediate(0));
        __ j(equal, &deopt_info->deopt_entry_label);
        // The migrated object is returned on success, retry the map check.
        __ Move(object, kReturnRegister0);
        __ LoadMap(map_tmp, object);
        __ Cmp(map_tmp, node->map().object());
        __ j(equal, return_label);
        __ jmp(&deopt_info->deopt_entry_label);
      },
      object, this, eager_deopt_info(), map_tmp);
}
void CheckMapsWithMigration::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  // Graph-dump output: the expected map, wrapped in parentheses.
  const auto expected_map = map().object();
  os << "(" << *expected_map << ")";
}
void LoadTaggedField::AllocateVreg(MaglevVregAllocationState* vreg_state) {
UseRegister(object_input());
DefineAsRegister(vreg_state, this);
......
......@@ -6,6 +6,7 @@
#define V8_MAGLEV_MAGLEV_IR_H_
#include "src/base/bit-field.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/small-vector.h"
#include "src/base/threaded-list.h"
......@@ -143,10 +144,11 @@ class CompactInterpreterFrameState;
V(ConstantGapMove) \
V(GapMove)
#define NODE_LIST(V) \
V(CheckMaps) \
V(StoreField) \
GAP_MOVE_NODE_LIST(V) \
#define NODE_LIST(V) \
V(CheckMaps) \
V(CheckMapsWithMigration) \
V(StoreField) \
GAP_MOVE_NODE_LIST(V) \
VALUE_NODE_LIST(V)
#define CONDITIONAL_CONTROL_NODE_LIST(V) \
......@@ -1655,7 +1657,35 @@ class CheckMaps : public FixedInputNodeT<1, CheckMaps> {
public:
explicit CheckMaps(uint64_t bitfield, const compiler::MapRef& map)
: Base(bitfield), map_(map) {}
: Base(bitfield), map_(map) {
DCHECK(!map.is_migration_target());
}
static constexpr OpProperties kProperties = OpProperties::EagerDeopt();
compiler::MapRef map() const { return map_; }
static constexpr int kActualMapIndex = 0;
Input& actual_map_input() { return input(kActualMapIndex); }
void AllocateVreg(MaglevVregAllocationState*);
void GenerateCode(MaglevCodeGenState*, const ProcessingState&);
void PrintParams(std::ostream&, MaglevGraphLabeller*) const;
private:
const compiler::MapRef map_;
};
class CheckMapsWithMigration
: public FixedInputNodeT<1, CheckMapsWithMigration> {
using Base = FixedInputNodeT<1, CheckMapsWithMigration>;
public:
explicit CheckMapsWithMigration(uint64_t bitfield,
const compiler::MapRef& map)
: Base(bitfield), map_(map) {
DCHECK(map.is_migration_target());
}
// TODO(verwaest): This just calls in deferred code, so probably we'll need to
// mark that to generate stack maps. Mark as call so we at least clear the
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment