Commit f6ee9ed0 authored by Manos Koukoutos's avatar Manos Koukoutos Committed by Commit Bot

[turbofan] Introduce LoadImmutable, use it in wasm compiler

LoadImmutable represents a load from a position in memory that is known
to be immutable, e.g. an immutable IsolateRoot or an immutable field of
a WasmInstanceObject. Because the returned value cannot change through
the execution of a function, LoadImmutable is a pure operator and does
not have effect or control edges.
This will allow more aggressive optimizations of loads of fields of
the Isolate and Instance that are known to be immutable.
Requires that the memory in question has been initialized at function
start even through inlining.

Note: We may reconsider this approach once we have escape analysis for
wasm, and replace it with immutable load/initialize operators that live
inside the effect chain and are less restricted.

Bug: v8:11510
Change-Id: I5e8e4f27d7008f39f01175ffa95a9c531ba63e66
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2775568
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73594}
parent 5e8417a2
......@@ -1992,7 +1992,8 @@ bool InstructionSelector::ZeroExtendsWord32ToWord64NoPhis(Node* node) {
// zero-extension is a no-op.
return true;
}
case IrOpcode::kLoad: {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable: {
// As for the operations above, a 32-bit load will implicitly clear the
// top 32 bits of the destination register.
LoadRepresentation load_rep = LoadRepresentationOf(node->op());
......
......@@ -1495,7 +1495,8 @@ void InstructionSelector::VisitNode(Node* node) {
case IrOpcode::kRetain:
VisitRetain(node);
return;
case IrOpcode::kLoad: {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable: {
LoadRepresentation type = LoadRepresentationOf(node->op());
MarkAsRepresentation(type.representation(), node);
return VisitLoad(node);
......
......@@ -1487,7 +1487,8 @@ bool InstructionSelector::ZeroExtendsWord32ToWord64NoPhis(Node* node) {
case IrOpcode::kUint32Mod:
case IrOpcode::kUint32MulHigh:
return true;
case IrOpcode::kLoad: {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable: {
LoadRepresentation load_rep = LoadRepresentationOf(node->op());
if (load_rep.IsUnsigned()) {
switch (load_rep.representation()) {
......
......@@ -396,7 +396,8 @@ bool ProduceWord32Result(Node* node) {
// return false;
// }
// }
case IrOpcode::kLoad: {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable: {
LoadRepresentation load_rep = LoadRepresentationOf(node->op());
switch (load_rep.representation()) {
case MachineRepresentation::kWord32:
......@@ -1891,7 +1892,8 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
break;
case IrOpcode::kWord32And:
return VisitTestUnderMask(this, value, cont);
case IrOpcode::kLoad: {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable: {
LoadRepresentation load_rep = LoadRepresentationOf(value->op());
switch (load_rep.representation()) {
case MachineRepresentation::kWord32:
......
......@@ -1434,6 +1434,7 @@ bool InstructionSelector::ZeroExtendsWord32ToWord64NoPhis(Node* node) {
}
}
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable:
case IrOpcode::kProtectedLoad:
case IrOpcode::kPoisonedLoad: {
// The movzxbl/movsxbl/movzxwl/movsxwl/movl operations implicitly
......@@ -1671,7 +1672,8 @@ void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
}
break;
}
case IrOpcode::kLoad: {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable: {
if (TryMergeTruncateInt64ToInt32IntoLoad(this, node, value)) {
return;
}
......
......@@ -98,6 +98,7 @@ inline bool IsPointerConstant(Node* node) {
switch (node->opcode()) {
case IrOpcode::kHeapConstant:
case IrOpcode::kParameter:
case IrOpcode::kLoadImmutable:
return true;
default:
return false;
......
......@@ -254,6 +254,13 @@ void Int64Lowering::LowerNode(Node* node) {
machine()->UnalignedLoad(MachineType::Int32()));
break;
}
case IrOpcode::kLoadImmutable: {
MachineRepresentation rep =
LoadRepresentationOf(node->op()).representation();
LowerLoadOperator(node, rep,
machine()->LoadImmutable(MachineType::Int32()));
break;
}
case IrOpcode::kLoadFromObject: {
ObjectAccess access = ObjectAccessOf(node->op());
LowerLoadOperator(node, access.machine_type.representation(),
......
......@@ -63,29 +63,29 @@ void UnrollLoop(Node* loop_node, ZoneUnorderedSet<Node*>* loop, uint32_t depth,
// from the effect chain.
// The effect chain looks like this (* stand for irrelevant nodes):
//
// replacing effect (effect before stack check)
// * * | *
// | | | |
// ( Load )
// * * | *
// | | | |
// ( Load )
// {replacing_effect} (effect before stack check)
// * * | *
// | | | |
// ( LoadFromObject )
// | |
// stack_check
// | * | *
// | | | |
// | (call)
// | | *
// | | |
// {use} (stack check effect that we need to replace)
DCHECK_EQ(use->InputAt(1)->opcode(), IrOpcode::kCall);
DCHECK_EQ(use->InputAt(1)->InputAt(1), stack_check);
DCHECK_EQ(stack_check->InputAt(1)->opcode(),
// {stack_check}
// | * | *
// | | | |
// | ( Call )
// | | *
// | | |
// {use}: EffectPhi (stack check effect that we need to replace)
DCHECK_EQ(use->opcode(), IrOpcode::kEffectPhi);
DCHECK_EQ(NodeProperties::GetEffectInput(use, 1)->opcode(),
IrOpcode::kCall);
DCHECK_EQ(NodeProperties::GetEffectInput(use), stack_check);
DCHECK_EQ(NodeProperties::GetEffectInput(
NodeProperties::GetEffectInput(use, 1)),
stack_check);
DCHECK_EQ(NodeProperties::GetEffectInput(stack_check)->opcode(),
IrOpcode::kLoadFromObject);
DCHECK_EQ(stack_check->InputAt(1)->InputAt(2)->opcode(),
IrOpcode::kLoadFromObject);
Node* replacing_effect =
stack_check->InputAt(1)->InputAt(2)->InputAt(2);
Node* replacing_effect = NodeProperties::GetEffectInput(
NodeProperties::GetEffectInput(stack_check));
FOREACH_COPY_INDEX(i) {
COPY(use, i)->ReplaceUses(COPY(replacing_effect, i));
}
......
......@@ -122,6 +122,7 @@ class MachineRepresentationInferrer {
case IrOpcode::kWord32AtomicLoad:
case IrOpcode::kWord64AtomicLoad:
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable:
case IrOpcode::kProtectedLoad:
case IrOpcode::kPoisonedLoad:
representation_vector_[node->id()] = PromoteRepresentation(
......@@ -549,6 +550,7 @@ class MachineRepresentationChecker {
break;
case IrOpcode::kLoad:
case IrOpcode::kUnalignedLoad:
case IrOpcode::kLoadImmutable:
case IrOpcode::kWord32AtomicLoad:
case IrOpcode::kWord32AtomicPairLoad:
case IrOpcode::kWord64AtomicLoad:
......
......@@ -125,7 +125,8 @@ LoadRepresentation LoadRepresentationOf(Operator const* op) {
IrOpcode::kWord64AtomicLoad == op->opcode() ||
IrOpcode::kWord32AtomicPairLoad == op->opcode() ||
IrOpcode::kPoisonedLoad == op->opcode() ||
IrOpcode::kUnalignedLoad == op->opcode());
IrOpcode::kUnalignedLoad == op->opcode() ||
IrOpcode::kLoadImmutable == op->opcode());
return OpParameter<LoadRepresentation>(op);
}
......@@ -842,10 +843,18 @@ struct MachineOperatorGlobalCache {
Operator::kNoDeopt | Operator::kNoThrow, "ProtectedLoad", 2, 1, \
1, 1, 1, 0, MachineType::Type()) {} \
}; \
struct LoadImmutable##Type##Operator final \
: public Operator1<LoadRepresentation> { \
LoadImmutable##Type##Operator() \
: Operator1<LoadRepresentation>(IrOpcode::kLoadImmutable, \
Operator::kPure, "LoadImmutable", 2, \
0, 0, 1, 0, 0, MachineType::Type()) {} \
}; \
Load##Type##Operator kLoad##Type; \
PoisonedLoad##Type##Operator kPoisonedLoad##Type; \
UnalignedLoad##Type##Operator kUnalignedLoad##Type; \
ProtectedLoad##Type##Operator kProtectedLoad##Type;
ProtectedLoad##Type##Operator kProtectedLoad##Type; \
LoadImmutable##Type##Operator kLoadImmutable##Type;
MACHINE_TYPE_LIST(LOAD)
#undef LOAD
......@@ -1330,6 +1339,22 @@ const Operator* MachineOperatorBuilder::Load(LoadRepresentation rep) {
UNREACHABLE();
}
// Represents a load from a position in memory that is known to be immutable,
// e.g. an immutable IsolateRoot or an immutable field of a WasmInstanceObject.
// Because the returned value cannot change through the execution of a function,
// LoadImmutable is a pure operator and does not have effect or control edges.
// Requires that the memory in question has been initialized at function start
// even through inlining.
const Operator* MachineOperatorBuilder::LoadImmutable(LoadRepresentation rep) {
// Dispatch on the machine type and return the matching cached singleton
// operator from the global cache (one kLoadImmutable##Type instance per
// entry in MACHINE_TYPE_LIST).
#define LOAD(Type)                       \
  if (rep == MachineType::Type()) {      \
    return &cache_.kLoadImmutable##Type; \
  }
  MACHINE_TYPE_LIST(LOAD)
#undef LOAD
  // All valid representations are covered by MACHINE_TYPE_LIST above.
  UNREACHABLE();
}
const Operator* MachineOperatorBuilder::PoisonedLoad(LoadRepresentation rep) {
#define LOAD(Type) \
if (rep == MachineType::Type()) { \
......
......@@ -841,6 +841,7 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final
// load [base + index]
const Operator* Load(LoadRepresentation rep);
const Operator* LoadImmutable(LoadRepresentation rep);
const Operator* PoisonedLoad(LoadRepresentation rep);
const Operator* ProtectedLoad(LoadRepresentation rep);
......
......@@ -32,6 +32,7 @@ bool CanAllocate(const Node* node) {
case IrOpcode::kEffectPhi:
case IrOpcode::kIfException:
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable:
case IrOpcode::kLoadElement:
case IrOpcode::kLoadField:
case IrOpcode::kLoadFromObject:
......
......@@ -735,6 +735,7 @@ struct BaseWithIndexAndDisplacementMatcher {
Node* from = use.from();
switch (from->opcode()) {
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable:
case IrOpcode::kPoisonedLoad:
case IrOpcode::kProtectedLoad:
case IrOpcode::kInt32Add:
......
......@@ -681,6 +681,7 @@
V(Comment) \
V(Load) \
V(PoisonedLoad) \
V(LoadImmutable) \
V(Store) \
V(StackSlot) \
V(Word32Popcnt) \
......
......@@ -551,6 +551,9 @@ void SimdScalarLowering::LowerLoadOp(Node* node, SimdType type) {
case IrOpcode::kLoad:
load_op = machine()->Load(MachineTypeFrom(type));
break;
case IrOpcode::kLoadImmutable:
load_op = machine()->LoadImmutable(MachineTypeFrom(type));
break;
case IrOpcode::kLoadFromObject:
load_op = simplified()->LoadFromObject(
ObjectAccess(MachineTypeFrom(type), kNoWriteBarrier));
......@@ -1470,7 +1473,8 @@ void SimdScalarLowering::LowerNode(Node* node) {
case IrOpcode::kLoad:
case IrOpcode::kLoadFromObject:
case IrOpcode::kUnalignedLoad:
case IrOpcode::kProtectedLoad: {
case IrOpcode::kProtectedLoad:
case IrOpcode::kLoadImmutable: {
LowerLoadOp(node, rep_type);
break;
}
......
......@@ -1628,6 +1628,7 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
// Machine operators
// -----------------------
case IrOpcode::kLoad:
case IrOpcode::kLoadImmutable:
case IrOpcode::kPoisonedLoad:
case IrOpcode::kProtectedLoad:
case IrOpcode::kProtectedStore:
......
......@@ -79,16 +79,21 @@ MachineType assert_size(int expected_size, MachineType type) {
(WasmInstanceObject::k##name##OffsetEnd - \
WasmInstanceObject::k##name##Offset + 1) // NOLINT(whitespace/indent)
#define LOAD_INSTANCE_FIELD(name, type) \
#define LOAD_MUTABLE_INSTANCE_FIELD(name, type) \
gasm_->LoadFromObject( \
assert_size(WASM_INSTANCE_OBJECT_SIZE(name), type), BuildLoadInstance(), \
wasm::ObjectAccess::ToTagged(WasmInstanceObject::k##name##Offset))
#define LOAD_INSTANCE_FIELD(name, type) \
gasm_->LoadImmutable( \
assert_size(WASM_INSTANCE_OBJECT_SIZE(name), type), BuildLoadInstance(), \
wasm::ObjectAccess::ToTagged(WasmInstanceObject::k##name##Offset))
#define LOAD_ROOT(root_name, factory_name) \
(use_js_isolate_and_params() \
? graph()->NewNode(mcgraph()->common()->HeapConstant( \
isolate_->factory()->factory_name())) \
: gasm_->LoadFromObject( \
: gasm_->LoadImmutable( \
MachineType::Pointer(), BuildLoadIsolateRoot(), \
IsolateData::root_slot_offset(RootIndex::k##root_name)))
......@@ -245,6 +250,15 @@ class WasmGraphAssembler : public GraphAssembler {
return LoadFromObject(type, base, IntPtrConstant(offset));
}
  // Emits a LoadImmutable machine node reading {rep} from [base + offset].
  // LoadImmutable is a pure operator (Operator::kPure), so the node carries
  // only the two value inputs and no effect or control edges.
  Node* LoadImmutable(LoadRepresentation rep, Node* base, Node* offset) {
    return AddNode(graph()->NewNode(mcgraph()->machine()->LoadImmutable(rep),
                                    base, offset));
  }
  // Convenience overload: takes a constant byte offset and materializes it as
  // an IntPtr constant before delegating to the Node*-offset overload.
  Node* LoadImmutable(LoadRepresentation rep, Node* base, int offset) {
    return LoadImmutable(rep, base, IntPtrConstant(offset));
  }
Node* StoreToObject(ObjectAccess access, Node* base, Node* offset,
Node* value) {
return AddNode(graph()->NewNode(simplified_.StoreToObject(access), base,
......@@ -2956,19 +2970,19 @@ void WasmGraphBuilder::LoadIndirectFunctionTable(uint32_t table_index,
Node** ift_targets,
Node** ift_instances) {
if (table_index == 0) {
*ift_size =
LOAD_INSTANCE_FIELD(IndirectFunctionTableSize, MachineType::Uint32());
*ift_sig_ids = LOAD_INSTANCE_FIELD(IndirectFunctionTableSigIds,
MachineType::Pointer());
*ift_targets = LOAD_INSTANCE_FIELD(IndirectFunctionTableTargets,
MachineType::Pointer());
*ift_instances = LOAD_INSTANCE_FIELD(IndirectFunctionTableRefs,
MachineType::TaggedPointer());
*ift_size = LOAD_MUTABLE_INSTANCE_FIELD(IndirectFunctionTableSize,
MachineType::Uint32());
*ift_sig_ids = LOAD_MUTABLE_INSTANCE_FIELD(IndirectFunctionTableSigIds,
MachineType::Pointer());
*ift_targets = LOAD_MUTABLE_INSTANCE_FIELD(IndirectFunctionTableTargets,
MachineType::Pointer());
*ift_instances = LOAD_MUTABLE_INSTANCE_FIELD(IndirectFunctionTableRefs,
MachineType::TaggedPointer());
return;
}
Node* ift_tables =
LOAD_INSTANCE_FIELD(IndirectFunctionTables, MachineType::TaggedPointer());
Node* ift_tables = LOAD_MUTABLE_INSTANCE_FIELD(IndirectFunctionTables,
MachineType::TaggedPointer());
Node* ift_table = gasm_->LoadFixedArrayElementAny(ift_tables, table_index);
*ift_size = gasm_->LoadFromObject(
......@@ -3355,11 +3369,11 @@ void WasmGraphBuilder::InitInstanceCache(
// Load the memory start.
instance_cache->mem_start =
LOAD_INSTANCE_FIELD(MemoryStart, MachineType::UintPtr());
LOAD_MUTABLE_INSTANCE_FIELD(MemoryStart, MachineType::UintPtr());
// Load the memory size.
instance_cache->mem_size =
LOAD_INSTANCE_FIELD(MemorySize, MachineType::UintPtr());
LOAD_MUTABLE_INSTANCE_FIELD(MemorySize, MachineType::UintPtr());
if (untrusted_code_mitigations_) {
// Load the memory mask.
......@@ -8159,6 +8173,8 @@ AssemblerOptions WasmStubAssemblerOptions() {
#undef FATAL_UNSUPPORTED_OPCODE
#undef WASM_INSTANCE_OBJECT_SIZE
#undef LOAD_INSTANCE_FIELD
#undef LOAD_MUTABLE_INSTANCE_FIELD
#undef LOAD_ROOT
} // namespace compiler
} // namespace internal
......
......@@ -206,6 +206,42 @@ TEST_F(Int64LoweringTest, Int64LoadFromObject) {
simplified);
}
// Checks that Int64Lowering splits a 64-bit LoadImmutable into two 32-bit
// LoadImmutable nodes: one at the original index and one at index + 4.
// Which of the two halves is the low word depends on target endianness,
// hence the two preprocessor branches below.
TEST_F(Int64LoweringTest, Int64LoadImmutable) {
  int32_t base = 0x1234;
  int32_t index = 0x5678;
  LowerGraph(graph()->NewNode(machine()->LoadImmutable(MachineType::Int64()),
                              Int32Constant(base), Int32Constant(index)),
             MachineRepresentation::kWord64);
  Capture<Node*> high_word_load;
#if defined(V8_TARGET_LITTLE_ENDIAN)
  // Little-endian: low word at {index}, high word at {index + 4}.
  Matcher<Node*> high_word_load_matcher =
      IsLoadImmutable(MachineType::Int32(), IsInt32Constant(base),
                      IsInt32Add(IsInt32Constant(index), IsInt32Constant(0x4)));
  EXPECT_THAT(
      graph()->end()->InputAt(1),
      IsReturn2(IsLoadImmutable(MachineType::Int32(), IsInt32Constant(base),
                                IsInt32Constant(index)),
                AllOf(CaptureEq(&high_word_load), high_word_load_matcher),
                start(), start()));
#elif defined(V8_TARGET_BIG_ENDIAN)
  // Big-endian: high word at {index}, low word at {index + 4}.
  Matcher<Node*> high_word_load_matcher =
      IsLoadImmutable(MachineType::Int32(), IsInt32Constant(base),
                      IsInt32Constant(index), start(), start());
  EXPECT_THAT(
      graph()->end()->InputAt(1),
      IsReturn2(IsLoadImmutable(
                    MachineType::Int32(), IsInt32Constant(base),
                    IsInt32Add(IsInt32Constant(index), IsInt32Constant(0x4))),
                AllOf(CaptureEq(&high_word_load), high_word_load_matcher),
                start(), start()));
#endif
}
#if defined(V8_TARGET_LITTLE_ENDIAN)
#define STORE_VERIFY(kStore, kRep) \
EXPECT_THAT( \
......
......@@ -1153,6 +1153,43 @@ LOAD_MATCHER(UnalignedLoad)
LOAD_MATCHER(PoisonedLoad)
LOAD_MATCHER(LoadFromObject)
// gMock node matcher for IrOpcode::kLoadImmutable. Unlike the LOAD_MATCHER
// family used for Load/UnalignedLoad/etc., it matches no effect or control
// inputs — LoadImmutable is pure and has only the base and index value inputs.
class IsLoadImmutableMatcher final : public TestNodeMatcher {
 public:
  IsLoadImmutableMatcher(const Matcher<LoadRepresentation>& rep_matcher,
                         const Matcher<Node*>& base_matcher,
                         const Matcher<Node*>& index_matcher)
      : TestNodeMatcher(IrOpcode::kLoadImmutable),
        rep_matcher_(rep_matcher),
        base_matcher_(base_matcher),
        index_matcher_(index_matcher) {}

  void DescribeTo(std::ostream* os) const final {
    TestNodeMatcher::DescribeTo(os);
    *os << " whose rep (";
    rep_matcher_.DescribeTo(os);
    *os << "), base (";
    base_matcher_.DescribeTo(os);
    *os << ") and index (";
    index_matcher_.DescribeTo(os);
    *os << ")";
  }

  bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
    // TestNodeMatcher::MatchAndExplain checks the opcode first, so
    // LoadRepresentationOf below only runs on a node that carries one.
    LoadRepresentation rep = LoadRepresentationOf(node->op());
    return TestNodeMatcher::MatchAndExplain(node, listener) &&
           PrintMatchAndExplain(rep, "rep", rep_matcher_, listener) &&
           PrintMatchAndExplain(NodeProperties::GetValueInput(node, 0), "base",
                                base_matcher_, listener) &&
           PrintMatchAndExplain(NodeProperties::GetValueInput(node, 1), "index",
                                index_matcher_, listener);
  }

 private:
  const Matcher<LoadRepresentation> rep_matcher_;
  const Matcher<Node*> base_matcher_;
  const Matcher<Node*> index_matcher_;
};
#define STORE_MATCHER(kStore, representation) \
class Is##kStore##Matcher final : public TestNodeMatcher { \
public: \
......@@ -2096,6 +2133,13 @@ Matcher<Node*> IsLoadFromObject(const Matcher<LoadRepresentation>& rep_matcher,
control_matcher));
}
// Factory for the LoadImmutable node matcher. Takes no effect/control
// matchers because LoadImmutable is a pure operator with only value inputs.
Matcher<Node*> IsLoadImmutable(const Matcher<LoadRepresentation>& rep_matcher,
                               const Matcher<Node*>& base_matcher,
                               const Matcher<Node*>& index_matcher) {
  return MakeMatcher(
      new IsLoadImmutableMatcher(rep_matcher, base_matcher, index_matcher));
}
Matcher<Node*> IsStore(const Matcher<StoreRepresentation>& rep_matcher,
const Matcher<Node*>& base_matcher,
const Matcher<Node*>& index_matcher,
......
......@@ -343,6 +343,9 @@ Matcher<Node*> IsLoadFromObject(const Matcher<LoadRepresentation>& rep_matcher,
const Matcher<Node*>& index_matcher,
const Matcher<Node*>& effect_matcher,
const Matcher<Node*>& control_matcher);
Matcher<Node*> IsLoadImmutable(const Matcher<LoadRepresentation>& rep_matcher,
const Matcher<Node*>& base_matcher,
const Matcher<Node*>& index_matcher);
Matcher<Node*> IsStore(const Matcher<StoreRepresentation>& rep_matcher,
const Matcher<Node*>& base_matcher,
const Matcher<Node*>& index_matcher,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment